=== modified file 'src/alloc.c'
--- src/alloc.c	2014-01-03 06:42:23 +0000
+++ src/alloc.c	2014-01-19 03:12:44 +0000
@@ -95,6 +95,11 @@
 
 #define MMAP_MAX_AREAS 100000000
 
+/* Specify the allocation size over which to request bytes from mmap
+   directly.  */
+
+#define MMAP_THRESHOLD (64*1024)
+
 #endif /* not DOUG_LEA_MALLOC */
 
 /* Mark, unmark, query mark bit of a Lisp string.  S must be a pointer
@@ -204,6 +209,13 @@
 static char *stack_copy;
 static ptrdiff_t stack_copy_size;
 
+/* True if we need to preserve memory regions for dumping.  */
+#ifdef CANNOT_DUMP
+#define might_dump 0
+#else
+static bool might_dump = true;
+#endif
+
 /* Copy to DEST a block of memory from SRC of size SIZE bytes,
    avoiding any address sanitization.  */
 
@@ -963,21 +975,10 @@
 #endif
 
 /* BLOCK_ALIGN has to be a power of 2.  */
-#define BLOCK_ALIGN (1 << 10)
+#define BLOCK_ALIGN (1 << 16)
 
-/* Padding to leave at the end of a malloc'd block.  This is to give
-   malloc a chance to minimize the amount of memory wasted to alignment.
-   It should be tuned to the particular malloc library used.
-   On glibc-2.3.2, malloc never tries to align, so a padding of 0 is best.
-   aligned_alloc on the other hand would ideally prefer a value of 4
-   because otherwise, there's 1020 bytes wasted between each ablocks.
-   In Emacs, testing shows that those 1020 can most of the time be
-   efficiently used by malloc to place other objects, so a value of 0 can
-   still preferable unless you have a lot of aligned blocks and virtually
-   nothing else.  */
-#define BLOCK_PADDING 0
 #define BLOCK_BYTES \
-  (BLOCK_ALIGN - sizeof (struct ablocks *) - BLOCK_PADDING)
+  (BLOCK_ALIGN - sizeof (struct ablocks *))
 
 /* Internal data structures and constants.  */
 
@@ -1001,11 +1002,6 @@
      (if not, the word before the first ablock holds a pointer to the
      real base).  */
   struct ablocks *abase;
-  /* The padding of all but the last ablock is unused.  The padding of
-     the last ablock in an ablocks is not allocated.  */
-#if BLOCK_PADDING
-  char padding[BLOCK_PADDING];
-#endif
 };
 
 /* A bunch of consecutive aligned blocks.  */
@@ -1015,7 +1011,7 @@
 };
 
 /* Size of the block requested from malloc or aligned_alloc.  */
-#define ABLOCKS_BYTES (sizeof (struct ablocks) - BLOCK_PADDING)
+#define ABLOCKS_BYTES (sizeof (struct ablocks))
 
 #define ABLOCK_ABASE(block) \
   (((uintptr_t) (block)->abase) <= (1 + 2 * ABLOCKS_SIZE) \
@@ -1062,7 +1058,8 @@
   /* Prevent mmap'ing the chunk.  Lisp data may not be mmap'ed
      because mapped region contents are not preserved in
     a dumped Emacs.  */
-  mallopt (M_MMAP_MAX, 0);
+  if (might_dump)
+    mallopt (M_MMAP_MAX, 0);
 #endif
 
 #ifdef USE_ALIGNED_ALLOC
@@ -1084,7 +1081,8 @@
 
 #ifdef DOUG_LEA_MALLOC
   /* Back to a reasonable maximum of mmap'ed areas.  */
-  mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
+  if (might_dump)
+    mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
 #endif
 
 #if ! USE_LSB_TAG
@@ -1728,14 +1726,16 @@
 	 mmap'ed data typically have an address towards the top of the
 	 address space, which won't fit into an EMACS_INT (at least on
 	 32-bit systems with the current tagging scheme).  --fx  */
-      mallopt (M_MMAP_MAX, 0);
+      if (might_dump)
+	mallopt (M_MMAP_MAX, 0);
 #endif
 
       b = lisp_malloc (size + GC_STRING_EXTRA, MEM_TYPE_NON_LISP);
 
 #ifdef DOUG_LEA_MALLOC
       /* Back to a reasonable maximum of mmap'ed areas.  */
-      mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
+      if (might_dump)
+	mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
 #endif
 
       b->next_free = b->data;
@@ -3039,7 +3039,8 @@
   /* Prevent mmap'ing the chunk.  Lisp data may not be mmap'ed
      because mapped region contents are not preserved in
     a dumped Emacs.  */
-  mallopt (M_MMAP_MAX, 0);
+  if (might_dump)
+    mallopt (M_MMAP_MAX, 0);
 #endif
 
   if (nbytes <= VBLOCK_BYTES_MAX)
@@ -3057,7 +3058,8 @@
 
 #ifdef DOUG_LEA_MALLOC
   /* Back to a reasonable maximum of mmap'ed areas.  */
-  mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
+  if (might_dump)
+    mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
 #endif
 
   consing_since_gc += nbytes;
@@ -6777,9 +6779,9 @@
 #endif
 
 #ifdef DOUG_LEA_MALLOC
-  mallopt (M_TRIM_THRESHOLD, 128 * 1024); /* Trim threshold.  */
-  mallopt (M_MMAP_THRESHOLD, 64 * 1024); /* Mmap threshold.  */
-  mallopt (M_MMAP_MAX, MMAP_MAX_AREAS); /* Max. number of mmap'ed areas.  */
+  mallopt (M_TRIM_THRESHOLD, 2 * MMAP_THRESHOLD);
+  mallopt (M_MMAP_THRESHOLD, MMAP_THRESHOLD);
+  mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
 #endif
   init_strings ();
   init_vectors ();
@@ -6804,6 +6806,11 @@
 #if USE_VALGRIND
   valgrind_p = RUNNING_ON_VALGRIND != 0;
 #endif
+
+#ifndef CANNOT_DUMP
+  if (initialized)
+    might_dump = false;
+#endif
 }
 
 void
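
The recurring change above wraps each mallopt (M_MMAP_MAX, ...) call in an
if (might_dump) test, so malloc's use of mmap is only suppressed while the
process may still dump its heap.  Below is a minimal standalone sketch of
that pattern, not part of the patch itself; it assumes glibc's mallopt and
M_MMAP_MAX, and the names dump_safe_malloc and MMAP_MAX_AREAS_EXAMPLE are
hypothetical.

/* Illustrative sketch of the dump-aware mallopt gating.  */
#include <malloc.h>
#include <stdbool.h>
#include <stdlib.h>

#ifdef CANNOT_DUMP
#define might_dump 0            /* an undumpable build never needs the guard */
#else
static bool might_dump = true;  /* cleared once dumping is no longer possible */
#endif

#define MMAP_MAX_AREAS_EXAMPLE 100000000

static void *
dump_safe_malloc (size_t nbytes)
{
  void *p;

  /* mmap'ed regions are not preserved by dumping, so forbid malloc's
     use of mmap only while a dump may still happen.  */
  if (might_dump)
    mallopt (M_MMAP_MAX, 0);

  p = malloc (nbytes);

  /* Afterwards, restore the normal limit on mmap'ed areas.  */
  if (might_dump)
    mallopt (M_MMAP_MAX, MMAP_MAX_AREAS_EXAMPLE);

  return p;
}

Once the running Emacs can no longer dump (initialized is set), might_dump
stays false and both mallopt calls are skipped, letting large allocations go
through mmap as controlled by MMAP_THRESHOLD.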