#if defined(__clang_analyzer__) && !defined(SDL_DISABLE_ANALYZE_MACROS)
#define SDL_DISABLE_ANALYZE_MACROS 1
#endif

#include "../SDL_internal.h"

/* This is a version of Doug Lea's public-domain dlmalloc, configured
   for SDL's build environment. */

#define LACKS_SYS_TYPES_H
#define LACKS_STRINGS_H
#define LACKS_STRING_H
#define LACKS_STDLIB_H

#ifdef WIN32
#define WIN32_LEAN_AND_MEAN
#define HAVE_MORECORE 0
#define LACKS_UNISTD_H
#define LACKS_SYS_PARAM_H
#define LACKS_SYS_MMAN_H
#define LACKS_STRING_H
#define LACKS_STRINGS_H
#define LACKS_SYS_TYPES_H
#define LACKS_ERRNO_H
#define LACKS_FCNTL_H
#define MALLOC_FAILURE_ACTION
#define MMAP_CLEARS 0           /* WINCE and some others apparently don't clear */
#endif /* WIN32 */

#if defined(DARWIN) || defined(_DARWIN)
/* Mac OSX docs advise not to use sbrk; it seems better to use mmap */
#ifndef HAVE_MORECORE
#define HAVE_MORECORE 0
#endif /* HAVE_MORECORE */
#endif /* DARWIN */

#ifndef LACKS_SYS_TYPES_H
#include <sys/types.h>          /* For size_t */
#endif /* LACKS_SYS_TYPES_H */

/* The maximum possible size_t value has all bits set */
#define MAX_SIZE_T (~(size_t)0)

#define ONLY_MSPACES 0
#ifndef MSPACES
#if ONLY_MSPACES
#define MSPACES 1
#else /* ONLY_MSPACES */
#define MSPACES 0
#endif /* ONLY_MSPACES */
#endif /* MSPACES */

#ifndef MALLOC_ALIGNMENT
#define MALLOC_ALIGNMENT ((size_t)8U)
#endif /* MALLOC_ALIGNMENT */

#ifndef ABORT
#define ABORT abort()
#endif /* ABORT */

#ifndef ABORT_ON_ASSERT_FAILURE
#define ABORT_ON_ASSERT_FAILURE 1
#endif /* ABORT_ON_ASSERT_FAILURE */

#ifndef PROCEED_ON_ERROR
#define PROCEED_ON_ERROR 0
#endif /* PROCEED_ON_ERROR */

#ifndef HAVE_MMAP
#define HAVE_MMAP 1
#endif /* HAVE_MMAP */

#ifndef MMAP_CLEARS
#define MMAP_CLEARS 1
#endif /* MMAP_CLEARS */

#ifndef HAVE_MREMAP
#ifdef linux
#define HAVE_MREMAP 1
#else /* linux */
#define HAVE_MREMAP 0
#endif /* linux */
#endif /* HAVE_MREMAP */

#ifndef MALLOC_FAILURE_ACTION
#define MALLOC_FAILURE_ACTION  errno = ENOMEM;
#endif /* MALLOC_FAILURE_ACTION */

#ifndef HAVE_MORECORE
#if ONLY_MSPACES
#define HAVE_MORECORE 0
#else /* ONLY_MSPACES */
#define HAVE_MORECORE 1
#endif /* ONLY_MSPACES */
#endif /* HAVE_MORECORE */

#if !HAVE_MORECORE
#define MORECORE_CONTIGUOUS 0
#else /* !HAVE_MORECORE */
#ifndef MORECORE
#define MORECORE sbrk
#endif /* MORECORE */
#ifndef MORECORE_CONTIGUOUS
#define MORECORE_CONTIGUOUS 1
#endif /* MORECORE_CONTIGUOUS */
#endif /* !HAVE_MORECORE */

#ifndef DEFAULT_GRANULARITY
#if MORECORE_CONTIGUOUS
#define DEFAULT_GRANULARITY (0) /* 0 means to compute in init_mparams */
#else /* MORECORE_CONTIGUOUS */
#define DEFAULT_GRANULARITY ((size_t)64U * (size_t)1024U)
#endif /* MORECORE_CONTIGUOUS */
#endif /* DEFAULT_GRANULARITY */

#ifndef DEFAULT_TRIM_THRESHOLD
#ifndef MORECORE_CANNOT_TRIM
#define DEFAULT_TRIM_THRESHOLD ((size_t)2U * (size_t)1024U * (size_t)1024U)
#else /* MORECORE_CANNOT_TRIM */
#define DEFAULT_TRIM_THRESHOLD MAX_SIZE_T
#endif /* MORECORE_CANNOT_TRIM */
#endif /* DEFAULT_TRIM_THRESHOLD */

#ifndef DEFAULT_MMAP_THRESHOLD
#if HAVE_MMAP
#define DEFAULT_MMAP_THRESHOLD ((size_t)256U * (size_t)1024U)
#else /* HAVE_MMAP */
#define DEFAULT_MMAP_THRESHOLD MAX_SIZE_T
#endif /* HAVE_MMAP */
#endif /* DEFAULT_MMAP_THRESHOLD */

#ifndef USE_BUILTIN_FFS
#define USE_BUILTIN_FFS 0
#endif /* USE_BUILTIN_FFS */

#ifndef USE_DEV_RANDOM
#define USE_DEV_RANDOM 0
#endif /* USE_DEV_RANDOM */

#define NO_MALLINFO 0

#ifndef MALLINFO_FIELD_TYPE
#define MALLINFO_FIELD_TYPE size_t
#endif /* MALLINFO_FIELD_TYPE */

/* Route the C library names used below to SDL's implementations. */
#define memset  SDL_memset
#define memcpy  SDL_memcpy

/* The mallopt tuning options supported by this malloc. */
#define M_TRIM_THRESHOLD     (-1)
#define M_GRANULARITY        (-2)
#define M_MMAP_THRESHOLD     (-3)

#ifdef HAVE_USR_INCLUDE_MALLOC_H
#include "/usr/include/malloc.h"
#endif /* HAVE_USR_INCLUDE_MALLOC_H */

#ifndef USE_DL_PREFIX
#define dlcalloc               calloc
#define dlfree                 free
#define dlmalloc               malloc
#define dlmemalign             memalign
#define dlrealloc              realloc
#define dlvalloc               valloc
#define dlpvalloc              pvalloc
#define dlmallinfo             mallinfo
#define dlmallopt              mallopt
#define dlmalloc_trim          malloc_trim
#define dlmalloc_stats         malloc_stats
#define dlmalloc_usable_size   malloc_usable_size
#define dlmalloc_footprint     malloc_footprint
#define dlmalloc_max_footprint malloc_max_footprint
#define dlindependent_calloc   independent_calloc
#define dlindependent_comalloc independent_comalloc
#endif /* USE_DL_PREFIX */

/*
   An opaque type representing an independent region of space that
   supports mspace_malloc, etc.
 */
typedef void *mspace;
/*
   create_mspace creates and returns a new independent space with the
   given initial capacity, or, if 0, the default granularity size.  It
   returns null if there is no system memory available to create the
   space.  If argument locked is non-zero, the space uses a separate
   lock to control access.
 */
mspace create_mspace(size_t capacity, int locked);

/*
   destroy_mspace destroys the given space, and attempts to return all
   of its memory back to the system, returning the total number of
   bytes freed.
 */
size_t destroy_mspace(mspace msp);

/*
   create_mspace_with_base uses the memory supplied as the initial base
   of a new mspace.
 */
mspace create_mspace_with_base(void *base, size_t capacity, int locked);

/* mspace_malloc behaves as malloc, but operates within the given space. */
void *mspace_malloc(mspace msp, size_t bytes);

/* mspace_free behaves as free, but operates within the given space. */
void mspace_free(mspace msp, void *mem);

/* mspace_realloc behaves as realloc, but operates within the given space. */
void *mspace_realloc(mspace msp, void *mem, size_t newsize);

/* mspace_calloc behaves as calloc, but operates within the given space. */
void *mspace_calloc(mspace msp, size_t n_elements, size_t elem_size);

/* mspace_memalign behaves as memalign, but operates within the given space. */
void *mspace_memalign(mspace msp, size_t alignment, size_t bytes);

/* mspace_independent_calloc behaves as independent_calloc, but
   operates within the given space. */
void **mspace_independent_calloc(mspace msp, size_t n_elements,
                                 size_t elem_size, void *chunks[]);

/* mspace_independent_comalloc behaves as independent_comalloc, but
   operates within the given space. */
void **mspace_independent_comalloc(mspace msp, size_t n_elements,
                                   size_t sizes[], void *chunks[]);

/* mspace_footprint() returns the number of bytes obtained from the
   system for this space. */
size_t mspace_footprint(mspace msp);

/* mspace_max_footprint() returns the peak number of bytes obtained
   from the system for this space. */
size_t mspace_max_footprint(mspace msp);

/* mspace_mallinfo behaves as mallinfo, but reports properties of the
   given space. */
struct mallinfo mspace_mallinfo(mspace msp);

/* mspace_malloc_stats behaves as malloc_stats, but reports properties
   of the given space. */
void mspace_malloc_stats(mspace msp);

/* mspace_trim behaves as malloc_trim, but operates within the given space. */
int mspace_trim(mspace msp, size_t pad);

/* An alias for mallopt. */
int mspace_mallopt(int, int);
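/*
   A minimal usage sketch for the mspace API declared above, kept under
   "#if 0" so it is never compiled.  It assumes the file was built with
   MSPACES enabled; the demo_arena_usage name and the sizes are
   illustrative only, not part of this file.
 */
#if 0
static int
demo_arena_usage(void)
{
    mspace arena;
    void *buf;
    void *tbl;

    /* 1 MB initial capacity, no separate lock. */
    arena = create_mspace((size_t) (1024 * 1024), 0);
    if (arena == 0)
        return -1;

    buf = mspace_malloc(arena, 4096);
    tbl = mspace_calloc(arena, 128, sizeof(void *));

    mspace_free(arena, tbl);
    mspace_free(arena, buf);

    /* Returns every segment of the space to the system in one call. */
    (void) destroy_mspace(arena);
    return 0;
}
#endif /* 0 (usage sketch) */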
#ifdef _MSC_VER
#pragma warning( disable : 4146 )       /* no "unsigned" warnings */
#endif /* _MSC_VER */

#ifndef LACKS_STDIO_H
#include <stdio.h>              /* for printing in malloc_stats */
#endif

#ifndef LACKS_ERRNO_H
#include <errno.h>              /* for MALLOC_FAILURE_ACTION */
#endif /* LACKS_ERRNO_H */

#ifndef LACKS_STDLIB_H
#include <stdlib.h>             /* for abort() */
#endif /* LACKS_STDLIB_H */

#ifdef DEBUG
#if ABORT_ON_ASSERT_FAILURE
#define assert(x) if(!(x)) ABORT
#else /* ABORT_ON_ASSERT_FAILURE */
#include <assert.h>
#endif /* ABORT_ON_ASSERT_FAILURE */
#else /* DEBUG */
#define assert(x)
#endif /* DEBUG */

#ifndef LACKS_STRING_H
#include <string.h>             /* for memset etc */
#endif /* LACKS_STRING_H */

#ifndef LACKS_STRINGS_H
#include <strings.h>            /* for ffs */
#endif /* LACKS_STRINGS_H */

#if HAVE_MMAP
#ifndef LACKS_SYS_MMAN_H
#include <sys/mman.h>           /* for mmap */
#endif /* LACKS_SYS_MMAN_H */
#ifndef LACKS_FCNTL_H
#include <fcntl.h>
#endif /* LACKS_FCNTL_H */
#endif /* HAVE_MMAP */

#if HAVE_MORECORE
#ifndef LACKS_UNISTD_H
#include <unistd.h>             /* for sbrk */
#else /* LACKS_UNISTD_H */
#if !defined(__FreeBSD__) && !defined(__OpenBSD__) && !defined(__NetBSD__)
extern void *sbrk(ptrdiff_t);
#endif /* FreeBSD etc */
#endif /* LACKS_UNISTD_H */
#endif /* HAVE_MORECORE */
#ifndef malloc_getpagesize
#  ifdef _SC_PAGESIZE           /* some SVR4 systems omit an underscore */
#    ifndef _SC_PAGE_SIZE
#      define _SC_PAGE_SIZE _SC_PAGESIZE
#    endif
#  endif
#  ifdef _SC_PAGE_SIZE
#    define malloc_getpagesize sysconf(_SC_PAGE_SIZE)
#  else
#    if defined(BSD) || defined(DGUX) || defined(HAVE_GETPAGESIZE)
extern size_t getpagesize();
#      define malloc_getpagesize getpagesize()
#    else
#      ifdef WIN32              /* use supplied emulation of getpagesize */
#        define malloc_getpagesize getpagesize()
#      else
#        ifndef LACKS_SYS_PARAM_H
#          include <sys/param.h>
#        endif
#        ifdef EXEC_PAGESIZE
#          define malloc_getpagesize EXEC_PAGESIZE
#        else
#          ifdef NBPG
#            ifndef CLSIZE
#              define malloc_getpagesize NBPG
#            else
#              define malloc_getpagesize (NBPG * CLSIZE)
#            endif
#          else
#            ifdef NBPC
#              define malloc_getpagesize NBPC
#            else
#              ifdef PAGESIZE
#                define malloc_getpagesize PAGESIZE
#              else /* just guess */
#                define malloc_getpagesize ((size_t)4096U)
#              endif
#            endif
#          endif
#        endif
#      endif
#    endif
#  endif
#endif

/* ------------------- size_t and alignment properties -------------------- */

/* The byte and bit size of a size_t */
#define SIZE_T_SIZE         (sizeof(size_t))
#define SIZE_T_BITSIZE      (sizeof(size_t) << 3)

/* Some constants coerced to size_t */
/* Annoying but necessary to avoid errors on some platforms */
#define SIZE_T_ZERO         ((size_t)0)
#define SIZE_T_ONE          ((size_t)1)
#define SIZE_T_TWO          ((size_t)2)
#define TWO_SIZE_T_SIZES    (SIZE_T_SIZE<<1)
#define FOUR_SIZE_T_SIZES   (SIZE_T_SIZE<<2)
#define SIX_SIZE_T_SIZES    (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)
#define HALF_MAX_SIZE_T     (MAX_SIZE_T / 2U)

/* The bit mask value corresponding to MALLOC_ALIGNMENT */
#define CHUNK_ALIGN_MASK    (MALLOC_ALIGNMENT - SIZE_T_ONE)

/* True if address a has acceptable alignment */
#define is_aligned(A)       (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)

/* the number of bytes to offset an address to align it */
#define align_offset(A)\
 ((((size_t)(A) & CHUNK_ALIGN_MASK) == 0)? 0 :\
  ((MALLOC_ALIGNMENT - ((size_t)(A) & CHUNK_ALIGN_MASK)) & CHUNK_ALIGN_MASK))

/* -------------------------- MMAP preliminaries ------------------------- */

/* MORECORE and MMAP must return MFAIL on failure */
#define MFAIL                ((void*)(MAX_SIZE_T))
#define CMFAIL               ((char*)(MFAIL))  /* defined for convenience */

#if !HAVE_MMAP
#define IS_MMAPPED_BIT       (SIZE_T_ZERO)
#define USE_MMAP_BIT         (SIZE_T_ZERO)
#define CALL_MMAP(s)         MFAIL
#define CALL_MUNMAP(a, s)    (-1)
#define DIRECT_MMAP(s)       MFAIL
#else /* HAVE_MMAP */
#define IS_MMAPPED_BIT       (SIZE_T_ONE)
#define USE_MMAP_BIT         (SIZE_T_ONE)

#ifndef WIN32
#define CALL_MUNMAP(a, s)    munmap((a), (s))
#define MMAP_PROT            (PROT_READ|PROT_WRITE)
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS        MAP_ANON
#endif /* MAP_ANON */
#ifdef MAP_ANONYMOUS
#define MMAP_FLAGS           (MAP_PRIVATE|MAP_ANONYMOUS)
#define CALL_MMAP(s)         mmap(0, (s), MMAP_PROT, MMAP_FLAGS, -1, 0)
#else /* MAP_ANONYMOUS */
/*
   Nearly all versions of mmap support MAP_ANONYMOUS, so the following
   is unlikely to be needed, but is supplied just in case.
 */
#define MMAP_FLAGS           (MAP_PRIVATE)
static int dev_zero_fd = -1;    /* Cached file descriptor for /dev/zero. */
#define CALL_MMAP(s) ((dev_zero_fd < 0) ? \
           (dev_zero_fd = open("/dev/zero", O_RDWR), \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0)) : \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0))
#endif /* MAP_ANONYMOUS */

#define DIRECT_MMAP(s)       CALL_MMAP(s)
#else /* WIN32 */

/* Win32 MMAP via VirtualAlloc */
static void *
win32mmap(size_t size)
{
    void *ptr =
        VirtualAlloc(0, size, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE);
    return (ptr != 0) ? ptr : MFAIL;
}

/* For direct MMAP, use MEM_TOP_DOWN to minimize interference */
static void *
win32direct_mmap(size_t size)
{
    void *ptr = VirtualAlloc(0, size, MEM_RESERVE | MEM_COMMIT | MEM_TOP_DOWN,
                             PAGE_READWRITE);
    return (ptr != 0) ? ptr : MFAIL;
}

/* This function supports releasing coalesced segments */
static int
win32munmap(void *ptr, size_t size)
{
    MEMORY_BASIC_INFORMATION minfo;
    char *cptr = ptr;
    while (size) {
        if (VirtualQuery(cptr, &minfo, sizeof(minfo)) == 0)
            return -1;
        if (minfo.BaseAddress != cptr || minfo.AllocationBase != cptr ||
            minfo.State != MEM_COMMIT || minfo.RegionSize > size)
            return -1;
        if (VirtualFree(cptr, 0, MEM_RELEASE) == 0)
            return -1;
        cptr += minfo.RegionSize;
        size -= minfo.RegionSize;
    }
    return 0;
}

#define CALL_MMAP(s)         win32mmap(s)
#define CALL_MUNMAP(a, s)    win32munmap((a), (s))
#define DIRECT_MMAP(s)       win32direct_mmap(s)
#endif /* WIN32 */
#endif /* HAVE_MMAP */

#if HAVE_MMAP && HAVE_MREMAP
#define CALL_MREMAP(addr, osz, nsz, mv) mremap((addr), (osz), (nsz), (mv))
#else /* HAVE_MMAP && HAVE_MREMAP */
#define CALL_MREMAP(addr, osz, nsz, mv) MFAIL
#endif /* HAVE_MMAP && HAVE_MREMAP */

#if HAVE_MORECORE
#define CALL_MORECORE(S)     MORECORE(S)
#else /* HAVE_MORECORE */
#define CALL_MORECORE(S)     MFAIL
#endif /* HAVE_MORECORE */

/* mstate bit set if contiguous morecore disabled or failed */
#define USE_NONCONTIGUOUS_BIT (4U)

/* segment bit set in create_mspace_with_base */
#define EXTERN_BIT            (8U)

/* --------------------------- Lock preliminaries ------------------------ */

#if USE_LOCKS

#ifndef WIN32
/* By default use posix locks */
#include <pthread.h>
#define MLOCK_T            pthread_mutex_t
#define INITIAL_LOCK(l)    pthread_mutex_init(l, NULL)
#define ACQUIRE_LOCK(l)    pthread_mutex_lock(l)
#define RELEASE_LOCK(l)    pthread_mutex_unlock(l)

#if HAVE_MORECORE
static MLOCK_T morecore_mutex = PTHREAD_MUTEX_INITIALIZER;
#endif /* HAVE_MORECORE */
static MLOCK_T magic_init_mutex = PTHREAD_MUTEX_INITIALIZER;

#else /* WIN32 */
/*
   Because lock-protected regions have bounded times and there are no
   recursive lock calls, a simple spinlock suffices on Win32.
 */
#define MLOCK_T            long

static int
win32_acquire_lock(MLOCK_T * sl)
{
    for (;;) {
#ifdef InterlockedCompareExchangePointer
        if (!InterlockedCompareExchange(sl, 1, 0))
            return 0;
#else /* Use older void* version */
        if (!InterlockedCompareExchange((void **) sl, (void *) 1, (void *) 0))
            return 0;
#endif /* InterlockedCompareExchangePointer */
        Sleep(0);
    }
}

static void
win32_release_lock(MLOCK_T * sl)
{
    InterlockedExchange(sl, 0);
}

#define INITIAL_LOCK(l)    *(l)=0
#define ACQUIRE_LOCK(l)    win32_acquire_lock(l)
#define RELEASE_LOCK(l)    win32_release_lock(l)
#if HAVE_MORECORE
static MLOCK_T morecore_mutex;
#endif /* HAVE_MORECORE */
static MLOCK_T magic_init_mutex;
#endif /* WIN32 */
#define USE_LOCK_BIT           (2U)
#else /* USE_LOCKS */
#define USE_LOCK_BIT           (0U)
#define INITIAL_LOCK(l)
#endif /* USE_LOCKS */

#if USE_LOCKS && HAVE_MORECORE
#define ACQUIRE_MORECORE_LOCK()    ACQUIRE_LOCK(&morecore_mutex);
#define RELEASE_MORECORE_LOCK()    RELEASE_LOCK(&morecore_mutex);
#else /* USE_LOCKS && HAVE_MORECORE */
#define ACQUIRE_MORECORE_LOCK()
#define RELEASE_MORECORE_LOCK()
#endif /* USE_LOCKS && HAVE_MORECORE */

#if USE_LOCKS
#define ACQUIRE_MAGIC_INIT_LOCK()  ACQUIRE_LOCK(&magic_init_mutex);
#define RELEASE_MAGIC_INIT_LOCK()  RELEASE_LOCK(&magic_init_mutex);
#else /* USE_LOCKS */
#define ACQUIRE_MAGIC_INIT_LOCK()
#define RELEASE_MAGIC_INIT_LOCK()
#endif /* USE_LOCKS */

/* -----------------------  Chunk representations ------------------------ */

#define MCHUNK_SIZE         (sizeof(mchunk))

#if FOOTERS
#define CHUNK_OVERHEAD      (TWO_SIZE_T_SIZES)
#else /* FOOTERS */
#define CHUNK_OVERHEAD      (SIZE_T_SIZE)
#endif /* FOOTERS */

/* MMapped chunks need a second word of overhead ... */
#define MMAP_CHUNK_OVERHEAD (TWO_SIZE_T_SIZES)
/* ... and additional padding for fake next-chunk for mmap */
#define MMAP_FOOT_PAD       (FOUR_SIZE_T_SIZES)

/* The smallest size we can malloc is an aligned minimal chunk */
#define MIN_CHUNK_SIZE\
  ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)

/* conversion from malloc headers to user pointers, and back */
#define chunk2mem(p)        ((void*)((char*)(p)       + TWO_SIZE_T_SIZES))
#define mem2chunk(mem)      ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))
/* chunk associated with aligned address A */
#define align_as_chunk(A)   (mchunkptr)((A) + align_offset(chunk2mem(A)))

/* Bounds on request (not chunk) sizes. */
#define MAX_REQUEST         ((-MIN_CHUNK_SIZE) << 2)
#define MIN_REQUEST         (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)

/* pad request bytes into a usable size */
#define pad_request(req) \
   (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)

/* pad request, checking for minimum (but not maximum) */
#define request2size(req) \
  (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))

/* ------------------ Operations on head and foot fields ----------------- */

/*
   The head field of a chunk is or'ed with PINUSE_BIT when the previous
   adjacent chunk is in use, and or'ed with CINUSE_BIT if this chunk is
   in use.
 */
#define PINUSE_BIT          (SIZE_T_ONE)
#define CINUSE_BIT          (SIZE_T_TWO)
#define INUSE_BITS          (PINUSE_BIT|CINUSE_BIT)

/* Head value for fenceposts */
#define FENCEPOST_HEAD      (INUSE_BITS|SIZE_T_SIZE)

/* extraction of fields from head words */
#define cinuse(p)           ((p)->head & CINUSE_BIT)
#define pinuse(p)           ((p)->head & PINUSE_BIT)
#define chunksize(p)        ((p)->head & ~(INUSE_BITS))

#define clear_pinuse(p)     ((p)->head &= ~PINUSE_BIT)
#define clear_cinuse(p)     ((p)->head &= ~CINUSE_BIT)

/* Treat space at ptr +/- offset as a chunk */
#define chunk_plus_offset(p, s)  ((mchunkptr)(((char*)(p)) + (s)))
#define chunk_minus_offset(p, s) ((mchunkptr)(((char*)(p)) - (s)))

/* Ptr to next or previous physical malloc_chunk. */
#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->head & ~INUSE_BITS)))
#define prev_chunk(p) ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))

/* extract next chunk's pinuse bit */
#define next_pinuse(p)  ((next_chunk(p)->head) & PINUSE_BIT)

/* Get/set size at footer */
#define get_foot(p, s)  (((mchunkptr)((char*)(p) + (s)))->prev_foot)
#define set_foot(p, s)  (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))

/* Set size, pinuse bit, and foot */
#define set_size_and_pinuse_of_free_chunk(p, s)\
  ((p)->head = (s|PINUSE_BIT), set_foot(p, s))

/* Set size, pinuse bit, foot, and clear next pinuse */
#define set_free_with_pinuse(p, s, n)\
  (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))

#define is_mmapped(p)\
  (!((p)->head & PINUSE_BIT) && ((p)->prev_foot & IS_MMAPPED_BIT))

/* Get the internal overhead associated with chunk p */
#define overhead_for(p)\
 (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)

/* Return true if malloced space is not necessarily cleared */
#if MMAP_CLEARS
#define calloc_must_clear(p) (!is_mmapped(p))
#else /* MMAP_CLEARS */
#define calloc_must_clear(p) (1)
#endif /* MMAP_CLEARS */

/* ---------------------------- Tree chunks ------------------------------ */

/* The leftmost child of a tree node */
#define leftmost_child(t) ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])

/* ----------------------------- Segments -------------------------------- */

#define is_mmapped_segment(S)  ((S)->sflags & IS_MMAPPED_BIT)
#define is_extern_segment(S)   ((S)->sflags & EXTERN_BIT)

/* ---------------------------- malloc_state ----------------------------- */

/* Bin types, widths and sizes */
#define NSMALLBINS        (32U)
#define NTREEBINS         (32U)
#define SMALLBIN_SHIFT    (3U)
#define SMALLBIN_WIDTH    (SIZE_T_ONE << SMALLBIN_SHIFT)
#define TREEBIN_SHIFT     (8U)
#define MIN_LARGE_SIZE    (SIZE_T_ONE << TREEBIN_SHIFT)
#define MAX_SMALL_SIZE    (MIN_LARGE_SIZE - SIZE_T_ONE)
#define MAX_SMALL_REQUEST (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)

#define is_global(M)       ((M) == &_gm_)
#define is_initialized(M)  ((M)->top != 0)

/* -------------------------- system alloc setup ------------------------- */

/* Operations on mflags */

#define use_lock(M)           ((M)->mflags &   USE_LOCK_BIT)
#define enable_lock(M)        ((M)->mflags |=  USE_LOCK_BIT)
#define disable_lock(M)       ((M)->mflags &= ~USE_LOCK_BIT)

#define use_mmap(M)           ((M)->mflags &   USE_MMAP_BIT)
#define enable_mmap(M)        ((M)->mflags |=  USE_MMAP_BIT)
#define disable_mmap(M)       ((M)->mflags &= ~USE_MMAP_BIT)

#define use_noncontiguous(M)  ((M)->mflags &   USE_NONCONTIGUOUS_BIT)
#define disable_contiguous(M) ((M)->mflags |=  USE_NONCONTIGUOUS_BIT)

#define set_lock(M,L)\
 ((M)->mflags = (L)?\
  ((M)->mflags | USE_LOCK_BIT) :\
  ((M)->mflags & ~USE_LOCK_BIT))

/* page-align a size */
#define page_align(S)\
 (((S) + (mparams.page_size)) & ~(mparams.page_size - SIZE_T_ONE))

/* granularity-align a size */
#define granularity_align(S)\
  (((S) + (mparams.granularity)) & ~(mparams.granularity - SIZE_T_ONE))

#define is_page_aligned(S)\
   (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)
#define is_granularity_aligned(S)\
   (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)

/*  True if segment S holds address A */
#define segment_holds(S, A)\
  ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)

/* Return segment holding given address */
static msegmentptr
segment_holding(mstate m, char *addr)
{
    msegmentptr sp = &m->seg;
    for (;;) {
        if (addr >= sp->base && addr < sp->base + sp->size)
            return sp;
        if ((sp = sp->next) == 0)
            return 0;
    }
}

/* Return true if segment contains a segment link */
static int
has_segment_link(mstate m, msegmentptr ss)
{
    msegmentptr sp = &m->seg;
    for (;;) {
        if ((char *) sp >= ss->base && (char *) sp < ss->base + ss->size)
            return 1;
        if ((sp = sp->next) == 0)
            return 0;
    }
}

#ifndef MORECORE_CANNOT_TRIM
#define should_trim(M,s)  ((s) > (M)->trim_check)
#else /* MORECORE_CANNOT_TRIM */
#define should_trim(M,s)  (0)
#endif /* MORECORE_CANNOT_TRIM */

/*
   TOP_FOOT_SIZE is padding at the end of a segment, including space
   that may be needed to place segment records and fenceposts when new
   noncontiguous segments are added.
 */
#define TOP_FOOT_SIZE\
  (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)

/* -------------------------------  Hooks -------------------------------- */

/*
   PREACTION should be defined to return 0 on success, and nonzero on
   failure.
 */

#if USE_LOCKS
/* Ensure locks are initialized */
#define GLOBALLY_INITIALIZE() (mparams.page_size == 0 && init_mparams())

#define PREACTION(M)  ((GLOBALLY_INITIALIZE() || use_lock(M))? ACQUIRE_LOCK(&(M)->mutex) : 0)
#define POSTACTION(M) { if (use_lock(M)) RELEASE_LOCK(&(M)->mutex); }
#else /* USE_LOCKS */
#ifndef PREACTION
#define PREACTION(M) (0)
#endif /* PREACTION */
#ifndef POSTACTION
#define POSTACTION(M)
#endif /* POSTACTION */
#endif /* USE_LOCKS */
/*
   CORRUPTION_ERROR_ACTION is triggered upon detected bad addresses.
   USAGE_ERROR_ACTION is triggered on detected bad frees and reallocs.
 */

#if PROCEED_ON_ERROR

/* A count of the number of corruption errors causing resets */
int malloc_corruption_error_count;

/* default corruption action */
static void reset_on_error(mstate m);

#define CORRUPTION_ERROR_ACTION(m)  reset_on_error(m)
#define USAGE_ERROR_ACTION(m, p)

#else /* PROCEED_ON_ERROR */

#ifndef CORRUPTION_ERROR_ACTION
#define CORRUPTION_ERROR_ACTION(m) ABORT
#endif /* CORRUPTION_ERROR_ACTION */

#ifndef USAGE_ERROR_ACTION
#define USAGE_ERROR_ACTION(m,p) ABORT
#endif /* USAGE_ERROR_ACTION */

#endif /* PROCEED_ON_ERROR */

/* -------------------------- Debugging setup ---------------------------- */

#if !DEBUG

#define check_free_chunk(M,P)
#define check_inuse_chunk(M,P)
#define check_malloced_chunk(M,P,N)
#define check_mmapped_chunk(M,P)
#define check_malloc_state(M)
#define check_top_chunk(M,P)

#else /* DEBUG */
#define check_free_chunk(M,P)       do_check_free_chunk(M,P)
#define check_inuse_chunk(M,P)      do_check_inuse_chunk(M,P)
#define check_top_chunk(M,P)        do_check_top_chunk(M,P)
#define check_malloced_chunk(M,P,N) do_check_malloced_chunk(M,P,N)
#define check_mmapped_chunk(M,P)    do_check_mmapped_chunk(M,P)
#define check_malloc_state(M)       do_check_malloc_state(M)

static void do_check_any_chunk(mstate m, mchunkptr p);
static void do_check_top_chunk(mstate m, mchunkptr p);
static void do_check_mmapped_chunk(mstate m, mchunkptr p);
static void do_check_inuse_chunk(mstate m, mchunkptr p);
static void do_check_free_chunk(mstate m, mchunkptr p);
static void do_check_malloced_chunk(mstate m, void *mem, size_t s);
static void do_check_tree(mstate m, tchunkptr t);
static void do_check_treebin(mstate m, bindex_t i);
static void do_check_smallbin(mstate m, bindex_t i);
static void do_check_malloc_state(mstate m);
static int bin_find(mstate m, mchunkptr x);
static size_t traverse_and_check(mstate m);
#endif /* DEBUG */

/* ---------------------------- Indexing Bins ---------------------------- */

#define is_small(s)         (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)
#define small_index(s)      ((s)  >> SMALLBIN_SHIFT)
#define small_index2size(i) ((i)  << SMALLBIN_SHIFT)
#define MIN_SMALL_INDEX     (small_index(MIN_CHUNK_SIZE))

/* addressing by index */
#define smallbin_at(M, i)   ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))
#define treebin_at(M,i)     (&((M)->treebins[i]))

/* assign tree index for size S to variable I */
#if defined(__GNUC__) && defined(i386)
#define compute_tree_index(S, I)\
{\
  size_t X = S >> TREEBIN_SHIFT;\
  if (X == 0)\
    I = 0;\
  else if (X > 0xFFFF)\
    I = NTREEBINS-1;\
  else {\
    unsigned int K;\
    __asm__("bsrl %1,%0\n\t" : "=r" (K) : "rm" (X));\
    I = (bindex_t)((K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1)));\
  }\
}
#else /* GNUC */
#define compute_tree_index(S, I)\
{\
  size_t X = S >> TREEBIN_SHIFT;\
  if (X == 0)\
    I = 0;\
  else if (X > 0xFFFF)\
    I = NTREEBINS-1;\
  else {\
    unsigned int Y = (unsigned int)X;\
    unsigned int N = ((Y - 0x100) >> 16) & 8;\
    unsigned int K = (((Y <<= N) - 0x1000) >> 16) & 4;\
    N += K;\
    N += K = (((Y <<= K) - 0x4000) >> 16) & 2;\
    K = 14 - N + ((Y <<= K) >> 15);\
    I = (K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1));\
  }\
}
#endif /* GNUC */

/* Bit representing maximum resolved size in a treebin at i */
#define bit_for_tree_index(i) \
   (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)

/* Shift placing maximum resolved bit in a treebin at i as sign bit */
#define leftshift_for_tree_index(i) \
   ((i == NTREEBINS-1)? 0 : \
    ((SIZE_T_BITSIZE-SIZE_T_ONE) - (((i) >> 1) + TREEBIN_SHIFT - 2)))

/* The size of the smallest chunk held in bin with index i */
#define minsize_for_tree_index(i) \
   ((SIZE_T_ONE << (((i) >> 1) + TREEBIN_SHIFT)) |  \
   (((size_t)((i) & SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1)))

/* ------------------------ Operations on bin maps ----------------------- */

/* bit corresponding to given index */
#define idx2bit(i)              ((binmap_t)(1) << (i))

/* Mark/Clear bits with given index */
#define mark_smallmap(M,i)      ((M)->smallmap |=  idx2bit(i))
#define clear_smallmap(M,i)     ((M)->smallmap &= ~idx2bit(i))
#define smallmap_is_marked(M,i) ((M)->smallmap &   idx2bit(i))

#define mark_treemap(M,i)       ((M)->treemap  |=  idx2bit(i))
#define clear_treemap(M,i)      ((M)->treemap  &= ~idx2bit(i))
#define treemap_is_marked(M,i)  ((M)->treemap  &   idx2bit(i))

/* index corresponding to given bit */
#if defined(__GNUC__) && defined(i386)
#define compute_bit2idx(X, I)\
{\
  unsigned int J;\
  __asm__("bsfl %1,%0\n\t" : "=r" (J) : "rm" (X));\
  I = (bindex_t)J;\
}
#else /* GNUC */
#if USE_BUILTIN_FFS
#define compute_bit2idx(X, I) I = ffs(X)-1
#else /* USE_BUILTIN_FFS */
#define compute_bit2idx(X, I)\
{\
  unsigned int Y = X - 1;\
  unsigned int K = Y >> (16-4) & 16;\
  unsigned int N = K;        Y >>= K;\
  N += K = Y >> (8-3) &  8;  Y >>= K;\
  N += K = Y >> (4-2) &  4;  Y >>= K;\
  N += K = Y >> (2-1) &  2;  Y >>= K;\
  N += K = Y >> (1-0) &  1;  Y >>= K;\
  I = (bindex_t)(N + Y);\
}
#endif /* USE_BUILTIN_FFS */
#endif /* GNUC */

/* isolate the least set bit of a bitmap */
#define least_bit(x)         ((x) & -(x))

/* mask with all bits to left of least bit of x on */
#define left_bits(x)         ((x<<1) | -(x<<1))

/* mask with all bits to left of or equal to least bit of x on */
#define same_or_left_bits(x) ((x) | -(x))

/* ----------------------- Runtime Check Support ------------------------- */

#if !INSECURE
/* Check if address a is at least as high as any from MORECORE or MMAP */
#define ok_address(M, a) ((char*)(a) >= (M)->least_addr)
/* Check if address of next chunk n is higher than base chunk p */
#define ok_next(p, n)    ((char*)(p) < (char*)(n))
/* Check if p has its cinuse bit on */
#define ok_cinuse(p)     cinuse(p)
/* Check if p has its pinuse bit on */
#define ok_pinuse(p)     pinuse(p)
#else /* !INSECURE */
#define ok_address(M, a) (1)
#define ok_next(b, n)    (1)
#define ok_cinuse(p)     (1)
#define ok_pinuse(p)     (1)
#endif /* !INSECURE */

#if (FOOTERS && !INSECURE)
/* Check if (alleged) mstate m has expected magic field */
#define ok_magic(M)      ((M)->magic == mparams.magic)
#else /* (FOOTERS && !INSECURE) */
#define ok_magic(M)      (1)
#endif /* (FOOTERS && !INSECURE) */

/* In gcc, use __builtin_expect to minimize impact of checks */
#if !INSECURE
#if defined(__GNUC__) && __GNUC__ >= 3
#define RTCHECK(e)  __builtin_expect(e, 1)
#else /* GNUC */
#define RTCHECK(e)  (e)
#endif /* GNUC */
#else /* !INSECURE */
#define RTCHECK(e)  (1)
#endif /* !INSECURE */

/* macros to set up inuse chunks with or without footers */

#if !FOOTERS

#define mark_inuse_foot(M,p,s)

/* Set cinuse bit and pinuse bit of next chunk */
#define set_inuse(M,p,s)\
  ((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)

/* Set cinuse and pinuse of this chunk and pinuse of next chunk */
#define set_inuse_and_pinuse(M,p,s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)

/* Set size, cinuse and pinuse bit of this chunk */
#define set_size_and_pinuse_of_inuse_chunk(M, p, s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))

#else /* FOOTERS */
/* Set foot of inuse chunk to be xor of mstate and seed */
#define mark_inuse_foot(M,p,s)\
  (((mchunkptr)((char*)(p) + (s)))->prev_foot = ((size_t)(M) ^ mparams.magic))

#define get_mstate_for(p)\
  ((mstate)(((mchunkptr)((char*)(p) +\
    (chunksize(p))))->prev_foot ^ mparams.magic))

#define set_inuse(M,p,s)\
  ((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
  (((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT), \
  mark_inuse_foot(M,p,s))

#define set_inuse_and_pinuse(M,p,s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  (((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT),\
  mark_inuse_foot(M,p,s))

#define set_size_and_pinuse_of_inuse_chunk(M, p, s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  mark_inuse_foot(M, p, s))

#endif /* !FOOTERS */

/* ---------------------------- setting mparams -------------------------- */

/* Initialize mparams */
static int
init_mparams(void)
{
    if (mparams.page_size == 0) {
        size_t s;
        mparams.mmap_threshold = DEFAULT_MMAP_THRESHOLD;
        mparams.trim_threshold = DEFAULT_TRIM_THRESHOLD;
#if MORECORE_CONTIGUOUS
        mparams.default_mflags = USE_LOCK_BIT | USE_MMAP_BIT;
#else /* MORECORE_CONTIGUOUS */
        mparams.default_mflags =
            USE_LOCK_BIT | USE_MMAP_BIT | USE_NONCONTIGUOUS_BIT;
#endif /* MORECORE_CONTIGUOUS */

#if (FOOTERS && !INSECURE)
        {
#if USE_DEV_RANDOM
            int fd;
            unsigned char buf[sizeof(size_t)];
            /* Try to use /dev/urandom, else fall back on using time */
            if ((fd = open("/dev/urandom", O_RDONLY)) >= 0 &&
                read(fd, buf, sizeof(buf)) == sizeof(buf)) {
                s = *((size_t *) buf);
                close(fd);
            } else
#endif /* USE_DEV_RANDOM */
                s = (size_t) (time(0) ^ (size_t) 0x55555555U);

            s |= (size_t) 8U;   /* ensure nonzero */
            s &= ~(size_t) 7U;  /* improve chances of fault for bad values */
        }
#else /* (FOOTERS && !INSECURE) */
        s = (size_t) 0x58585858U;
#endif /* (FOOTERS && !INSECURE) */
        ACQUIRE_MAGIC_INIT_LOCK();
        if (mparams.magic == 0) {
            mparams.magic = s;
            /* Set up lock for main malloc area */
            INITIAL_LOCK(&gm->mutex);
            gm->mflags = mparams.default_mflags;
        }
        RELEASE_MAGIC_INIT_LOCK();

#ifndef WIN32
        mparams.page_size = malloc_getpagesize;
        mparams.granularity = ((DEFAULT_GRANULARITY != 0) ?
                               DEFAULT_GRANULARITY : mparams.page_size);
#else /* WIN32 */
        {
            SYSTEM_INFO system_info;
            GetSystemInfo(&system_info);
            mparams.page_size = system_info.dwPageSize;
            mparams.granularity = system_info.dwAllocationGranularity;
        }
#endif /* WIN32 */

        /*
           Sanity-check configuration:
           size_t must be unsigned and as wide as pointer type.
           ints must be at least 4 bytes.
           alignment must be at least 8.
           Alignment, min chunk size, and page size must all be powers of 2.
         */
        if ((sizeof(size_t) != sizeof(char *)) ||
            (MAX_SIZE_T < MIN_CHUNK_SIZE) ||
            (sizeof(int) < 4) ||
            (MALLOC_ALIGNMENT < (size_t) 8U) ||
            ((MALLOC_ALIGNMENT & (MALLOC_ALIGNMENT - SIZE_T_ONE)) != 0) ||
            ((MCHUNK_SIZE & (MCHUNK_SIZE - SIZE_T_ONE)) != 0) ||
            ((mparams.granularity & (mparams.granularity - SIZE_T_ONE)) != 0)
            || ((mparams.page_size & (mparams.page_size - SIZE_T_ONE)) != 0))
            ABORT;
    }
    return 0;
}

/* support for mallopt */
static int
change_mparam(int param_number, int value)
{
    size_t val = (size_t) value;
    init_mparams();
    switch (param_number) {
    case M_TRIM_THRESHOLD:
        mparams.trim_threshold = val;
        return 1;
    case M_GRANULARITY:
        if (val >= mparams.page_size && ((val & (val - 1)) == 0)) {
            mparams.granularity = val;
            return 1;
        } else
            return 0;
    case M_MMAP_THRESHOLD:
        mparams.mmap_threshold = val;
        return 1;
    default:
        return 0;
    }
}
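/*
   The portable compute_bit2idx fallback above is a branch-free
   find-first-set: given an isolated bit (as produced by least_bit),
   it returns the bit's index.  A hedged standalone sketch of the same
   trick, checked against every single-bit input (illustrative only,
   never compiled):
 */
#if 0
#include <assert.h>
static unsigned int
demo_bit2idx(unsigned int x)    /* x must be a single set bit */
{
    unsigned int Y = x - 1;
    unsigned int K = Y >> (16 - 4) & 16;
    unsigned int N = K;
    Y >>= K;
    N += K = Y >> (8 - 3) & 8;
    Y >>= K;
    N += K = Y >> (4 - 2) & 4;
    Y >>= K;
    N += K = Y >> (2 - 1) & 2;
    Y >>= K;
    N += K = Y >> (1 - 0) & 1;
    Y >>= K;
    return N + Y;
}

static void
demo_bit2idx_check(void)
{
    unsigned int i;
    for (i = 0; i < 32; ++i)
        assert(demo_bit2idx(1U << i) == i);
}
#endif /* 0 (bit2idx sketch) */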
#if DEBUG
/* ------------------------- Debugging Support --------------------------- */

/* Check properties of any chunk, whether free, inuse, mmapped etc. */
static void
do_check_any_chunk(mstate m, mchunkptr p)
{
    assert((is_aligned(chunk2mem(p))) || (p->head == FENCEPOST_HEAD));
    assert(ok_address(m, p));
}

/* Check properties of top chunk */
static void
do_check_top_chunk(mstate m, mchunkptr p)
{
    msegmentptr sp = segment_holding(m, (char *) p);
    size_t sz = chunksize(p);
    assert(sp != 0);
    assert((is_aligned(chunk2mem(p))) || (p->head == FENCEPOST_HEAD));
    assert(ok_address(m, p));
    assert(sz == m->topsize);
    assert(sz > 0);
    assert(sz == ((sp->base + sp->size) - (char *) p) - TOP_FOOT_SIZE);
    assert(pinuse(p));
    assert(!next_pinuse(p));
}

/* Check properties of (inuse) mmapped chunks */
static void
do_check_mmapped_chunk(mstate m, mchunkptr p)
{
    size_t sz = chunksize(p);
    size_t len = (sz + (p->prev_foot & ~IS_MMAPPED_BIT) + MMAP_FOOT_PAD);
    assert(is_mmapped(p));
    assert(use_mmap(m));
    assert((is_aligned(chunk2mem(p))) || (p->head == FENCEPOST_HEAD));
    assert(ok_address(m, p));
    assert(!is_small(sz));
    assert((len & (mparams.page_size - SIZE_T_ONE)) == 0);
    assert(chunk_plus_offset(p, sz)->head == FENCEPOST_HEAD);
    assert(chunk_plus_offset(p, sz + SIZE_T_SIZE)->head == 0);
}

/* Check properties of inuse chunks */
static void
do_check_inuse_chunk(mstate m, mchunkptr p)
{
    do_check_any_chunk(m, p);
    assert(cinuse(p));
    assert(next_pinuse(p));
    /* If not pinuse and not mmapped, previous chunk has OK offset */
    assert(is_mmapped(p) || pinuse(p) || next_chunk(prev_chunk(p)) == p);
    if (is_mmapped(p))
        do_check_mmapped_chunk(m, p);
}

/* Check properties of free chunks */
static void
do_check_free_chunk(mstate m, mchunkptr p)
{
    size_t sz = p->head & ~(PINUSE_BIT | CINUSE_BIT);
    mchunkptr next = chunk_plus_offset(p, sz);
    do_check_any_chunk(m, p);
    assert(!cinuse(p));
    assert(!next_pinuse(p));
    assert(!is_mmapped(p));
    if (p != m->dv && p != m->top) {
        if (sz >= MIN_CHUNK_SIZE) {
            assert((sz & CHUNK_ALIGN_MASK) == 0);
            assert(is_aligned(chunk2mem(p)));
            assert(next->prev_foot == sz);
            assert(pinuse(p));
            assert(next == m->top || cinuse(next));
            assert(p->fd->bk == p);
            assert(p->bk->fd == p);
        } else                  /* markers are always of size SIZE_T_SIZE */
            assert(sz == SIZE_T_SIZE);
    }
}

/* Check properties of malloced chunks at the point they are malloced */
static void
do_check_malloced_chunk(mstate m, void *mem, size_t s)
{
    if (mem != 0) {
        mchunkptr p = mem2chunk(mem);
        size_t sz = p->head & ~(PINUSE_BIT | CINUSE_BIT);
        do_check_inuse_chunk(m, p);
        assert((sz & CHUNK_ALIGN_MASK) == 0);
        assert(sz >= MIN_CHUNK_SIZE);
        assert(sz >= s);
        /* unless mmapped, size is less than MIN_CHUNK_SIZE more than request */
        assert(is_mmapped(p) || sz < (s + MIN_CHUNK_SIZE));
    }
}

/* Check a tree and its subtrees. */
static void
do_check_tree(mstate m, tchunkptr t)
{
    tchunkptr head = 0;
    tchunkptr u = t;
    bindex_t tindex = t->index;
    size_t tsize = chunksize(t);
    bindex_t idx;
    compute_tree_index(tsize, idx);
    assert(tindex == idx);
    assert(tsize >= MIN_LARGE_SIZE);
    assert(tsize >= minsize_for_tree_index(idx));
    assert((idx == NTREEBINS - 1)
           || (tsize < minsize_for_tree_index((idx + 1))));

    do {                        /* traverse through chain of same-sized nodes */
        do_check_any_chunk(m, ((mchunkptr) u));
        assert(u->index == tindex);
        assert(chunksize(u) == tsize);
        assert(!cinuse(u));
        assert(!next_pinuse(u));
        assert(u->fd->bk == u);
        assert(u->bk->fd == u);
        if (u->parent == 0) {
            assert(u->child[0] == 0);
            assert(u->child[1] == 0);
        } else {
            assert(head == 0);  /* only one node on chain has parent */
            head = u;
            assert(u->parent != u);
            assert(u->parent->child[0] == u ||
                   u->parent->child[1] == u ||
                   *((tbinptr *) (u->parent)) == u);
            if (u->child[0] != 0) {
                assert(u->child[0]->parent == u);
                assert(u->child[0] != u);
                do_check_tree(m, u->child[0]);
            }
            if (u->child[1] != 0) {
                assert(u->child[1]->parent == u);
                assert(u->child[1] != u);
                do_check_tree(m, u->child[1]);
            }
            if (u->child[0] != 0 && u->child[1] != 0) {
                assert(chunksize(u->child[0]) < chunksize(u->child[1]));
            }
        }
        u = u->fd;
    } while (u != t);
    assert(head != 0);
}

/* Check all the chunks in a treebin. */
static void
do_check_treebin(mstate m, bindex_t i)
{
    tbinptr *tb = treebin_at(m, i);
    tchunkptr t = *tb;
    int empty = (m->treemap & (1U << i)) == 0;
    if (t == 0)
        assert(empty);
    if (!empty)
        do_check_tree(m, t);
}

/* Check all the chunks in a smallbin. */
static void
do_check_smallbin(mstate m, bindex_t i)
{
    sbinptr b = smallbin_at(m, i);
    mchunkptr p = b->bk;
    unsigned int empty = (m->smallmap & (1U << i)) == 0;
    if (p == b)
        assert(empty);
    if (!empty) {
        for (; p != b; p = p->bk) {
            size_t size = chunksize(p);
            mchunkptr q;
            /* each chunk claims to be free */
            do_check_free_chunk(m, p);
            /* chunk belongs in bin */
            assert(small_index(size) == i);
            assert(p->bk == b || chunksize(p->bk) == chunksize(p));
            /* chunk is followed by an inuse chunk */
            q = next_chunk(p);
            if (q->head != FENCEPOST_HEAD)
                do_check_inuse_chunk(m, q);
        }
    }
}

/* Find x in a bin. Used in other check functions. */
static int
bin_find(mstate m, mchunkptr x)
{
    size_t size = chunksize(x);
    if (is_small(size)) {
        bindex_t sidx = small_index(size);
        sbinptr b = smallbin_at(m, sidx);
        if (smallmap_is_marked(m, sidx)) {
            mchunkptr p = b;
            do {
                if (p == x)
                    return 1;
            } while ((p = p->fd) != b);
        }
    } else {
        bindex_t tidx;
        compute_tree_index(size, tidx);
        if (treemap_is_marked(m, tidx)) {
            tchunkptr t = *treebin_at(m, tidx);
            size_t sizebits = size << leftshift_for_tree_index(tidx);
            while (t != 0 && chunksize(t) != size) {
                t = t->child[(sizebits >> (SIZE_T_BITSIZE - SIZE_T_ONE)) & 1];
                sizebits <<= 1;
            }
            if (t != 0) {
                tchunkptr u = t;
                do {
                    if (u == (tchunkptr) x)
                        return 1;
                } while ((u = u->fd) != t);
            }
        }
    }
    return 0;
}

/* Traverse each chunk and check it; return total */
static size_t
traverse_and_check(mstate m)
{
    size_t sum = 0;
    if (is_initialized(m)) {
        msegmentptr s = &m->seg;
        sum += m->topsize + TOP_FOOT_SIZE;
        while (s != 0) {
            mchunkptr q = align_as_chunk(s->base);
            mchunkptr lastq = 0;
            assert(pinuse(q));
            while (segment_holds(s, q) &&
                   q != m->top && q->head != FENCEPOST_HEAD) {
                sum += chunksize(q);
                if (cinuse(q)) {
                    assert(!bin_find(m, q));
                    do_check_inuse_chunk(m, q);
                } else {
                    assert(q == m->dv || bin_find(m, q));
                    assert(lastq == 0 || cinuse(lastq)); /* Not 2 consecutive free */
                    do_check_free_chunk(m, q);
                }
                lastq = q;
                q = next_chunk(q);
            }
            s = s->next;
        }
    }
    return sum;
}

/* Check all properties of malloc_state. */
static void
do_check_malloc_state(mstate m)
{
    bindex_t i;
    size_t total;
    /* check bins */
    for (i = 0; i < NSMALLBINS; ++i)
        do_check_smallbin(m, i);
    for (i = 0; i < NTREEBINS; ++i)
        do_check_treebin(m, i);

    if (m->dvsize != 0) {       /* check dv chunk */
        do_check_any_chunk(m, m->dv);
        assert(m->dvsize == chunksize(m->dv));
        assert(m->dvsize >= MIN_CHUNK_SIZE);
        assert(bin_find(m, m->dv) == 0);
    }

    if (m->top != 0) {          /* check top chunk */
        do_check_top_chunk(m, m->top);
        assert(m->topsize == chunksize(m->top));
        assert(m->topsize > 0);
        assert(bin_find(m, m->top) == 0);
    }

    total = traverse_and_check(m);
    assert(total <= m->footprint);
    assert(m->footprint <= m->max_footprint);
}
#endif /* DEBUG */
/* ----------------------------- statistics ------------------------------ */

#if !NO_MALLINFO
static struct mallinfo
internal_mallinfo(mstate m)
{
    struct mallinfo nm = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    if (!PREACTION(m)) {
        check_malloc_state(m);
        if (is_initialized(m)) {
            size_t nfree = SIZE_T_ONE;  /* top always free */
            size_t mfree = m->topsize + TOP_FOOT_SIZE;
            size_t sum = mfree;
            msegmentptr s = &m->seg;
            while (s != 0) {
                mchunkptr q = align_as_chunk(s->base);
                while (segment_holds(s, q) &&
                       q != m->top && q->head != FENCEPOST_HEAD) {
                    size_t sz = chunksize(q);
                    sum += sz;
                    if (!cinuse(q)) {
                        mfree += sz;
                        ++nfree;
                    }
                    q = next_chunk(q);
                }
                s = s->next;
            }

            nm.arena = sum;
            nm.ordblks = nfree;
            nm.hblkhd = m->footprint - sum;
            nm.usmblks = m->max_footprint;
            nm.uordblks = m->footprint - mfree;
            nm.fordblks = mfree;
            nm.keepcost = m->topsize;
        }

        POSTACTION(m);
    }
    return nm;
}
#endif /* !NO_MALLINFO */

static void
internal_malloc_stats(mstate m)
{
    if (!PREACTION(m)) {
#ifndef LACKS_STDIO_H
        size_t maxfp = 0;
#endif
        size_t fp = 0;
        size_t used = 0;
        check_malloc_state(m);
        if (is_initialized(m)) {
            msegmentptr s = &m->seg;
#ifndef LACKS_STDIO_H
            maxfp = m->max_footprint;
#endif
            fp = m->footprint;
            used = fp - (m->topsize + TOP_FOOT_SIZE);

            while (s != 0) {
                mchunkptr q = align_as_chunk(s->base);
                while (segment_holds(s, q) &&
                       q != m->top && q->head != FENCEPOST_HEAD) {
                    if (!cinuse(q))
                        used -= chunksize(q);
                    q = next_chunk(q);
                }
                s = s->next;
            }
        }
#ifndef LACKS_STDIO_H
        fprintf(stderr, "max system bytes = %10lu\n", (unsigned long) (maxfp));
        fprintf(stderr, "system bytes     = %10lu\n", (unsigned long) (fp));
        fprintf(stderr, "in use bytes     = %10lu\n", (unsigned long) (used));
#endif

        POSTACTION(m);
    }
}
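/*
   For orientation, the mallinfo fields filled in above map to totals a
   caller can inspect.  A hedged sketch of reading them through the
   public entry point (illustrative only; the name is dlmallinfo when
   USE_DL_PREFIX is defined, plain mallinfo otherwise):
 */
#if 0
#include <stdio.h>
static void
demo_print_mallinfo(void)
{
    struct mallinfo mi = dlmallinfo();
    printf("arena (total from system)  = %lu\n", (unsigned long) mi.arena);
    printf("ordblks (free chunk count) = %lu\n", (unsigned long) mi.ordblks);
    printf("uordblks (bytes in use)    = %lu\n", (unsigned long) mi.uordblks);
    printf("fordblks (bytes free)      = %lu\n", (unsigned long) mi.fordblks);
    printf("keepcost (top chunk size)  = %lu\n", (unsigned long) mi.keepcost);
}
#endif /* 0 (mallinfo sketch) */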
/* ----------------------- Operations on smallbins ----------------------- */

/* Link a free chunk into a smallbin */
#define insert_small_chunk(M, P, S) {\
  bindex_t I  = small_index(S);\
  mchunkptr B = smallbin_at(M, I);\
  mchunkptr F = B;\
  assert(S >= MIN_CHUNK_SIZE);\
  if (!smallmap_is_marked(M, I))\
    mark_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, B->fd)))\
    F = B->fd;\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
  B->fd = P;\
  F->bk = P;\
  P->fd = F;\
  P->bk = B;\
}

/* Unlink a chunk from a smallbin */
#define unlink_small_chunk(M, P, S) {\
  mchunkptr F = P->fd;\
  mchunkptr B = P->bk;\
  bindex_t I = small_index(S);\
  assert(P != B);\
  assert(P != F);\
  assert(chunksize(P) == small_index2size(I));\
  if (F == B)\
    clear_smallmap(M, I);\
  else if (RTCHECK((F == smallbin_at(M,I) || ok_address(M, F)) &&\
                   (B == smallbin_at(M,I) || ok_address(M, B)))) {\
    F->bk = B;\
    B->fd = F;\
  }\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
}

/* Unlink the first chunk from a smallbin */
#define unlink_first_small_chunk(M, B, P, I) {\
  mchunkptr F = P->fd;\
  assert(P != B);\
  assert(P != F);\
  assert(chunksize(P) == small_index2size(I));\
  if (B == F)\
    clear_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, F))) {\
    B->fd = F;\
    F->bk = B;\
  }\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
}

/* Replace dv node, binning the old one */
/* Used only when dvsize known to be small */
#define replace_dv(M, P, S) {\
  size_t DVS = M->dvsize;\
  if (DVS != 0) {\
    mchunkptr DV = M->dv;\
    assert(is_small(DVS));\
    insert_small_chunk(M, DV, DVS);\
  }\
  M->dvsize = S;\
  M->dv = P;\
}

/* ------------------------- Operations on trees ------------------------- */

/* Insert chunk into tree */
#define insert_large_chunk(M, X, S) {\
  tbinptr* H;\
  bindex_t I;\
  compute_tree_index(S, I);\
  H = treebin_at(M, I);\
  X->index = I;\
  X->child[0] = X->child[1] = 0;\
  if (!treemap_is_marked(M, I)) {\
    mark_treemap(M, I);\
    *H = X;\
    X->parent = (tchunkptr)H;\
    X->fd = X->bk = X;\
  }\
  else {\
    tchunkptr T = *H;\
    size_t K = S << leftshift_for_tree_index(I);\
    for (;;) {\
      if (chunksize(T) != S) {\
        tchunkptr* C = &(T->child[(K >> (SIZE_T_BITSIZE-SIZE_T_ONE)) & 1]);\
        K <<= 1;\
        if (*C != 0)\
          T = *C;\
        else if (RTCHECK(ok_address(M, C))) {\
          *C = X;\
          X->parent = T;\
          X->fd = X->bk = X;\
          break;\
        }\
        else {\
          CORRUPTION_ERROR_ACTION(M);\
          break;\
        }\
      }\
      else {\
        tchunkptr F = T->fd;\
        if (RTCHECK(ok_address(M, T) && ok_address(M, F))) {\
          T->fd = F->bk = X;\
          X->fd = F;\
          X->bk = T;\
          X->parent = 0;\
          break;\
        }\
        else {\
          CORRUPTION_ERROR_ACTION(M);\
          break;\
        }\
      }\
    }\
  }\
}

/* Unlink chunk from tree, promoting a replacement from its subtree */
#define unlink_large_chunk(M, X) {\
  tchunkptr XP = X->parent;\
  tchunkptr R;\
  if (X->bk != X) {\
    tchunkptr F = X->fd;\
    R = X->bk;\
    if (RTCHECK(ok_address(M, F))) {\
      F->bk = R;\
      R->fd = F;\
    }\
    else {\
      CORRUPTION_ERROR_ACTION(M);\
    }\
  }\
  else {\
    tchunkptr* RP;\
    if (((R = *(RP = &(X->child[1]))) != 0) ||\
        ((R = *(RP = &(X->child[0]))) != 0)) {\
      tchunkptr* CP;\
      while ((*(CP = &(R->child[1])) != 0) ||\
             (*(CP = &(R->child[0])) != 0)) {\
        R = *(RP = CP);\
      }\
      if (RTCHECK(ok_address(M, RP)))\
        *RP = 0;\
      else {\
        CORRUPTION_ERROR_ACTION(M);\
      }\
    }\
  }\
  if (XP != 0) {\
    tbinptr* H = treebin_at(M, X->index);\
    if (X == *H) {\
      if ((*H = R) == 0) \
        clear_treemap(M, X->index);\
    }\
    else if (RTCHECK(ok_address(M, XP))) {\
      if (XP->child[0] == X) \
        XP->child[0] = R;\
      else \
        XP->child[1] = R;\
    }\
    else\
      CORRUPTION_ERROR_ACTION(M);\
    if (R != 0) {\
      if (RTCHECK(ok_address(M, R))) {\
        tchunkptr C0, C1;\
        R->parent = XP;\
        if ((C0 = X->child[0]) != 0) {\
          if (RTCHECK(ok_address(M, C0))) {\
            R->child[0] = C0;\
            C0->parent = R;\
          }\
          else\
            CORRUPTION_ERROR_ACTION(M);\
        }\
        if ((C1 = X->child[1]) != 0) {\
          if (RTCHECK(ok_address(M, C1))) {\
            R->child[1] = C1;\
            C1->parent = R;\
          }\
          else\
            CORRUPTION_ERROR_ACTION(M);\
        }\
      }\
      else\
        CORRUPTION_ERROR_ACTION(M);\
    }\
  }\
}

/* Relays to large vs small bin operations */

#define insert_chunk(M, P, S)\
  if (is_small(S)) insert_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); insert_large_chunk(M, TP, S); }

#define unlink_chunk(M, P, S)\
  if (is_small(S)) unlink_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); unlink_large_chunk(M, TP); }

/* Relays to internal calls to malloc/free from realloc, memalign etc */

#if ONLY_MSPACES
#define internal_malloc(m, b) mspace_malloc(m, b)
#define internal_free(m, mem) mspace_free(m,mem);
#else /* ONLY_MSPACES */
#if MSPACES
#define internal_malloc(m, b)\
   (m == gm)? dlmalloc(b) : mspace_malloc(m, b)
#define internal_free(m, mem)\
   if (m == gm) dlfree(mem); else mspace_free(m,mem);
#else /* MSPACES */
#define internal_malloc(m, b) dlmalloc(b)
#define internal_free(m, mem) dlfree(mem)
#endif /* MSPACES */
#endif /* ONLY_MSPACES */
p = (mchunkptr) (mm + offset);
3242 if (mm < m->least_addr)
3244 if ((m->footprint += mmsize) > m->max_footprint)
3245 m->max_footprint = m->footprint;
3271 oldmmsize, newmmsize, 1);
3273 mchunkptr newp = (mchunkptr) (cp + offset);
3280 if (cp < m->least_addr)
3282 if ((m->footprint += newmmsize - oldmmsize) > m->max_footprint)
3283 m->max_footprint = m->footprint;
/* -------------------------- mspace management -------------------------- */

/* Initialize top chunk and its size */
static void
init_top(mstate m, mchunkptr p, size_t psize)
{
    /* Ensure alignment */
    size_t offset = align_offset(chunk2mem(p));
    p = (mchunkptr) ((char *) p + offset);
    psize -= offset;

    m->top = p;
    m->topsize = psize;
    p->head = psize | PINUSE_BIT;
    /* set size of fake trailing chunk holding overhead space only once */
    chunk_plus_offset(p, psize)->head = TOP_FOOT_SIZE;
    m->trim_check = mparams.trim_threshold; /* reset on each update */
}

/* Initialize bins for a new mstate that is otherwise zeroed out */
static void
init_bins(mstate m)
{
    /* Establish circular links for smallbins */
    bindex_t i;
    for (i = 0; i < NSMALLBINS; ++i) {
        sbinptr bin = smallbin_at(m, i);
        bin->fd = bin->bk = bin;
    }
}

#if PROCEED_ON_ERROR

/* default corruption action */
static void
reset_on_error(mstate m)
{
    int i;
    ++malloc_corruption_error_count;
    /* Reinitialize fields to forget about all memory */
    m->smallmap = m->treemap = 0;
    m->dvsize = m->topsize = 0;
    m->seg.base = 0;
    m->seg.size = 0;
    m->seg.next = 0;
    m->top = m->dv = 0;
    for (i = 0; i < NTREEBINS; ++i)
        *treebin_at(m, i) = 0;
    init_bins(m);
}
#endif /* PROCEED_ON_ERROR */
char *) oldfirst - (
char *)
p;
3351 size_t qsize = psize - nb;
3354 assert((
char *) oldfirst > (
char *) q);
3359 if (oldfirst == m->top) {
3360 size_t tsize = m->topsize += qsize;
3364 }
else if (oldfirst == m->dv) {
3365 size_t dsize = m->dvsize += qsize;
3390 char *old_top = (
char *) m->top;
3392 char *old_end = oldsp->base + oldsp->size;
3396 char *asp = rawsp +
offset;
3398 mchunkptr sp = (mchunkptr) csp;
3399 msegmentptr ss = (msegmentptr) (
chunk2mem(sp));
3401 mchunkptr
p = tnext;
3411 m->seg.base = tbase;
3412 m->seg.size = tsize;
3413 m->seg.sflags = mmapped;
3421 if ((
char *) (&(nextp->head)) < old_end)
3429 if (csp != old_top) {
3430 mchunkptr
q = (mchunkptr) old_top;
3431 size_t psize = csp - old_top;
/* -------------------------- System allocation -------------------------- */

/* Get memory from system using MORECORE or MMAP */
static void *
sys_alloc(mstate m, size_t nb)
{
    char *tbase = CMFAIL;
    size_t tsize = 0;
    flag_t mmap_flag = 0;

    init_mparams();

    /* Directly map large chunks */
    if (use_mmap(m) && nb >= mparams.mmap_threshold) {
        void *mem = mmap_alloc(m, nb);
        if (mem != 0)
            return mem;
    }

    /*
       ... acquisition logic elided: try MORECORE (contiguous, then
       noncontiguous) and MMAP, in an order depending on the
       noncontiguous flag, until at least nb + TOP_FOOT_SIZE bytes are
       obtained into (tbase, tsize, mmap_flag).  On the contiguous
       MORECORE path the span finally claimed is computed as
       "size_t ssize = end - br;" ...
     */

    if (tbase != CMFAIL) {

        if ((m->footprint += tsize) > m->max_footprint)
            m->max_footprint = m->footprint;

        if (!is_initialized(m)) {       /* first-time initialization */
            m->seg.base = m->least_addr = tbase;
            m->seg.size = tsize;
            m->seg.sflags = mmap_flag;
            m->magic = mparams.magic;
            init_bins(m);
            if (is_global(m))
                init_top(m, (mchunkptr) tbase, tsize - TOP_FOOT_SIZE);
            else {
                /* Offset top by embedded malloc_state */
                mchunkptr mn = next_chunk(mem2chunk(m));
                init_top(m, mn,
                         (size_t) ((tbase + tsize) - (char *) mn) -
                         TOP_FOOT_SIZE);
            }
        } else {
            /* Try to merge with an existing segment */
            msegmentptr sp = &m->seg;
            while (sp != 0 && tbase != sp->base + sp->size)
                sp = sp->next;
            if (sp != 0 && !is_extern_segment(sp) &&
                (sp->sflags & IS_MMAPPED_BIT) == mmap_flag &&
                segment_holds(sp, m->top)) {    /* append */
                sp->size += tsize;
                init_top(m, m->top, m->topsize + tsize);
            } else {
                if (tbase < m->least_addr)
                    m->least_addr = tbase;
                sp = &m->seg;
                while (sp != 0 && sp->base != tbase + tsize)
                    sp = sp->next;
                if (sp != 0 &&
                    !is_extern_segment(sp) &&
                    (sp->sflags & IS_MMAPPED_BIT) == mmap_flag) {
                    char *oldbase = sp->base;
                    sp->base = tbase;
                    sp->size += tsize;
                    return prepend_alloc(m, tbase, oldbase, nb);
                } else
                    add_segment(m, tbase, tsize, mmap_flag);
            }
        }

        if (nb < m->topsize) {  /* Allocate from new or extended top space */
            size_t rsize = m->topsize -= nb;
            mchunkptr p = m->top;
            mchunkptr r = m->top = chunk_plus_offset(p, nb);
            r->head = rsize | PINUSE_BIT;
            set_size_and_pinuse_of_inuse_chunk(m, p, nb);
            check_top_chunk(m, m->top);
            check_malloced_chunk(m, chunk2mem(p), nb);
            return chunk2mem(p);
        }
    }

    MALLOC_FAILURE_ACTION;
    return 0;
}
/* -----------------------  system deallocation -------------------------- */

/* Unmap and unlink any mmapped segments that don't contain used chunks */
static size_t
release_unused_segments(mstate m)
{
    size_t released = 0;
    msegmentptr pred = &m->seg;
    msegmentptr sp = pred->next;
    while (sp != 0) {
        char *base = sp->base;
        size_t size = sp->size;
        msegmentptr next = sp->next;
        if (is_mmapped_segment(sp) && !is_extern_segment(sp)) {
            mchunkptr p = align_as_chunk(base);
            size_t psize = chunksize(p);
            /* Can unmap if first chunk holds entire segment and not pinned */
            if (!cinuse(p)
                && (char *) p + psize >= base + size - TOP_FOOT_SIZE) {
                tchunkptr tp = (tchunkptr) p;
                assert(segment_holds(sp, (char *) sp));
                if (p == m->dv) {
                    m->dv = 0;
                    m->dvsize = 0;
                } else {
                    unlink_large_chunk(m, tp);
                }
                if (CALL_MUNMAP(base, size) == 0) {
                    released += size;
                    m->footprint -= size;
                    /* unlink obsoleted record */
                    sp = pred;
                    sp->next = next;
                } else {        /* back out if cannot unmap */
                    insert_large_chunk(m, tp, psize);
                }
            }
        }
        pred = sp;
        sp = next;
    }
    return released;
}
3691 if (m->topsize > pad) {
3694 size_t extra = ((m->topsize - pad + (unit -
SIZE_T_ONE)) / unit -
3701 size_t newsize = sp->size - extra;
3703 if ((
CALL_MREMAP(sp->base, sp->size, newsize, 0) !=
3705 || (
CALL_MUNMAP(sp->base + newsize, extra) == 0)) {
3716 if (old_br == sp->base + sp->size) {
3719 if (rel_br !=
CMFAIL && new_br < old_br)
3720 released = old_br - new_br;
3727 if (released != 0) {
3728 sp->size -= released;
3729 m->footprint -= released;
3730 init_top(m, m->top, m->topsize - released);
3744 return (released != 0) ? 1 : 0;
/* ---------------------------- malloc support --------------------------- */

/* allocate a large request from the best fitting chunk in a treebin */
static void *
tmalloc_large(mstate m, size_t nb)
{
    tchunkptr v = 0;
    size_t rsize = -nb;         /* Unsigned negation */
    tchunkptr t;
    bindex_t idx;
    compute_tree_index(nb, idx);

    if ((t = *treebin_at(m, idx)) != 0) {
        /* Traverse tree for this bin looking for node with size == nb */
        size_t sizebits = nb << leftshift_for_tree_index(idx);
        tchunkptr rst = 0;      /* The deepest untaken right subtree */
        for (;;) {
            tchunkptr rt;
            size_t trem = chunksize(t) - nb;
            if (trem < rsize) {
                v = t;
                if ((rsize = trem) == 0)
                    break;
            }
            rt = t->child[1];
            t = t->child[(sizebits >> (SIZE_T_BITSIZE - SIZE_T_ONE)) & 1];
            if (rt != 0 && rt != t)
                rst = rt;
            if (t == 0) {
                t = rst;        /* set t to least subtree holding sizes > nb */
                break;
            }
            sizebits <<= 1;
        }
    }

    if (t == 0 && v == 0) {     /* set t to root of next non-empty treebin */
        binmap_t leftbits = left_bits(idx2bit(idx)) & m->treemap;
        if (leftbits != 0) {
            bindex_t i;
            binmap_t leastbit = least_bit(leftbits);
            compute_bit2idx(leastbit, i);
            t = *treebin_at(m, i);
        }
    }

    while (t != 0) {            /* find smallest of tree or subtree */
        size_t trem = chunksize(t) - nb;
        if (trem < rsize) {
            rsize = trem;
            v = t;
        }
        t = leftmost_child(t);
    }

    /* If dv is a better fit, return 0 so malloc will use it */
    if (v != 0 && rsize < (size_t) (m->dvsize - nb)) {
        if (RTCHECK(ok_address(m, v))) {        /* split */
            mchunkptr r = chunk_plus_offset(v, nb);
            assert(chunksize(v) == rsize + nb);
            if (RTCHECK(ok_next(v, r))) {
                unlink_large_chunk(m, v);
                if (rsize < MIN_CHUNK_SIZE)
                    set_inuse_and_pinuse(m, v, (rsize + nb));
                else {
                    set_size_and_pinuse_of_inuse_chunk(m, v, nb);
                    set_size_and_pinuse_of_free_chunk(r, rsize);
                    insert_chunk(m, r, rsize);
                }
                return chunk2mem(v);
            }
        }
        CORRUPTION_ERROR_ACTION(m);
    }
    return 0;
}
/* --------------------------- realloc support --------------------------- */

static void *
internal_realloc(mstate m, void *oldmem, size_t bytes)
{
    if (bytes >= MAX_REQUEST) {
        MALLOC_FAILURE_ACTION;
        return 0;
    }
    if (!PREACTION(m)) {
        mchunkptr oldp = mem2chunk(oldmem);
        size_t oldsize = chunksize(oldp);
        mchunkptr next = chunk_plus_offset(oldp, oldsize);
        mchunkptr newp = 0;
        void *extra = 0;

        /* Try to either shrink or extend into top. Else malloc-copy-free */

        if (RTCHECK(ok_address(m, oldp) && ok_cinuse(oldp) &&
                    ok_next(oldp, next) && ok_pinuse(next))) {
            size_t nb = request2size(bytes);
            if (is_mmapped(oldp))
                newp = mmap_resize(m, oldp, nb);
            else if (oldsize >= nb) {   /* already big enough */
                size_t rsize = oldsize - nb;
                newp = oldp;
                if (rsize >= MIN_CHUNK_SIZE) {
                    mchunkptr remainder = chunk_plus_offset(newp, nb);
                    set_inuse(m, newp, nb);
                    set_inuse(m, remainder, rsize);
                    extra = chunk2mem(remainder);
                }
            } else if (next == m->top && oldsize + m->topsize > nb) {
                /* Expand into top */
                size_t newsize = oldsize + m->topsize;
                size_t newtopsize = newsize - nb;
                mchunkptr newtop = chunk_plus_offset(oldp, nb);
                set_inuse(m, oldp, nb);
                newtop->head = newtopsize | PINUSE_BIT;
                m->top = newtop;
                m->topsize = newtopsize;
                newp = oldp;
            }
        } else {
            USAGE_ERROR_ACTION(m, oldmem);
            POSTACTION(m);
            return 0;
        }

        POSTACTION(m);

        if (newp != 0) {
            if (extra != 0) {
                internal_free(m, extra);
            }
            check_inuse_chunk(m, newp);
            return chunk2mem(newp);
        } else {
            void *newmem = internal_malloc(m, bytes);
            if (newmem != 0) {
                size_t oc = oldsize - overhead_for(oldp);
                memcpy(newmem, oldmem, (oc < bytes) ? oc : bytes);
                internal_free(m, oldmem);
            }
            return newmem;
        }
    }
    return 0;
}
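/*
   The three outcomes above (shrink in place, grow into top, or
   malloc-copy-free) are invisible to callers; only the usual realloc
   contract is observable.  A hedged usage sketch, illustrative only:
 */
#if 0
static char *
demo_grow_buffer(char *buf, size_t newlen)
{
    /* dlrealloc preserves min(old payload, newlen) bytes, exactly as
       the memcpy on the copy path above does. */
    char *grown = (char *) dlrealloc(buf, newlen);
    if (grown == 0)
        return buf;             /* original buffer is still valid on failure */
    return grown;
}
#endif /* 0 (realloc sketch) */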
/* --------------------------- memalign support -------------------------- */

static void *
internal_memalign(mstate m, size_t alignment, size_t bytes)
{
    if (alignment <= MALLOC_ALIGNMENT)  /* Can just use malloc */
        return internal_malloc(m, bytes);
    if (alignment < MIN_CHUNK_SIZE)     /* must be at least a minimum chunk size */
        alignment = MIN_CHUNK_SIZE;
    if ((alignment & (alignment - SIZE_T_ONE)) != 0) {  /* Ensure a power of 2 */
        size_t a = MALLOC_ALIGNMENT << 1;
        while (a < alignment)
            a <<= 1;
        alignment = a;
    }

    if (bytes >= MAX_REQUEST - alignment) {
        if (m != 0) {           /* Test isn't needed but avoids compiler warning */
            MALLOC_FAILURE_ACTION;
        }
    } else {
        size_t nb = request2size(bytes);
        size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
        char *mem = (char *) internal_malloc(m, req);
        if (mem != 0) {
            void *leader = 0;
            void *trailer = 0;
            mchunkptr p = mem2chunk(mem);

            if (PREACTION(m))
                return 0;
            if ((((size_t) (mem)) % alignment) != 0) {  /* misaligned */
                /*
                   Find an aligned spot inside chunk.  Since we need to
                   give back leading space in a chunk of at least
                   MIN_CHUNK_SIZE, if the first calculation places us at
                   a spot with less than MIN_CHUNK_SIZE leader, we can
                   move to the next aligned spot; we've allocated enough
                   total room so that this is always possible.
                 */
                char *br = (char *)
                    mem2chunk((size_t)
                              (((size_t) (mem + alignment - SIZE_T_ONE)) &
                               -alignment));
                char *pos =
                    ((size_t) (br - (char *) (p)) >= MIN_CHUNK_SIZE) ?
                    br : br + alignment;
                mchunkptr newp = (mchunkptr) pos;
                size_t leadsize = pos - (char *) (p);
                size_t newsize = chunksize(p) - leadsize;

                if (is_mmapped(p)) {    /* For mmapped chunks, just adjust offset */
                    newp->prev_foot = p->prev_foot + leadsize;
                    newp->head = (newsize | CINUSE_BIT);
                } else {        /* Otherwise, give back leader, use the rest */
                    set_inuse(m, newp, newsize);
                    set_inuse(m, p, leadsize);
                    leader = chunk2mem(p);
                }
                p = newp;
            }

            /* Give back spare room at the end */
            if (!is_mmapped(p)) {
                size_t size = chunksize(p);
                if (size > nb + MIN_CHUNK_SIZE) {
                    size_t remainder_size = size - nb;
                    mchunkptr remainder = chunk_plus_offset(p, nb);
                    set_inuse(m, p, nb);
                    set_inuse(m, remainder, remainder_size);
                    trailer = chunk2mem(remainder);
                }
            }

            assert(chunksize(p) >= nb);
            assert((((size_t) (chunk2mem(p))) % alignment) == 0);
            check_inuse_chunk(m, p);
            POSTACTION(m);
            if (leader != 0) {
                internal_free(m, leader);
            }
            if (trailer != 0) {
                internal_free(m, trailer);
            }
            return chunk2mem(p);
        }
    }
    return 0;
}
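/*
   A hedged usage sketch for the alignment logic above (illustrative
   only): requesting a 4096-byte-aligned block through the public
   dlmemalign entry point and verifying the address.
 */
#if 0
#include <assert.h>
static void
demo_memalign(void)
{
    void *p = dlmemalign((size_t) 4096U, (size_t) 1000U);
    if (p != 0) {
        assert(((size_t) p & (size_t) 4095U) == 0);
        dlfree(p);
    }
}
#endif /* 0 (memalign sketch) */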
/* ------------------------ comalloc/coalloc support --------------------- */

static void **
ialloc(mstate m, size_t n_elements, size_t *sizes, int opts, void *chunks[])
{
    /*
       This provides common support for independent_X routines, handling
       all of the combinations that can result.
       The opts arg has:
       bit 0 set if all elements are same size (using sizes[0])
       bit 1 set if elements should be zeroed
     */

    size_t element_size;        /* chunksize of each element, if all same */
    size_t contents_size;       /* total size of elements */
    size_t array_size;          /* request size of pointer array */
    void *mem;                  /* malloced aggregate space */
    mchunkptr p;                /* corresponding chunk */
    size_t remainder_size;      /* remaining bytes while splitting */
    void **marray;              /* either "chunks" or malloced ptr array */
    mchunkptr array_chunk;      /* chunk for malloced ptr array */
    flag_t was_enabled;         /* to disable mmap */
    size_t size;
    size_t i;

    /* compute array length, if needed */
    if (chunks != 0) {
        if (n_elements == 0)
            return chunks;      /* nothing to do */
        marray = chunks;
        array_size = 0;
    } else {
        /* if empty req, must still return chunk representing empty array */
        if (n_elements == 0)
            return (void **) internal_malloc(m, 0);
        marray = 0;
        array_size = request2size(n_elements * (sizeof(void *)));
    }

    /* compute total element size */
    if (opts & 0x1) {           /* all-same-size */
        element_size = request2size(*sizes);
        contents_size = n_elements * element_size;
    } else {                    /* add up all the sizes */
        element_size = 0;
        contents_size = 0;
        for (i = 0; i != n_elements; ++i)
            contents_size += request2size(sizes[i]);
    }

    size = contents_size + array_size;

    /*
       Allocate the aggregate chunk.  First disable direct-mmapping so
       malloc won't use it, since we would not be able to later
       free/realloc space internal to a segregated mmap region.
     */
    was_enabled = use_mmap(m);
    disable_mmap(m);
    mem = internal_malloc(m, size - CHUNK_OVERHEAD);
    if (was_enabled)
        enable_mmap(m);
    if (mem == 0)
        return 0;

    if (PREACTION(m))
        return 0;
    p = mem2chunk(mem);
    remainder_size = chunksize(p);

    assert(!is_mmapped(p));

    if (opts & 0x2) {           /* optionally clear the elements */
        memset((size_t *) mem, 0, remainder_size - SIZE_T_SIZE - array_size);
    }

    /* If not provided, allocate the pointer array as final part of chunk */
    if (marray == 0) {
        size_t array_chunk_size;
        array_chunk = chunk_plus_offset(p, contents_size);
        array_chunk_size = remainder_size - contents_size;
        marray = (void **) (chunk2mem(array_chunk));
        set_size_and_pinuse_of_inuse_chunk(m, array_chunk, array_chunk_size);
        remainder_size = contents_size;
    }

    /* split out elements */
    for (i = 0;; ++i) {
        marray[i] = chunk2mem(p);
        if (i != n_elements - 1) {
            if (element_size != 0)
                size = element_size;
            else
                size = request2size(sizes[i]);
            remainder_size -= size;
            set_size_and_pinuse_of_inuse_chunk(m, p, size);
            p = chunk_plus_offset(p, size);
        } else {                /* the final element absorbs any slop */
            set_size_and_pinuse_of_inuse_chunk(m, p, remainder_size);
            break;
        }
    }

#if DEBUG
    if (marray != chunks) {
        /* final element must have exactly exhausted chunk */
        if (element_size != 0) {
            assert(remainder_size == element_size);
        } else {
            assert(remainder_size == request2size(sizes[i]));
        }
        check_inuse_chunk(m, mem2chunk(marray));
    }
    for (i = 0; i != n_elements; ++i)
        check_inuse_chunk(m, mem2chunk(marray[i]));
#endif /* DEBUG */

    POSTACTION(m);
    return marray;
}
gm->smallmap >>
idx;
4189 if ((smallbits & 0x3U) != 0) {
4191 idx += ~smallbits & 1;
4202 else if (nb >
gm->dvsize) {
4203 if (smallbits != 0) {
4230 else if (
gm->treemap != 0
4246 if (nb <= gm->dvsize) {
4247 size_t rsize =
gm->dvsize - nb;
4248 mchunkptr
p =
gm->dv;
4255 size_t dvs =
gm->dvsize;
4265 else if (nb < gm->topsize) {
4266 size_t rsize =
gm->topsize -= nb;
4267 mchunkptr
p =
gm->top;
4299 mstate
fm = get_mstate_for(p);
4313 size_t prevsize = p->prev_foot;
4315 prevsize &= ~IS_MMAPPED_BIT;
4317 if (
CALL_MUNMAP((
char *) p - prevsize, psize) == 0)
4318 fm->footprint -= psize;
4340 if (next == fm->top) {
4341 size_t tsize = fm->topsize += psize;
4351 }
else if (next == fm->dv) {
4352 size_t dsize = fm->dvsize += psize;
void *
dlcalloc(size_t n_elements, size_t elem_size)
{
    void *mem;
    size_t req = 0;
    if (n_elements != 0) {
        req = n_elements * elem_size;
        /* Force failure on multiplication overflow */
        if (((n_elements | elem_size) & ~(size_t) 0xffff) &&
            (req / n_elements != elem_size))
            req = MAX_SIZE_T;
    }
    mem = dlmalloc(req);
    if (mem != 0 && calloc_must_clear(mem2chunk(mem)))
        memset(mem, 0, req);
    return mem;
}

void *
dlrealloc(void *oldmem, size_t bytes)
{
    if (oldmem == 0)
        return dlmalloc(bytes);
#ifdef REALLOC_ZERO_BYTES_FREES
    if (bytes == 0) {
        dlfree(oldmem);
        return 0;
    }
#endif /* REALLOC_ZERO_BYTES_FREES */
    else {
#if !FOOTERS
        mstate m = gm;
#else /* FOOTERS */
        mstate m = get_mstate_for(mem2chunk(oldmem));
        if (!ok_magic(m)) {
            USAGE_ERROR_ACTION(m, oldmem);
            return 0;
        }
#endif /* FOOTERS */
        return internal_realloc(m, oldmem, bytes);
    }
}

void **
dlindependent_calloc(size_t n_elements, size_t elem_size, void *chunks[])
{
    size_t sz = elem_size;      /* serves as 1-element array */
    return ialloc(gm, n_elements, &sz, 3, chunks);
}

void **
dlindependent_comalloc(size_t n_elements, size_t sizes[], void *chunks[])
{
    return ialloc(gm, n_elements, sizes, 0, chunks);
}

size_t
dlmalloc_footprint(void)
{
    return gm->footprint;
}

size_t
dlmalloc_max_footprint(void)
{
    return gm->max_footprint;
}
/* ----------------------------- user mspaces ---------------------------- */

#if MSPACES

static mstate
init_user_mstate(char *tbase, size_t tsize)
{
    size_t msize = pad_request(sizeof(struct malloc_state));
    mchunkptr mn;
    mchunkptr msp = align_as_chunk(tbase);
    mstate m = (mstate) (chunk2mem(msp));
    memset(m, 0, msize);
    INITIAL_LOCK(&m->mutex);
    msp->head = (msize | PINUSE_BIT | CINUSE_BIT);
    m->seg.base = m->least_addr = tbase;
    m->seg.size = m->footprint = m->max_footprint = tsize;
    m->magic = mparams.magic;
    m->mflags = mparams.default_mflags;
    disable_contiguous(m);
    init_bins(m);
    mn = next_chunk(mem2chunk(m));
    init_top(m, mn, (size_t) ((tbase + tsize) - (char *) mn) - TOP_FOOT_SIZE);
    check_top_chunk(m, m->top);
    return m;
}

mspace
create_mspace(size_t capacity, int locked)
{
    mstate m = 0;
    size_t msize = pad_request(sizeof(struct malloc_state));
    init_mparams();             /* Ensure pagesize etc initialized */

    if (capacity < (size_t) - (msize + TOP_FOOT_SIZE + mparams.page_size)) {
        size_t rs = ((capacity == 0) ? mparams.granularity :
                     (capacity + TOP_FOOT_SIZE + msize));
        size_t tsize = granularity_align(rs);
        char *tbase = (char *) (CALL_MMAP(tsize));
        if (tbase != CMFAIL) {
            m = init_user_mstate(tbase, tsize);
            m->seg.sflags = IS_MMAPPED_BIT;
            set_lock(m, locked);
        }
    }
    return (mspace) m;
}

mspace
create_mspace_with_base(void *base, size_t capacity, int locked)
{
    mstate m = 0;
    size_t msize = pad_request(sizeof(struct malloc_state));
    init_mparams();             /* Ensure pagesize etc initialized */

    if (capacity > msize + TOP_FOOT_SIZE &&
        capacity < (size_t) - (msize + TOP_FOOT_SIZE + mparams.page_size)) {
        m = init_user_mstate((char *) base, capacity);
        m->seg.sflags = EXTERN_BIT;
        set_lock(m, locked);
    }
    return (mspace) m;
}

size_t
destroy_mspace(mspace msp)
{
    size_t freed = 0;
    mstate ms = (mstate) msp;
    if (ok_magic(ms)) {
        msegmentptr sp = &ms->seg;
        while (sp != 0) {
            char *base = sp->base;
            size_t size = sp->size;
            flag_t flag = sp->sflags;
            sp = sp->next;
            if ((flag & IS_MMAPPED_BIT) && !(flag & EXTERN_BIT) &&
                CALL_MUNMAP(base, size) == 0)
                freed += size;
        }
    } else {
        USAGE_ERROR_ACTION(ms, ms);
    }
    return freed;
}
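/*
   create_mspace_with_base above allows a caller-supplied arena, for
   example a static buffer; the EXTERN_BIT keeps destroy_mspace from
   trying to unmap it.  A hedged sketch (illustrative only):
 */
#if 0
static char demo_arena[256 * 1024];     /* storage owned by the caller */

static void
demo_fixed_arena(void)
{
    mspace msp = create_mspace_with_base(demo_arena, sizeof(demo_arena), 0);
    if (msp != 0) {
        void *p = mspace_malloc(msp, 512);
        mspace_free(msp, p);
        /* Frees bookkeeping only; demo_arena itself is not unmapped. */
        (void) destroy_mspace(msp);
    }
}
#endif /* 0 (fixed-arena sketch) */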
/*
   mspace versions of routines are near-clones of the global versions.
   This is not so nice but better than the alternatives.
 */

void *
mspace_malloc(mspace msp, size_t bytes)
{
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
        return 0;
    }
    if (!PREACTION(ms)) {
        void *mem;
        size_t nb;
        if (bytes <= MAX_SMALL_REQUEST) {
            bindex_t idx;
            binmap_t smallbits;
            nb = (bytes < MIN_REQUEST) ? MIN_CHUNK_SIZE : pad_request(bytes);
            idx = small_index(nb);
            smallbits = ms->smallmap >> idx;

            if ((smallbits & 0x3U) != 0) { /* Remainderless fit to a smallbin. */
                mchunkptr b, p;
                idx += ~smallbits & 1;  /* Uses next bin if idx empty */
                b = smallbin_at(ms, idx);
                p = b->fd;
                assert(chunksize(p) == small_index2size(idx));
                unlink_first_small_chunk(ms, b, p, idx);
                set_inuse_and_pinuse(ms, p, small_index2size(idx));
                mem = chunk2mem(p);
                check_malloced_chunk(ms, mem, nb);
                goto postaction;
            } else if (nb > ms->dvsize) {
                if (smallbits != 0) { /* Use chunk in next nonempty smallbin */
                    /* ... split a larger smallbin chunk, stashing the
                       remainder as the new dv (replace_dv), as in
                       dlmalloc above ... */
                } else if (ms->treemap != 0
                           && (mem = tmalloc_small(ms, nb)) != 0) {
                    check_malloced_chunk(ms, mem, nb);
                    goto postaction;
                }
            }
        } else if (bytes >= MAX_REQUEST)
            nb = MAX_SIZE_T;    /* Too big to allocate. Force failure (in sys alloc) */
        else {
            nb = pad_request(bytes);
            if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
                check_malloced_chunk(ms, mem, nb);
                goto postaction;
            }
        }

        if (nb <= ms->dvsize) {
            size_t rsize = ms->dvsize - nb;
            mchunkptr p = ms->dv;
            if (rsize >= MIN_CHUNK_SIZE) {      /* split dv */
                mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
                ms->dvsize = rsize;
                set_size_and_pinuse_of_free_chunk(r, rsize);
                set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
            } else {            /* exhaust dv */
                size_t dvs = ms->dvsize;
                ms->dvsize = 0;
                ms->dv = 0;
                set_inuse_and_pinuse(ms, p, dvs);
            }
            mem = chunk2mem(p);
            check_malloced_chunk(ms, mem, nb);
            goto postaction;
        } else if (nb < ms->topsize) {  /* Split top */
            size_t rsize = ms->topsize -= nb;
            mchunkptr p = ms->top;
            mchunkptr r = ms->top = chunk_plus_offset(p, nb);
            r->head = rsize | PINUSE_BIT;
            set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
            mem = chunk2mem(p);
            check_top_chunk(ms, ms->top);
            check_malloced_chunk(ms, mem, nb);
            goto postaction;
        }

        mem = sys_alloc(ms, nb);

      postaction:
        POSTACTION(ms);
        return mem;
    }

    return 0;
}

void
mspace_free(mspace msp, void *mem)
{
    if (mem != 0) {
        mchunkptr p = mem2chunk(mem);
#if FOOTERS
        mstate fm = get_mstate_for(p);
#else /* FOOTERS */
        mstate fm = (mstate) msp;
#endif /* FOOTERS */
        if (!ok_magic(fm)) {
            USAGE_ERROR_ACTION(fm, p);
            return;
        }
        if (!PREACTION(fm)) {
            check_inuse_chunk(fm, p);
            if (RTCHECK(ok_address(fm, p) && ok_cinuse(p))) {
                size_t psize = chunksize(p);
                mchunkptr next = chunk_plus_offset(p, psize);
                if (!pinuse(p)) {
                    size_t prevsize = p->prev_foot;
                    if ((prevsize & IS_MMAPPED_BIT) != 0) {
                        prevsize &= ~IS_MMAPPED_BIT;
                        psize += prevsize + MMAP_FOOT_PAD;
                        if (CALL_MUNMAP((char *) p - prevsize, psize) == 0)
                            fm->footprint -= psize;
                        goto postaction;
                    } else {
                        /* ... consolidate backward, as in dlfree above ... */
                    }
                }
                if (RTCHECK(ok_next(p, next) && ok_pinuse(next))) {
                    if (!cinuse(next)) {        /* consolidate forward */
                        if (next == fm->top) {
                            size_t tsize = fm->topsize += psize;
                            fm->top = p;
                            p->head = tsize | PINUSE_BIT;
                            if (p == fm->dv) {
                                fm->dv = 0;
                                fm->dvsize = 0;
                            }
                            if (should_trim(fm, tsize))
                                sys_trim(fm, 0);
                            goto postaction;
                        } else if (next == fm->dv) {
                            size_t dsize = fm->dvsize += psize;
                            fm->dv = p;
                            set_size_and_pinuse_of_free_chunk(p, dsize);
                            goto postaction;
                        } else {
                            /* ... unlink next and coalesce, then bin,
                               as in dlfree above ... */
                        }
                    } else
                        set_free_with_pinuse(p, psize, next);
                    insert_chunk(fm, p, psize);
                    check_free_chunk(fm, p);
                    goto postaction;
                }
            }
            USAGE_ERROR_ACTION(fm, p);
          postaction:
            POSTACTION(fm);
        }
    }
}

void *
mspace_calloc(mspace msp, size_t n_elements, size_t elem_size)
{
    void *mem;
    size_t req = 0;
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
        return 0;
    }
    if (n_elements != 0) {
        req = n_elements * elem_size;
        /* Force failure on multiplication overflow */
        if (((n_elements | elem_size) & ~(size_t) 0xffff) &&
            (req / n_elements != elem_size))
            req = MAX_SIZE_T;
    }
    mem = internal_malloc(ms, req);
    if (mem != 0 && calloc_must_clear(mem2chunk(mem)))
        memset(mem, 0, req);
    return mem;
}

void *
mspace_realloc(mspace msp, void *oldmem, size_t bytes)
{
    if (oldmem == 0)
        return mspace_malloc(msp, bytes);
#ifdef REALLOC_ZERO_BYTES_FREES
    if (bytes == 0) {
        mspace_free(msp, oldmem);
        return 0;
    }
#endif /* REALLOC_ZERO_BYTES_FREES */
    else {
#if FOOTERS
        mchunkptr p = mem2chunk(oldmem);
        mstate ms = get_mstate_for(p);
#else /* FOOTERS */
        mstate ms = (mstate) msp;
#endif /* FOOTERS */
        if (!ok_magic(ms)) {
            USAGE_ERROR_ACTION(ms, ms);
            return 0;
        }
        return internal_realloc(ms, oldmem, bytes);
    }
}

void *
mspace_memalign(mspace msp, size_t alignment, size_t bytes)
{
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
        return 0;
    }
    return internal_memalign(ms, alignment, bytes);
}

void **
mspace_independent_calloc(mspace msp, size_t n_elements,
                          size_t elem_size, void *chunks[])
{
    size_t sz = elem_size;      /* serves as 1-element array */
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
        return 0;
    }
    return ialloc(ms, n_elements, &sz, 3, chunks);
}

void **
mspace_independent_comalloc(mspace msp, size_t n_elements,
                            size_t sizes[], void *chunks[])
{
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
        return 0;
    }
    return ialloc(ms, n_elements, sizes, 0, chunks);
}

int
mspace_trim(mspace msp, size_t pad)
{
    int result = 0;
    mstate ms = (mstate) msp;
    if (ok_magic(ms)) {
        if (!PREACTION(ms)) {
            result = sys_trim(ms, pad);
            POSTACTION(ms);
        }
    } else {
        USAGE_ERROR_ACTION(ms, ms);
    }
    return result;
}

void
mspace_malloc_stats(mspace msp)
{
    mstate ms = (mstate) msp;
    if (ok_magic(ms)) {
        internal_malloc_stats(ms);
    } else {
        USAGE_ERROR_ACTION(ms, ms);
    }
}

size_t
mspace_footprint(mspace msp)
{
    size_t result = 0;
    mstate ms = (mstate) msp;
    if (ok_magic(ms)) {
        result = ms->footprint;
    } else {
        USAGE_ERROR_ACTION(ms, ms);
    }
    return result;
}

size_t
mspace_max_footprint(mspace msp)
{
    size_t result = 0;
    mstate ms = (mstate) msp;
    if (ok_magic(ms)) {
        result = ms->max_footprint;
    } else {
        USAGE_ERROR_ACTION(ms, ms);
    }
    return result;
}

#if !NO_MALLINFO
struct mallinfo
mspace_mallinfo(mspace msp)
{
    mstate ms = (mstate) msp;
    if (!ok_magic(ms)) {
        USAGE_ERROR_ACTION(ms, ms);
    }
    return internal_mallinfo(ms);
}
#endif /* NO_MALLINFO */

int
mspace_mallopt(int param_number, int value)
{
    return change_mparam(param_number, value);
}

#endif /* MSPACES */
/*** End of dlmalloc. Back to SDL. ***/

#ifdef HAVE_MALLOC
#define real_malloc  malloc
#define real_calloc  calloc
#define real_realloc realloc
#define real_free    free
#else
#define real_malloc  dlmalloc
#define real_calloc  dlcalloc
#define real_realloc dlrealloc
#define real_free    dlfree
#endif

/* Memory functions used by SDL that can be replaced by the application */
static struct
{
    SDL_malloc_func malloc_func;
    SDL_calloc_func calloc_func;
    SDL_realloc_func realloc_func;
    SDL_free_func free_func;
    SDL_atomic_t num_allocations;
} s_mem = {
    real_malloc, real_calloc, real_realloc, real_free, { 0 }
};

void
SDL_GetMemoryFunctions(SDL_malloc_func *malloc_func,
                       SDL_calloc_func *calloc_func,
                       SDL_realloc_func *realloc_func,
                       SDL_free_func *free_func)
{
    if (malloc_func) {
        *malloc_func = s_mem.malloc_func;
    }
    if (calloc_func) {
        *calloc_func = s_mem.calloc_func;
    }
    if (realloc_func) {
        *realloc_func = s_mem.realloc_func;
    }
    if (free_func) {
        *free_func = s_mem.free_func;
    }
}

int
SDL_SetMemoryFunctions(SDL_malloc_func malloc_func,
                       SDL_calloc_func calloc_func,
                       SDL_realloc_func realloc_func,
                       SDL_free_func free_func)
{
    if (!malloc_func) {
        return SDL_InvalidParamError("malloc_func");
    }
    if (!calloc_func) {
        return SDL_InvalidParamError("calloc_func");
    }
    if (!realloc_func) {
        return SDL_InvalidParamError("realloc_func");
    }
    if (!free_func) {
        return SDL_InvalidParamError("free_func");
    }

    s_mem.malloc_func = malloc_func;
    s_mem.calloc_func = calloc_func;
    s_mem.realloc_func = realloc_func;
    s_mem.free_func = free_func;
    return 0;
}

int
SDL_GetNumAllocations(void)
{
    return SDL_AtomicGet(&s_mem.num_allocations);
}

void *
SDL_malloc(size_t size)
{
    void *mem;

    if (!size) {
        size = 1;
    }

    mem = s_mem.malloc_func(size);
    if (mem) {
        SDL_AtomicIncRef(&s_mem.num_allocations);
    }
    return mem;
}

void *
SDL_calloc(size_t nmemb, size_t size)
{
    void *mem;

    if (!nmemb || !size) {
        nmemb = 1;
        size = 1;
    }

    mem = s_mem.calloc_func(nmemb, size);
    if (mem) {
        SDL_AtomicIncRef(&s_mem.num_allocations);
    }
    return mem;
}

void *
SDL_realloc(void *ptr, size_t size)
{
    void *mem;

    if (!ptr && !size) {
        size = 1;
    }

    mem = s_mem.realloc_func(ptr, size);
    if (mem && !ptr) {
        SDL_AtomicIncRef(&s_mem.num_allocations);
    }
    return mem;
}

void
SDL_free(void *ptr)
{
    if (!ptr) {
        return;
    }

    s_mem.free_func(ptr);
    SDL_AtomicDecRef(&s_mem.num_allocations);
}
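/*
   A hedged sketch of the replacement hook above (illustrative only):
   wrapping the current allocator to count malloc calls.  The demo_*
   names are made up; the pattern of chaining to the previous functions
   fetched with SDL_GetMemoryFunctions is the intended use.
 */
#if 0
static SDL_malloc_func demo_prev_malloc;
static SDL_calloc_func demo_prev_calloc;
static SDL_realloc_func demo_prev_realloc;
static SDL_free_func demo_prev_free;
static SDL_atomic_t demo_malloc_calls;

static void *
demo_counting_malloc(size_t size)
{
    SDL_AtomicIncRef(&demo_malloc_calls);
    return demo_prev_malloc(size);
}

static int
demo_install_hooks(void)
{
    SDL_GetMemoryFunctions(&demo_prev_malloc, &demo_prev_calloc,
                           &demo_prev_realloc, &demo_prev_free);
    /* Keep the other three as-is; only malloc is wrapped here. */
    return SDL_SetMemoryFunctions(demo_counting_malloc, demo_prev_calloc,
                                  demo_prev_realloc, demo_prev_free);
}
#endif /* 0 (SDL_SetMemoryFunctions sketch) */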