python3.2  3.2.2
Classes | Defines | Typedefs | Functions | Variables
dlmalloc.c File Reference
#include <sys/types.h>
#include <stdio.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/param.h>

Classes

struct  mallinfo
struct  malloc_chunk
struct  malloc_tree_chunk
struct  malloc_segment
struct  malloc_state
struct  malloc_params

Defines

#define _GNU_SOURCE   1
#define MAX_SIZE_T   (~(size_t)0)
#define ONLY_MSPACES   0
#define MSPACES   0
#define MALLOC_ALIGNMENT   ((size_t)8U)
#define FOOTERS   0
#define ABORT   abort()
#define ABORT_ON_ASSERT_FAILURE   1
#define PROCEED_ON_ERROR   0
#define USE_LOCKS   0
#define INSECURE   0
#define HAVE_MMAP   1
#define MMAP_CLEARS   1
#define HAVE_MREMAP   0
#define MALLOC_FAILURE_ACTION   errno = ENOMEM;
#define HAVE_MORECORE   1
#define MORECORE   sbrk
#define MORECORE_CONTIGUOUS   1
#define DEFAULT_GRANULARITY   (0) /* 0 means to compute in init_mparams */
#define DEFAULT_TRIM_THRESHOLD   ((size_t)2U * (size_t)1024U * (size_t)1024U)
#define DEFAULT_MMAP_THRESHOLD   ((size_t)256U * (size_t)1024U)
#define USE_BUILTIN_FFS   0
#define USE_DEV_RANDOM   0
#define NO_MALLINFO   0
#define MALLINFO_FIELD_TYPE   size_t
#define M_TRIM_THRESHOLD   (-1)
#define M_GRANULARITY   (-2)
#define M_MMAP_THRESHOLD   (-3)
#define dlcalloc   calloc
#define dlfree   free
#define dlmalloc   malloc
#define dlmemalign   memalign
#define dlrealloc   realloc
#define dlvalloc   valloc
#define dlpvalloc   pvalloc
#define dlmallinfo   mallinfo
#define dlmallopt   mallopt
#define dlmalloc_trim   malloc_trim
#define dlmalloc_stats   malloc_stats
#define dlmalloc_usable_size   malloc_usable_size
#define dlmalloc_footprint   malloc_footprint
#define dlmalloc_max_footprint   malloc_max_footprint
#define dlindependent_calloc   independent_calloc
#define dlindependent_comalloc   independent_comalloc
#define assert(x)
#define malloc_getpagesize   ((size_t)4096U)
#define SIZE_T_SIZE   (sizeof(size_t))
#define SIZE_T_BITSIZE   (sizeof(size_t) << 3)
#define SIZE_T_ZERO   ((size_t)0)
#define SIZE_T_ONE   ((size_t)1)
#define SIZE_T_TWO   ((size_t)2)
#define TWO_SIZE_T_SIZES   (SIZE_T_SIZE<<1)
#define FOUR_SIZE_T_SIZES   (SIZE_T_SIZE<<2)
#define SIX_SIZE_T_SIZES   (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)
#define HALF_MAX_SIZE_T   (MAX_SIZE_T / 2U)
#define CHUNK_ALIGN_MASK   (MALLOC_ALIGNMENT - SIZE_T_ONE)
#define is_aligned(A)   (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)
#define align_offset(A)
#define MFAIL   ((void*)(MAX_SIZE_T))
#define CMFAIL   ((char*)(MFAIL)) /* defined for convenience */
#define IS_MMAPPED_BIT   (SIZE_T_ONE)
#define USE_MMAP_BIT   (SIZE_T_ONE)
#define CALL_MUNMAP(a, s)   munmap((a), (s))
#define MMAP_PROT   (PROT_READ|PROT_WRITE)
#define MMAP_FLAGS   (MAP_PRIVATE)
#define CALL_MMAP(s)
#define DIRECT_MMAP(s)   CALL_MMAP(s)
#define CALL_MREMAP(addr, osz, nsz, mv)   MFAIL
#define CALL_MORECORE(S)   MORECORE(S)
#define USE_NONCONTIGUOUS_BIT   (4U)
#define EXTERN_BIT   (8U)
#define USE_LOCK_BIT   (0U)
#define INITIAL_LOCK(l)
#define ACQUIRE_MORECORE_LOCK()
#define RELEASE_MORECORE_LOCK()
#define ACQUIRE_MAGIC_INIT_LOCK()
#define RELEASE_MAGIC_INIT_LOCK()
#define MCHUNK_SIZE   (sizeof(mchunk))
#define CHUNK_OVERHEAD   (SIZE_T_SIZE)
#define MMAP_CHUNK_OVERHEAD   (TWO_SIZE_T_SIZES)
#define MMAP_FOOT_PAD   (FOUR_SIZE_T_SIZES)
#define MIN_CHUNK_SIZE   ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define chunk2mem(p)   ((void*)((char*)(p) + TWO_SIZE_T_SIZES))
#define mem2chunk(mem)   ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))
#define align_as_chunk(A)   (mchunkptr)((A) + align_offset(chunk2mem(A)))
#define MAX_REQUEST   ((-MIN_CHUNK_SIZE) << 2)
#define MIN_REQUEST   (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
#define pad_request(req)   (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define request2size(req)   (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))
#define PINUSE_BIT   (SIZE_T_ONE)
#define CINUSE_BIT   (SIZE_T_TWO)
#define INUSE_BITS   (PINUSE_BIT|CINUSE_BIT)
#define FENCEPOST_HEAD   (INUSE_BITS|SIZE_T_SIZE)
#define cinuse(p)   ((p)->head & CINUSE_BIT)
#define pinuse(p)   ((p)->head & PINUSE_BIT)
#define chunksize(p)   ((p)->head & ~(INUSE_BITS))
#define clear_pinuse(p)   ((p)->head &= ~PINUSE_BIT)
#define clear_cinuse(p)   ((p)->head &= ~CINUSE_BIT)
#define chunk_plus_offset(p, s)   ((mchunkptr)(((char*)(p)) + (s)))
#define chunk_minus_offset(p, s)   ((mchunkptr)(((char*)(p)) - (s)))
#define next_chunk(p)   ((mchunkptr)( ((char*)(p)) + ((p)->head & ~INUSE_BITS)))
#define prev_chunk(p)   ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))
#define next_pinuse(p)   ((next_chunk(p)->head) & PINUSE_BIT)
#define get_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_foot)
#define set_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))
#define set_size_and_pinuse_of_free_chunk(p, s)   ((p)->head = (s|PINUSE_BIT), set_foot(p, s))
#define set_free_with_pinuse(p, s, n)   (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))
#define is_mmapped(p)   (!((p)->head & PINUSE_BIT) && ((p)->prev_foot & IS_MMAPPED_BIT))
#define overhead_for(p)   (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)
#define calloc_must_clear(p)   (!is_mmapped(p))
#define leftmost_child(t)   ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])
#define get_segment_flags(S)   ((S)->sflags)
#define set_segment_flags(S, v)   ((S)->sflags = (v))
#define check_segment_merge(S, b, s)   (1)
#define is_mmapped_segment(S)   (get_segment_flags(S) & IS_MMAPPED_BIT)
#define is_extern_segment(S)   (get_segment_flags(S) & EXTERN_BIT)
#define NSMALLBINS   (32U)
#define NTREEBINS   (32U)
#define SMALLBIN_SHIFT   (3U)
#define SMALLBIN_WIDTH   (SIZE_T_ONE << SMALLBIN_SHIFT)
#define TREEBIN_SHIFT   (8U)
#define MIN_LARGE_SIZE   (SIZE_T_ONE << TREEBIN_SHIFT)
#define MAX_SMALL_SIZE   (MIN_LARGE_SIZE - SIZE_T_ONE)
#define MAX_SMALL_REQUEST   (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)
#define gm   (&_gm_)
#define is_global(M)   ((M) == &_gm_)
#define is_initialized(M)   ((M)->top != 0)
#define use_lock(M)   ((M)->mflags & USE_LOCK_BIT)
#define enable_lock(M)   ((M)->mflags |= USE_LOCK_BIT)
#define disable_lock(M)   ((M)->mflags &= ~USE_LOCK_BIT)
#define use_mmap(M)   ((M)->mflags & USE_MMAP_BIT)
#define enable_mmap(M)   ((M)->mflags |= USE_MMAP_BIT)
#define disable_mmap(M)   ((M)->mflags &= ~USE_MMAP_BIT)
#define use_noncontiguous(M)   ((M)->mflags & USE_NONCONTIGUOUS_BIT)
#define disable_contiguous(M)   ((M)->mflags |= USE_NONCONTIGUOUS_BIT)
#define set_lock(M, L)
#define page_align(S)   (((S) + (mparams.page_size)) & ~(mparams.page_size - SIZE_T_ONE))
#define granularity_align(S)   (((S) + (mparams.granularity)) & ~(mparams.granularity - SIZE_T_ONE))
#define is_page_aligned(S)   (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)
#define is_granularity_aligned(S)   (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)
#define segment_holds(S, A)   ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)
#define should_trim(M, s)   ((s) > (M)->trim_check)
#define TOP_FOOT_SIZE   (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)
#define PREACTION(M)   (0)
#define POSTACTION(M)
#define CORRUPTION_ERROR_ACTION(m)   ABORT
#define USAGE_ERROR_ACTION(m, p)   ABORT
#define check_free_chunk(M, P)
#define check_inuse_chunk(M, P)
#define check_malloced_chunk(M, P, N)
#define check_mmapped_chunk(M, P)
#define check_malloc_state(M)
#define check_top_chunk(M, P)
#define is_small(s)   (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)
#define small_index(s)   ((s) >> SMALLBIN_SHIFT)
#define small_index2size(i)   ((i) << SMALLBIN_SHIFT)
#define MIN_SMALL_INDEX   (small_index(MIN_CHUNK_SIZE))
#define smallbin_at(M, i)   ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))
#define treebin_at(M, i)   (&((M)->treebins[i]))
#define compute_tree_index(S, I)
#define bit_for_tree_index(i)   (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)
#define leftshift_for_tree_index(i)
#define minsize_for_tree_index(i)
#define idx2bit(i)   ((binmap_t)(1) << (i))
#define mark_smallmap(M, i)   ((M)->smallmap |= idx2bit(i))
#define clear_smallmap(M, i)   ((M)->smallmap &= ~idx2bit(i))
#define smallmap_is_marked(M, i)   ((M)->smallmap & idx2bit(i))
#define mark_treemap(M, i)   ((M)->treemap |= idx2bit(i))
#define clear_treemap(M, i)   ((M)->treemap &= ~idx2bit(i))
#define treemap_is_marked(M, i)   ((M)->treemap & idx2bit(i))
#define compute_bit2idx(X, I)
#define least_bit(x)   ((x) & -(x))
#define left_bits(x)   ((x<<1) | -(x<<1))
#define same_or_left_bits(x)   ((x) | -(x))
#define ok_address(M, a)   ((char*)(a) >= (M)->least_addr)
#define ok_next(p, n)   ((char*)(p) < (char*)(n))
#define ok_cinuse(p)   cinuse(p)
#define ok_pinuse(p)   pinuse(p)
#define ok_magic(M)   (1)
#define RTCHECK(e)   (e)
#define mark_inuse_foot(M, p, s)
#define set_inuse(M, p, s)
#define set_inuse_and_pinuse(M, p, s)
#define set_size_and_pinuse_of_inuse_chunk(M, p, s)   ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))
#define insert_small_chunk(M, P, S)
#define unlink_small_chunk(M, P, S)
#define unlink_first_small_chunk(M, B, P, I)
#define replace_dv(M, P, S)
#define insert_large_chunk(M, X, S)
#define unlink_large_chunk(M, X)
#define insert_chunk(M, P, S)
#define unlink_chunk(M, P, S)
#define internal_malloc(m, b)   dlmalloc(b)
#define internal_free(m, mem)   dlfree(mem)
#define fm   gm

Typedefs

typedef struct malloc_chunk mchunk
typedef struct malloc_chunk* mchunkptr
typedef struct malloc_chunk* sbinptr
typedef unsigned int bindex_t
typedef unsigned int binmap_t
typedef unsigned int flag_t
typedef struct malloc_tree_chunk tchunk
typedef struct malloc_tree_chunk* tchunkptr
typedef struct malloc_tree_chunk* tbinptr
typedef struct malloc_segment msegment
typedef struct malloc_segment* msegmentptr
typedef struct malloc_state* mstate

Functions

void * dlmalloc (size_t)
void dlfree (void *)
void * dlcalloc (size_t, size_t)
void * dlrealloc (void *, size_t)
void * dlmemalign (size_t, size_t)
void * dlvalloc (size_t)
int dlmallopt (int, int)
size_t dlmalloc_footprint (void)
size_t dlmalloc_max_footprint (void)
struct mallinfo dlmallinfo (void)
void ** dlindependent_calloc (size_t, size_t, void **)
void ** dlindependent_comalloc (size_t, size_t *, void **)
void * dlpvalloc (size_t)
int dlmalloc_trim (size_t)
size_t dlmalloc_usable_size (void *)
void dlmalloc_stats (void)
static msegmentptr segment_holding (mstate m, char *addr)
static int has_segment_link (mstate m, msegmentptr ss)
static int init_mparams (void)
static int change_mparam (int param_number, int value)
static struct mallinfo internal_mallinfo (mstate m)
static void internal_malloc_stats (mstate m)
static void * mmap_alloc (mstate m, size_t nb)
static mchunkptr mmap_resize (mstate m, mchunkptr oldp, size_t nb)
static void init_top (mstate m, mchunkptr p, size_t psize)
static void init_bins (mstate m)
static void * prepend_alloc (mstate m, char *newbase, char *oldbase, size_t nb)
static void add_segment (mstate m, char *tbase, size_t tsize, flag_t mmapped)
static void * sys_alloc (mstate m, size_t nb)
static size_t release_unused_segments (mstate m)
static int sys_trim (mstate m, size_t pad)
static void * tmalloc_large (mstate m, size_t nb)
static void * tmalloc_small (mstate m, size_t nb)
static void * internal_realloc (mstate m, void *oldmem, size_t bytes)
static void * internal_memalign (mstate m, size_t alignment, size_t bytes)
static void ** ialloc (mstate m, size_t n_elements, size_t *sizes, int opts, void *chunks[])
void ** dlindependent_calloc (size_t n_elements, size_t elem_size, void *chunks[])
void ** dlindependent_comalloc (size_t n_elements, size_t sizes[], void *chunks[])

Variables

static int dev_zero_fd = -1
static struct malloc_params mparams
static struct malloc_state _gm_

Class Documentation

struct mallinfo
struct malloc_chunk

Definition at line 1602 of file dlmalloc.c.

Class Members
struct malloc_chunk * bk
struct malloc_chunk * fd
size_t head
size_t prev_foot
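
The members above form dlmalloc's boundary-tag chunk header. As an illustration (a sketch reconstructed from the listed members, not copied verbatim from the source), the layout is:

struct malloc_chunk {
  size_t               prev_foot;  /* size of previous chunk (valid only if that chunk is free) */
  size_t               head;       /* this chunk's size plus the PINUSE_BIT/CINUSE_BIT status bits */
  struct malloc_chunk* fd;         /* forward link in a bin; used only while this chunk is free */
  struct malloc_chunk* bk;         /* backward link in a bin; used only while this chunk is free */
};
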
struct malloc_tree_chunk

Definition at line 1807 of file dlmalloc.c.

Class Members
struct malloc_tree_chunk * bk
struct malloc_tree_chunk * child
struct malloc_tree_chunk * fd
size_t head
bindex_t index
struct malloc_tree_chunk * parent
size_t prev_foot
struct malloc_segment

Definition at line 1883 of file dlmalloc.c.

Class Members
char * base
struct malloc_segment * next
flag_t sflags
size_t size
struct malloc_state

Definition at line 2017 of file dlmalloc.c.

Class Members
mchunkptr dv
size_t dvsize
size_t footprint
char * least_addr
size_t magic
size_t max_footprint
flag_t mflags
msegment seg
mchunkptr smallbins
binmap_t smallmap
mchunkptr top
size_t topsize
tbinptr treebins
binmap_t treemap
size_t trim_check
struct malloc_params

Definition at line 2048 of file dlmalloc.c.

Class Members
flag_t default_mflags
size_t granularity
size_t magic
size_t mmap_threshold
size_t page_size
size_t trim_threshold

Define Documentation

#define _GNU_SOURCE   1

Definition at line 464 of file dlmalloc.c.

#define ABORT   abort()

Definition at line 499 of file dlmalloc.c.

#define ABORT_ON_ASSERT_FAILURE   1

Definition at line 502 of file dlmalloc.c.

#define ACQUIRE_MAGIC_INIT_LOCK ( )

Definition at line 1460 of file dlmalloc.c.

#define ACQUIRE_MORECORE_LOCK ( )

Definition at line 1452 of file dlmalloc.c.

#define align_as_chunk (   A)    (mchunkptr)((A) + align_offset(chunk2mem(A)))

Definition at line 1639 of file dlmalloc.c.

#define align_offset (   A)
Value:
((((size_t)(A) & CHUNK_ALIGN_MASK) == 0)? 0 :\
  ((MALLOC_ALIGNMENT - ((size_t)(A) & CHUNK_ALIGN_MASK)) & CHUNK_ALIGN_MASK))

Definition at line 1268 of file dlmalloc.c.

#define assert (   x)

Definition at line 1170 of file dlmalloc.c.

#define bit_for_tree_index (   i)    (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)

Definition at line 2277 of file dlmalloc.c.

#define CALL_MMAP (   s)
Value:
((dev_zero_fd < 0) ? \
           (dev_zero_fd = open("/dev/zero", O_RDWR), \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0)) : \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0))

Definition at line 1312 of file dlmalloc.c.

#define CALL_MORECORE (   S)    MORECORE(S)

Definition at line 1365 of file dlmalloc.c.

#define CALL_MREMAP (   addr,
  osz,
  nsz,
  mv 
)    MFAIL

Definition at line 1361 of file dlmalloc.c.

#define CALL_MUNMAP (   a,
  s 
)    munmap((a), (s))

Definition at line 1297 of file dlmalloc.c.

#define calloc_must_clear (   p)    (!is_mmapped(p))

Definition at line 1711 of file dlmalloc.c.

#define check_free_chunk (   M,
  P 
)

Definition at line 2201 of file dlmalloc.c.

#define check_inuse_chunk (   M,
  P 
)

Definition at line 2202 of file dlmalloc.c.

#define check_malloc_state (   M)

Definition at line 2205 of file dlmalloc.c.

#define check_malloced_chunk (   M,
  P,
  N 
)

Definition at line 2203 of file dlmalloc.c.

#define check_mmapped_chunk (   M,
  P 
)

Definition at line 2204 of file dlmalloc.c.

#define check_segment_merge (   S,
  b,
  s 
)    (1)

Definition at line 1920 of file dlmalloc.c.

#define check_top_chunk (   M,
  P 
)

Definition at line 2206 of file dlmalloc.c.

#define chunk2mem (   p)    ((void*)((char*)(p) + TWO_SIZE_T_SIZES))

Definition at line 1636 of file dlmalloc.c.

#define CHUNK_ALIGN_MASK   (MALLOC_ALIGNMENT - SIZE_T_ONE)

Definition at line 1262 of file dlmalloc.c.

#define chunk_minus_offset (   p,
  s 
)    ((mchunkptr)(((char*)(p)) - (s)))

Definition at line 1681 of file dlmalloc.c.

#define CHUNK_OVERHEAD   (SIZE_T_SIZE)

Definition at line 1623 of file dlmalloc.c.

#define chunk_plus_offset (   p,
  s 
)    ((mchunkptr)(((char*)(p)) + (s)))

Definition at line 1680 of file dlmalloc.c.

#define chunksize (   p)    ((p)->head & ~(INUSE_BITS))

Definition at line 1674 of file dlmalloc.c.

#define cinuse (   p)    ((p)->head & CINUSE_BIT)

Definition at line 1672 of file dlmalloc.c.

#define CINUSE_BIT   (SIZE_T_TWO)

Definition at line 1665 of file dlmalloc.c.

#define clear_cinuse (   p)    ((p)->head &= ~CINUSE_BIT)

Definition at line 1677 of file dlmalloc.c.

#define clear_pinuse (   p)    ((p)->head &= ~PINUSE_BIT)

Definition at line 1676 of file dlmalloc.c.

#define clear_smallmap (   M,
  i 
)    ((M)->smallmap &= ~idx2bit(i))

Definition at line 2298 of file dlmalloc.c.

#define clear_treemap (   M,
  i 
)    ((M)->treemap &= ~idx2bit(i))

Definition at line 2302 of file dlmalloc.c.

#define CMFAIL   ((char*)(MFAIL)) /* defined for convenience */

Definition at line 1283 of file dlmalloc.c.

#define compute_bit2idx (   X,
  I 
)
Value:
{\
  unsigned int Y = X - 1;\
  unsigned int K = Y >> (16-4) & 16;\
  unsigned int N = K;        Y >>= K;\
  N += K = Y >> (8-3) &  8;  Y >>= K;\
  N += K = Y >> (4-2) &  4;  Y >>= K;\
  N += K = Y >> (2-1) &  2;  Y >>= K;\
  N += K = Y >> (1-0) &  1;  Y >>= K;\
  I = (bindex_t)(N + Y);\
}

Definition at line 2320 of file dlmalloc.c.
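
compute_bit2idx yields the index of the single set bit in X, typically after least_bit has isolated one bit of a binmap. A minimal standalone check of the same bit-twiddling (the bit2idx helper below is hypothetical, assuming the 32-bit binmap_t configured above):

#include <assert.h>

typedef unsigned int bindex_t;
typedef unsigned int binmap_t;

static bindex_t bit2idx(binmap_t X) {   /* same steps as compute_bit2idx */
  unsigned int Y = X - 1;
  unsigned int K = Y >> (16-4) & 16;
  unsigned int N = K;        Y >>= K;
  N += K = Y >> (8-3) &  8;  Y >>= K;
  N += K = Y >> (4-2) &  4;  Y >>= K;
  N += K = Y >> (2-1) &  2;  Y >>= K;
  N += K = Y >> (1-0) &  1;  Y >>= K;
  return (bindex_t)(N + Y);
}

int main(void) {
  unsigned int i;
  for (i = 0; i < 32; ++i)              /* a single-bit map yields its bit position */
    assert(bit2idx((binmap_t)1 << i) == i);
  return 0;
}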

#define compute_tree_index (   S,
  I 
)
Value:
{\
  size_t X = S >> TREEBIN_SHIFT;\
  if (X == 0)\
    I = 0;\
  else if (X > 0xFFFF)\
    I = NTREEBINS-1;\
  else {\
    unsigned int Y = (unsigned int)X;\
    unsigned int N = ((Y - 0x100) >> 16) & 8;\
    unsigned int K = (((Y <<= N) - 0x1000) >> 16) & 4;\
    N += K;\
    N += K = (((Y <<= K) - 0x4000) >> 16) & 2;\
    K = 14 - N + ((Y <<= K) >> 15);\
    I = (K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1));\
  }\
}

Definition at line 2257 of file dlmalloc.c.

#define CORRUPTION_ERROR_ACTION (   m)    ABORT

Definition at line 2188 of file dlmalloc.c.

#define DEFAULT_GRANULARITY   (0) /* 0 means to compute in init_mparams */

Definition at line 548 of file dlmalloc.c.

#define DEFAULT_MMAP_THRESHOLD   ((size_t)256U * (size_t)1024U)

Definition at line 562 of file dlmalloc.c.

#define DEFAULT_TRIM_THRESHOLD   ((size_t)2U * (size_t)1024U * (size_t)1024U)

Definition at line 555 of file dlmalloc.c.

#define DIRECT_MMAP (   s)    CALL_MMAP(s)

Definition at line 1318 of file dlmalloc.c.

#define disable_contiguous (   M)    ((M)->mflags |= USE_NONCONTIGUOUS_BIT)

Definition at line 2078 of file dlmalloc.c.

#define disable_lock (   M)    ((M)->mflags &= ~USE_LOCK_BIT)

Definition at line 2071 of file dlmalloc.c.

#define disable_mmap (   M)    ((M)->mflags &= ~USE_MMAP_BIT)

Definition at line 2075 of file dlmalloc.c.

#define dlcalloc   calloc

Definition at line 647 of file dlmalloc.c.

#define dlfree   free

Definition at line 648 of file dlmalloc.c.

#define dlindependent_calloc   independent_calloc

Definition at line 661 of file dlmalloc.c.

#define dlindependent_comalloc   independent_comalloc

Definition at line 662 of file dlmalloc.c.

#define dlmallinfo   mallinfo

Definition at line 654 of file dlmalloc.c.

#define dlmalloc   malloc

Definition at line 649 of file dlmalloc.c.

#define dlmalloc_footprint   malloc_footprint

Definition at line 659 of file dlmalloc.c.

#define dlmalloc_max_footprint   malloc_max_footprint

Definition at line 660 of file dlmalloc.c.

#define dlmalloc_stats   malloc_stats

Definition at line 657 of file dlmalloc.c.

#define dlmalloc_trim   malloc_trim

Definition at line 656 of file dlmalloc.c.

#define dlmalloc_usable_size   malloc_usable_size

Definition at line 658 of file dlmalloc.c.

#define dlmallopt   mallopt

Definition at line 655 of file dlmalloc.c.

#define dlmemalign   memalign

Definition at line 650 of file dlmalloc.c.

#define dlpvalloc   pvalloc

Definition at line 653 of file dlmalloc.c.

#define dlrealloc   realloc

Definition at line 651 of file dlmalloc.c.

#define dlvalloc   valloc

Definition at line 652 of file dlmalloc.c.

#define enable_lock (   M)    ((M)->mflags |= USE_LOCK_BIT)

Definition at line 2070 of file dlmalloc.c.

#define enable_mmap (   M)    ((M)->mflags |= USE_MMAP_BIT)

Definition at line 2074 of file dlmalloc.c.

#define EXTERN_BIT   (8U)

Definition at line 1374 of file dlmalloc.c.

#define FENCEPOST_HEAD   (INUSE_BITS|SIZE_T_SIZE)

Definition at line 1669 of file dlmalloc.c.

#define fm   gm

#define FOOTERS   0

Definition at line 496 of file dlmalloc.c.

#define FOUR_SIZE_T_SIZES   (SIZE_T_SIZE<<2)

Definition at line 1257 of file dlmalloc.c.

#define get_foot (   p,
  s 
)    (((mchunkptr)((char*)(p) + (s)))->prev_foot)

Definition at line 1691 of file dlmalloc.c.

#define get_segment_flags (   S)    ((S)->sflags)

Definition at line 1918 of file dlmalloc.c.

#define gm   (&_gm_)

Definition at line 2061 of file dlmalloc.c.

#define granularity_align (   S)    (((S) + (mparams.granularity)) & ~(mparams.granularity - SIZE_T_ONE))

Definition at line 2090 of file dlmalloc.c.

#define HALF_MAX_SIZE_T   (MAX_SIZE_T / 2U)

Definition at line 1259 of file dlmalloc.c.

#define HAVE_MMAP   1

Definition at line 514 of file dlmalloc.c.

#define HAVE_MORECORE   1

Definition at line 533 of file dlmalloc.c.

#define HAVE_MREMAP   0

Definition at line 523 of file dlmalloc.c.

#define idx2bit (   i)    ((binmap_t)(1) << (i))

Definition at line 2294 of file dlmalloc.c.

#define INITIAL_LOCK (   l)

Definition at line 1445 of file dlmalloc.c.

#define INSECURE   0

Definition at line 511 of file dlmalloc.c.

#define insert_chunk (   M,
  P,
  S 
)
Value:
if (is_small(S)) insert_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); insert_large_chunk(M, TP, S); }

Definition at line 3119 of file dlmalloc.c.

#define insert_large_chunk (   M,
  X,
  S 
)

Definition at line 2978 of file dlmalloc.c.

#define insert_small_chunk (   M,
  P,
  S 
)
Value:
{\
  bindex_t I  = small_index(S);\
  mchunkptr B = smallbin_at(M, I);\
  mchunkptr F = B;\
  assert(S >= MIN_CHUNK_SIZE);\
  if (!smallmap_is_marked(M, I))\
    mark_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, B->fd)))\
    F = B->fd;\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
  B->fd = P;\
  F->bk = P;\
  P->fd = F;\
  P->bk = B;\
}

Definition at line 2907 of file dlmalloc.c.

#define internal_free (   m,
  mem 
)    dlfree(mem)

Definition at line 3141 of file dlmalloc.c.

#define internal_malloc (   m,
  b 
)    dlmalloc(b)

Definition at line 3140 of file dlmalloc.c.

#define INUSE_BITS   (PINUSE_BIT|CINUSE_BIT)

Definition at line 1666 of file dlmalloc.c.

#define is_aligned (   A)    (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)

Definition at line 1265 of file dlmalloc.c.

#define is_extern_segment (   S)    (get_segment_flags(S) & EXTERN_BIT)

Definition at line 1927 of file dlmalloc.c.

#define is_global (   M)    ((M) == &_gm_)

Definition at line 2062 of file dlmalloc.c.

#define is_granularity_aligned (   S)    (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)

Definition at line 2095 of file dlmalloc.c.

#define is_initialized (   M)    ((M)->top != 0)

Definition at line 2063 of file dlmalloc.c.

#define is_mmapped (   p)    (!((p)->head & PINUSE_BIT) && ((p)->prev_foot & IS_MMAPPED_BIT))

Definition at line 1702 of file dlmalloc.c.

#define IS_MMAPPED_BIT   (SIZE_T_ONE)

Definition at line 1293 of file dlmalloc.c.

#define is_mmapped_segment (   S)    (get_segment_flags(S) & IS_MMAPPED_BIT)

Definition at line 1926 of file dlmalloc.c.

#define is_page_aligned (   S)    (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)

Definition at line 2093 of file dlmalloc.c.

#define is_small (   s)    (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)

Definition at line 2232 of file dlmalloc.c.

#define least_bit (   x)    ((x) & -(x))

Definition at line 2335 of file dlmalloc.c.

#define left_bits (   x)    ((x<<1) | -(x<<1))

Definition at line 2338 of file dlmalloc.c.

#define leftmost_child (   t)    ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])

Definition at line 1824 of file dlmalloc.c.

#define leftshift_for_tree_index (   i)
Value:
((i == NTREEBINS-1)? 0 : \
    ((SIZE_T_BITSIZE-SIZE_T_ONE) - (((i) >> 1) + TREEBIN_SHIFT - 2)))

Definition at line 2281 of file dlmalloc.c.

#define M_GRANULARITY   (-2)

Definition at line 588 of file dlmalloc.c.

#define M_MMAP_THRESHOLD   (-3)

Definition at line 589 of file dlmalloc.c.

#define M_TRIM_THRESHOLD   (-1)

Definition at line 587 of file dlmalloc.c.

#define MALLINFO_FIELD_TYPE   size_t

Definition at line 577 of file dlmalloc.c.

#define MALLOC_ALIGNMENT   ((size_t)8U)

Definition at line 493 of file dlmalloc.c.

#define MALLOC_FAILURE_ACTION   errno = ENOMEM;

Definition at line 527 of file dlmalloc.c.

#define malloc_getpagesize   ((size_t)4096U)

Definition at line 1234 of file dlmalloc.c.

#define mark_inuse_foot (   M,
  p,
  s 
)

Definition at line 2412 of file dlmalloc.c.

#define mark_smallmap (   M,
  i 
)    ((M)->smallmap |= idx2bit(i))

Definition at line 2297 of file dlmalloc.c.

#define mark_treemap (   M,
  i 
)    ((M)->treemap |= idx2bit(i))

Definition at line 2301 of file dlmalloc.c.

#define MAX_REQUEST   ((-MIN_CHUNK_SIZE) << 2)

Definition at line 1642 of file dlmalloc.c.

#define MAX_SIZE_T   (~(size_t)0)

Definition at line 480 of file dlmalloc.c.

#define MAX_SMALL_REQUEST   (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)

Definition at line 2015 of file dlmalloc.c.

#define MAX_SMALL_SIZE   (MIN_LARGE_SIZE - SIZE_T_ONE)

Definition at line 2014 of file dlmalloc.c.

#define MCHUNK_SIZE   (sizeof(mchunk))

Definition at line 1618 of file dlmalloc.c.

#define mem2chunk (   mem)    ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))

Definition at line 1637 of file dlmalloc.c.
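
mem2chunk is the inverse of chunk2mem: the user-visible payload sits TWO_SIZE_T_SIZES past the chunk header. A small self-contained illustration (the ex_* names are hypothetical stand-ins, not part of this file):

#include <assert.h>
#include <stddef.h>

struct ex_chunk { size_t prev_foot; size_t head; };      /* two-word header, as above */
#define ex_chunk2mem(p)  ((void*)((char*)(p) + 2*sizeof(size_t)))
#define ex_mem2chunk(m)  ((struct ex_chunk*)((char*)(m) - 2*sizeof(size_t)))

int main(void) {
  struct ex_chunk c = {0, 0};
  void *mem = ex_chunk2mem(&c);
  assert(ex_mem2chunk(mem) == &c);                        /* round-trips exactly */
  assert((char*)mem - (char*)&c == (ptrdiff_t)(2 * sizeof(size_t)));
  return 0;
}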

#define MFAIL   ((void*)(MAX_SIZE_T))

Definition at line 1282 of file dlmalloc.c.

#define MIN_CHUNK_SIZE   ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)

Definition at line 1632 of file dlmalloc.c.

#define MIN_LARGE_SIZE   (SIZE_T_ONE << TREEBIN_SHIFT)

Definition at line 2013 of file dlmalloc.c.

#define MIN_REQUEST   (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)

Definition at line 1643 of file dlmalloc.c.

#define MIN_SMALL_INDEX   (small_index(MIN_CHUNK_SIZE))

Definition at line 2235 of file dlmalloc.c.

#define minsize_for_tree_index (   i)
Value:
((SIZE_T_ONE << (((i) >> 1) + TREEBIN_SHIFT)) |  \
   (((size_t)((i) & SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1)))

Definition at line 2286 of file dlmalloc.c.

#define MMAP_CHUNK_OVERHEAD   (TWO_SIZE_T_SIZES)

Definition at line 1627 of file dlmalloc.c.

#define MMAP_CLEARS   1

Definition at line 517 of file dlmalloc.c.

#define MMAP_FLAGS   (MAP_PRIVATE)

Definition at line 1310 of file dlmalloc.c.

#define MMAP_FOOT_PAD   (FOUR_SIZE_T_SIZES)

Definition at line 1629 of file dlmalloc.c.

#define MMAP_PROT   (PROT_READ|PROT_WRITE)

Definition at line 1298 of file dlmalloc.c.

#define MORECORE   sbrk

Definition at line 540 of file dlmalloc.c.

#define MORECORE_CONTIGUOUS   1

Definition at line 543 of file dlmalloc.c.

#define MSPACES   0

Definition at line 489 of file dlmalloc.c.

#define next_chunk (   p)    ((mchunkptr)( ((char*)(p)) + ((p)->head & ~INUSE_BITS)))

Definition at line 1684 of file dlmalloc.c.

#define next_pinuse (   p)    ((next_chunk(p)->head) & PINUSE_BIT)

Definition at line 1688 of file dlmalloc.c.

#define NO_MALLINFO   0

Definition at line 574 of file dlmalloc.c.

#define NSMALLBINS   (32U)

Definition at line 2008 of file dlmalloc.c.

#define NTREEBINS   (32U)

Definition at line 2009 of file dlmalloc.c.

#define ok_address (   M,
  a 
)    ((char*)(a) >= (M)->least_addr)

Definition at line 2374 of file dlmalloc.c.

#define ok_cinuse (   p)    cinuse(p)

Definition at line 2378 of file dlmalloc.c.

#define ok_magic (   M)    (1)

Definition at line 2393 of file dlmalloc.c.

#define ok_next (   p,
  n 
)    ((char*)(p) < (char*)(n))

Definition at line 2376 of file dlmalloc.c.

#define ok_pinuse (   p)    pinuse(p)

Definition at line 2380 of file dlmalloc.c.

#define ONLY_MSPACES   0

Definition at line 483 of file dlmalloc.c.

#define overhead_for (   p)    (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)

Definition at line 1706 of file dlmalloc.c.

#define pad_request (   req)    (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)

Definition at line 1646 of file dlmalloc.c.

#define page_align (   S)    (((S) + (mparams.page_size)) & ~(mparams.page_size - SIZE_T_ONE))

Definition at line 2086 of file dlmalloc.c.

#define pinuse (   p)    ((p)->head & PINUSE_BIT)

Definition at line 1673 of file dlmalloc.c.

#define PINUSE_BIT   (SIZE_T_ONE)

Definition at line 1664 of file dlmalloc.c.

#define POSTACTION (   M)

Definition at line 2161 of file dlmalloc.c.

#define PREACTION (   M)    (0)

Definition at line 2157 of file dlmalloc.c.

#define prev_chunk (   p)    ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))

Definition at line 1685 of file dlmalloc.c.

#define PROCEED_ON_ERROR   0

Definition at line 505 of file dlmalloc.c.

#define RELEASE_MAGIC_INIT_LOCK ( )

Definition at line 1461 of file dlmalloc.c.

#define RELEASE_MORECORE_LOCK ( )

Definition at line 1453 of file dlmalloc.c.

#define replace_dv (   M,
  P,
  S 
)
Value:
{\
  size_t DVS = M->dvsize;\
  if (DVS != 0) {\
    mchunkptr DV = M->dv;\
    assert(is_small(DVS));\
    insert_small_chunk(M, DV, DVS);\
  }\
  M->dvsize = S;\
  M->dv = P;\
}

Definition at line 2964 of file dlmalloc.c.

#define request2size (   req)    (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))

Definition at line 1650 of file dlmalloc.c.

#define RTCHECK (   e)    (e)

Definition at line 2402 of file dlmalloc.c.

#define same_or_left_bits (   x)    ((x) | -(x))

Definition at line 2341 of file dlmalloc.c.

#define segment_holds (   S,
  A 
)    ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)

Definition at line 2099 of file dlmalloc.c.

#define set_foot (   p,
  s 
)    (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))

Definition at line 1692 of file dlmalloc.c.

#define set_free_with_pinuse (   p,
  s,
  n 
)    (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))

Definition at line 1699 of file dlmalloc.c.

#define set_inuse (   M,
  p,
  s 
)
Value:
((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)

Definition at line 2415 of file dlmalloc.c.

#define set_inuse_and_pinuse (   M,
  p,
  s 
)
Value:
((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)

Definition at line 2420 of file dlmalloc.c.

#define set_lock (   M,
  L 
)
Value:
((M)->mflags = (L)?\
  ((M)->mflags | USE_LOCK_BIT) :\
  ((M)->mflags & ~USE_LOCK_BIT))

Definition at line 2080 of file dlmalloc.c.

#define set_segment_flags (   S,
  v 
)    ((S)->sflags = (v))

Definition at line 1919 of file dlmalloc.c.

#define set_size_and_pinuse_of_free_chunk (   p,
  s 
)    ((p)->head = (s|PINUSE_BIT), set_foot(p, s))

Definition at line 1695 of file dlmalloc.c.

#define set_size_and_pinuse_of_inuse_chunk (   M,
  p,
  s 
)    ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))

Definition at line 2425 of file dlmalloc.c.

#define should_trim (   M,
  s 
)    ((s) > (M)->trim_check)

Definition at line 2125 of file dlmalloc.c.

#define SIX_SIZE_T_SIZES   (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)

Definition at line 1258 of file dlmalloc.c.

#define SIZE_T_BITSIZE   (sizeof(size_t) << 3)

Definition at line 1249 of file dlmalloc.c.

#define SIZE_T_ONE   ((size_t)1)

Definition at line 1254 of file dlmalloc.c.

#define SIZE_T_SIZE   (sizeof(size_t))

Definition at line 1248 of file dlmalloc.c.

#define SIZE_T_TWO   ((size_t)2)

Definition at line 1255 of file dlmalloc.c.

#define SIZE_T_ZERO   ((size_t)0)

Definition at line 1253 of file dlmalloc.c.

#define small_index (   s)    ((s) >> SMALLBIN_SHIFT)

Definition at line 2233 of file dlmalloc.c.

#define small_index2size (   i)    ((i) << SMALLBIN_SHIFT)

Definition at line 2234 of file dlmalloc.c.

#define smallbin_at (   M,
  i 
)    ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))

Definition at line 2238 of file dlmalloc.c.
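
Putting pad_request, CHUNK_OVERHEAD and small_index together, here is a worked example of where a 20-byte request lands (the ex_* macros are hypothetical stand-ins mirroring the macros above, assuming a 4- or 8-byte size_t):

#include <assert.h>
#include <stddef.h>

#define EX_ALIGN          ((size_t)8U)            /* MALLOC_ALIGNMENT */
#define EX_MASK           (EX_ALIGN - 1)          /* CHUNK_ALIGN_MASK */
#define EX_OVERHEAD       (sizeof(size_t))        /* CHUNK_OVERHEAD */
#define ex_pad(req)       (((req) + EX_OVERHEAD + EX_MASK) & ~EX_MASK)
#define ex_small_index(s) ((s) >> 3)              /* SMALLBIN_SHIFT */

int main(void) {
  size_t nb = ex_pad((size_t)20);
  /* 24-byte chunk in smallbin 3 with a 4-byte size_t; 32-byte chunk in bin 4 with an 8-byte size_t */
  assert(nb == ((sizeof(size_t) == 4) ? 24 : 32));
  assert(ex_small_index(nb) == ((sizeof(size_t) == 4) ? 3 : 4));
  return 0;
}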

#define SMALLBIN_SHIFT   (3U)

Definition at line 2010 of file dlmalloc.c.

#define SMALLBIN_WIDTH   (SIZE_T_ONE << SMALLBIN_SHIFT)

Definition at line 2011 of file dlmalloc.c.

#define smallmap_is_marked (   M,
  i 
)    ((M)->smallmap & idx2bit(i))

Definition at line 2299 of file dlmalloc.c.

#define TOP_FOOT_SIZE   (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)

Definition at line 2135 of file dlmalloc.c.

#define treebin_at (   M,
  i 
)    (&((M)->treebins[i]))

Definition at line 2239 of file dlmalloc.c.

#define TREEBIN_SHIFT   (8U)

Definition at line 2012 of file dlmalloc.c.

#define treemap_is_marked (   M,
  i 
)    ((M)->treemap & idx2bit(i))

Definition at line 2303 of file dlmalloc.c.

#define TWO_SIZE_T_SIZES   (SIZE_T_SIZE<<1)

Definition at line 1256 of file dlmalloc.c.

#define unlink_chunk (   M,
  P,
  S 
)
Value:
if (is_small(S)) unlink_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); unlink_large_chunk(M, TP); }

Definition at line 3123 of file dlmalloc.c.

#define unlink_first_small_chunk (   M,
  B,
  P,
  I 
)
Value:
{\
  mchunkptr F = P->fd;\
  assert(P != B);\
  assert(P != F);\
  assert(chunksize(P) == small_index2size(I));\
  if (B == F)\
    clear_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, F))) {\
    B->fd = F;\
    F->bk = B;\
  }\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
}

Definition at line 2946 of file dlmalloc.c.

#define unlink_large_chunk (   M,
  X 
)

Definition at line 3046 of file dlmalloc.c.

#define unlink_small_chunk (   M,
  P,
  S 
)
Value:
{\
  mchunkptr F = P->fd;\
  mchunkptr B = P->bk;\
  bindex_t I = small_index(S);\
  assert(P != B);\
  assert(P != F);\
  assert(chunksize(P) == small_index2size(I));\
  if (F == B)\
    clear_smallmap(M, I);\
  else if (RTCHECK((F == smallbin_at(M,I) || ok_address(M, F)) &&\
                   (B == smallbin_at(M,I) || ok_address(M, B)))) {\
    F->bk = B;\
    B->fd = F;\
  }\
  else {\
    CORRUPTION_ERROR_ACTION(M);\
  }\
}

Definition at line 2926 of file dlmalloc.c.

#define USAGE_ERROR_ACTION (   m,
  p 
)    ABORT

Definition at line 2192 of file dlmalloc.c.

#define USE_BUILTIN_FFS   0

Definition at line 568 of file dlmalloc.c.

#define USE_DEV_RANDOM   0

Definition at line 571 of file dlmalloc.c.

#define use_lock (   M)    ((M)->mflags & USE_LOCK_BIT)

Definition at line 2069 of file dlmalloc.c.

#define USE_LOCK_BIT   (0U)

Definition at line 1444 of file dlmalloc.c.

#define USE_LOCKS   0

Definition at line 508 of file dlmalloc.c.

#define use_mmap (   M)    ((M)->mflags & USE_MMAP_BIT)

Definition at line 2073 of file dlmalloc.c.

#define USE_MMAP_BIT   (SIZE_T_ONE)

Definition at line 1294 of file dlmalloc.c.

#define use_noncontiguous (   M)    ((M)->mflags & USE_NONCONTIGUOUS_BIT)

Definition at line 2077 of file dlmalloc.c.

#define USE_NONCONTIGUOUS_BIT   (4U)

Definition at line 1371 of file dlmalloc.c.


Typedef Documentation

typedef unsigned int bindex_t

Definition at line 1612 of file dlmalloc.c.

typedef unsigned int binmap_t

Definition at line 1613 of file dlmalloc.c.

typedef unsigned int flag_t

Definition at line 1614 of file dlmalloc.c.

typedef struct malloc_chunk mchunk

Definition at line 1609 of file dlmalloc.c.

typedef struct malloc_segment msegment

Definition at line 1929 of file dlmalloc.c.

typedef struct malloc_tree_chunk tchunk

Definition at line 1819 of file dlmalloc.c.

typedef struct malloc_chunk* mchunkptr

Definition at line 1610 of file dlmalloc.c.

typedef struct malloc_segment* msegmentptr

Definition at line 1930 of file dlmalloc.c.

typedef struct malloc_state* mstate

Definition at line 2038 of file dlmalloc.c.

typedef struct malloc_chunk* sbinptr

Definition at line 1611 of file dlmalloc.c.

typedef struct malloc_tree_chunk* tbinptr

Definition at line 1821 of file dlmalloc.c.

typedef struct malloc_tree_chunk* tchunkptr

Definition at line 1820 of file dlmalloc.c.


Function Documentation

static void add_segment ( mstate  m,
char *  tbase,
size_t  tsize,
flag_t  mmapped 
) [static]

Definition at line 3309 of file dlmalloc.c.

                                                                             {
  /* Determine locations and sizes of segment, fenceposts, old top */
  char* old_top = (char*)m->top;
  msegmentptr oldsp = segment_holding(m, old_top);
  char* old_end = oldsp->base + oldsp->size;
  size_t ssize = pad_request(sizeof(struct malloc_segment));
  char* rawsp = old_end - (ssize + FOUR_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
  size_t offset = align_offset(chunk2mem(rawsp));
  char* asp = rawsp + offset;
  char* csp = (asp < (old_top + MIN_CHUNK_SIZE))? old_top : asp;
  mchunkptr sp = (mchunkptr)csp;
  msegmentptr ss = (msegmentptr)(chunk2mem(sp));
  mchunkptr tnext = chunk_plus_offset(sp, ssize);
  mchunkptr p = tnext;
  int nfences = 0;

  /* reset top to new space */
  init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE);

  /* Set up segment record */
  assert(is_aligned(ss));
  set_size_and_pinuse_of_inuse_chunk(m, sp, ssize);
  *ss = m->seg; /* Push current record */
  m->seg.base = tbase;
  m->seg.size = tsize;
  set_segment_flags(&m->seg, mmapped);
  m->seg.next = ss;

  /* Insert trailing fenceposts */
  for (;;) {
    mchunkptr nextp = chunk_plus_offset(p, SIZE_T_SIZE);
    p->head = FENCEPOST_HEAD;
    ++nfences;
    if ((char*)(&(nextp->head)) < old_end)
      p = nextp;
    else
      break;
  }
  assert(nfences >= 2);

  /* Insert the rest of old top into a bin as an ordinary free chunk */
  if (csp != old_top) {
    mchunkptr q = (mchunkptr)old_top;
    size_t psize = csp - old_top;
    mchunkptr tn = chunk_plus_offset(q, psize);
    set_free_with_pinuse(q, psize, tn);
    insert_chunk(m, q, psize);
  }

  check_top_chunk(m, m->top);
}

static int change_mparam ( int  param_number,
int  value 
) [static]

Definition at line 2533 of file dlmalloc.c.

                                                      {
  size_t val = (size_t)value;
  init_mparams();
  switch(param_number) {
  case M_TRIM_THRESHOLD:
    mparams.trim_threshold = val;
    return 1;
  case M_GRANULARITY:
    if (val >= mparams.page_size && ((val & (val-1)) == 0)) {
      mparams.granularity = val;
      return 1;
    }
    else
      return 0;
  case M_MMAP_THRESHOLD:
    mparams.mmap_threshold = val;
    return 1;
  default:
    return 0;
  }
}
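
change_mparam is the routine dlmallopt routes to. A usage sketch, assuming the default build of this file in which dlmallopt is #defined to mallopt and M_TRIM_THRESHOLD is -1 as listed above:

#include <malloc.h>   /* assumed to declare mallopt() on this platform */

int main(void) {
  /* Raise the trim threshold to 4 MB so sys_trim() returns memory to the
     OS less eagerly; change_mparam() stores the value in mparams. */
  return mallopt(-1 /* M_TRIM_THRESHOLD */, 4 * 1024 * 1024) ? 0 : 1;
}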

void * dlcalloc ( size_t  n_elements,
size_t  elem_size 
)

Definition at line 4298 of file dlmalloc.c.

                                                    {
  void* mem;
  size_t req = 0;
  if (n_elements != 0) {
    req = n_elements * elem_size;
    if (((n_elements | elem_size) & ~(size_t)0xffff) &&
        (req / n_elements != elem_size))
      req = MAX_SIZE_T; /* force downstream failure on overflow */
  }
  mem = dlmalloc(req);
  if (mem != 0 && calloc_must_clear(mem2chunk(mem)))
    memset(mem, 0, req);
  return mem;
}

void dlfree ( void *  mem)

Definition at line 4198 of file dlmalloc.c.

                       {
  /*
     Consolidate freed chunks with preceding or succeeding bordering
     free chunks, if they exist, and then place in a bin.  Intermixed
     with special cases for top, dv, mmapped chunks, and usage errors.
  */

  if (mem != 0) {
    mchunkptr p  = mem2chunk(mem);
#if FOOTERS
    mstate fm = get_mstate_for(p);
    if (!ok_magic(fm)) {
      USAGE_ERROR_ACTION(fm, p);
      return;
    }
#else /* FOOTERS */
#define fm gm
#endif /* FOOTERS */
    if (!PREACTION(fm)) {
      check_inuse_chunk(fm, p);
      if (RTCHECK(ok_address(fm, p) && ok_cinuse(p))) {
        size_t psize = chunksize(p);
        mchunkptr next = chunk_plus_offset(p, psize);
        if (!pinuse(p)) {
          size_t prevsize = p->prev_foot;
          if ((prevsize & IS_MMAPPED_BIT) != 0) {
            prevsize &= ~IS_MMAPPED_BIT;
            psize += prevsize + MMAP_FOOT_PAD;
            if (CALL_MUNMAP((char*)p - prevsize, psize) == 0)
              fm->footprint -= psize;
            goto postaction;
          }
          else {
            mchunkptr prev = chunk_minus_offset(p, prevsize);
            psize += prevsize;
            p = prev;
            if (RTCHECK(ok_address(fm, prev))) { /* consolidate backward */
              if (p != fm->dv) {
                unlink_chunk(fm, p, prevsize);
              }
              else if ((next->head & INUSE_BITS) == INUSE_BITS) {
                fm->dvsize = psize;
                set_free_with_pinuse(p, psize, next);
                goto postaction;
              }
            }
            else
              goto erroraction;
          }
        }

        if (RTCHECK(ok_next(p, next) && ok_pinuse(next))) {
          if (!cinuse(next)) {  /* consolidate forward */
            if (next == fm->top) {
              size_t tsize = fm->topsize += psize;
              fm->top = p;
              p->head = tsize | PINUSE_BIT;
              if (p == fm->dv) {
                fm->dv = 0;
                fm->dvsize = 0;
              }
              if (should_trim(fm, tsize))
                sys_trim(fm, 0);
              goto postaction;
            }
            else if (next == fm->dv) {
              size_t dsize = fm->dvsize += psize;
              fm->dv = p;
              set_size_and_pinuse_of_free_chunk(p, dsize);
              goto postaction;
            }
            else {
              size_t nsize = chunksize(next);
              psize += nsize;
              unlink_chunk(fm, next, nsize);
              set_size_and_pinuse_of_free_chunk(p, psize);
              if (p == fm->dv) {
                fm->dvsize = psize;
                goto postaction;
              }
            }
          }
          else
            set_free_with_pinuse(p, psize, next);
          insert_chunk(fm, p, psize);
          check_free_chunk(fm, p);
          goto postaction;
        }
      }
    erroraction:
      USAGE_ERROR_ACTION(fm, p);
    postaction:
      POSTACTION(fm);
    }
  }
#if !FOOTERS
#undef fm
#endif /* FOOTERS */
}

void** dlindependent_calloc ( size_t  ,
size_t  ,
void **   
)
void** dlindependent_calloc ( size_t  n_elements,
size_t  elem_size,
void *  chunks[] 
)

Definition at line 4340 of file dlmalloc.c.

                                                 {
  size_t sz = elem_size; /* serves as 1-element array */
  return ialloc(gm, n_elements, &sz, 3, chunks);
}

void** dlindependent_comalloc ( size_t  ,
size_t * ,
void **   
)
void** dlindependent_comalloc ( size_t  n_elements,
size_t  sizes[],
void *  chunks[] 
)

Definition at line 4346 of file dlmalloc.c.

                                                   {
  return ialloc(gm, n_elements, sizes, 0, chunks);
}

struct mallinfo dlmallinfo ( void  ) [read]

Definition at line 4383 of file dlmalloc.c.

                                 {
  return internal_mallinfo(gm);
}

void * dlmalloc ( size_t  bytes)

Definition at line 4066 of file dlmalloc.c.

                             {
  /*
     Basic algorithm:
     If a small request (< 256 bytes minus per-chunk overhead):
       1. If one exists, use a remainderless chunk in associated smallbin.
          (Remainderless means that there are too few excess bytes to
          represent as a chunk.)
       2. If it is big enough, use the dv chunk, which is normally the
          chunk adjacent to the one used for the most recent small request.
       3. If one exists, split the smallest available chunk in a bin,
          saving remainder in dv.
       4. If it is big enough, use the top chunk.
       5. If available, get memory from system and use it
     Otherwise, for a large request:
       1. Find the smallest available binned chunk that fits, and use it
          if it is better fitting than dv chunk, splitting if necessary.
       2. If better fitting than any binned chunk, use the dv chunk.
       3. If it is big enough, use the top chunk.
       4. If request size >= mmap threshold, try to directly mmap this chunk.
       5. If available, get memory from system and use it

     The ugly goto's here ensure that postaction occurs along all paths.
  */

  if (!PREACTION(gm)) {
    void* mem;
    size_t nb;
    if (bytes <= MAX_SMALL_REQUEST) {
      bindex_t idx;
      binmap_t smallbits;
      nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
      idx = small_index(nb);
      smallbits = gm->smallmap >> idx;

      if ((smallbits & 0x3U) != 0) { /* Remainderless fit to a smallbin. */
        mchunkptr b, p;
        idx += ~smallbits & 1;       /* Uses next bin if idx empty */
        b = smallbin_at(gm, idx);
        p = b->fd;
        assert(chunksize(p) == small_index2size(idx));
        unlink_first_small_chunk(gm, b, p, idx);
        set_inuse_and_pinuse(gm, p, small_index2size(idx));
        mem = chunk2mem(p);
        check_malloced_chunk(gm, mem, nb);
        goto postaction;
      }

      else if (nb > gm->dvsize) {
        if (smallbits != 0) { /* Use chunk in next nonempty smallbin */
          mchunkptr b, p, r;
          size_t rsize;
          bindex_t i;
          binmap_t leftbits = (smallbits << idx) & left_bits(idx2bit(idx));
          binmap_t leastbit = least_bit(leftbits);
          compute_bit2idx(leastbit, i);
          b = smallbin_at(gm, i);
          p = b->fd;
          assert(chunksize(p) == small_index2size(i));
          unlink_first_small_chunk(gm, b, p, i);
          rsize = small_index2size(i) - nb;
          /* Fit here cannot be remainderless if 4byte sizes */
          if (SIZE_T_SIZE != 4 && rsize < MIN_CHUNK_SIZE)
            set_inuse_and_pinuse(gm, p, small_index2size(i));
          else {
            set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
            r = chunk_plus_offset(p, nb);
            set_size_and_pinuse_of_free_chunk(r, rsize);
            replace_dv(gm, r, rsize);
          }
          mem = chunk2mem(p);
          check_malloced_chunk(gm, mem, nb);
          goto postaction;
        }

        else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
          check_malloced_chunk(gm, mem, nb);
          goto postaction;
        }
      }
    }
    else if (bytes >= MAX_REQUEST)
      nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
    else {
      nb = pad_request(bytes);
      if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
        check_malloced_chunk(gm, mem, nb);
        goto postaction;
      }
    }

    if (nb <= gm->dvsize) {
      size_t rsize = gm->dvsize - nb;
      mchunkptr p = gm->dv;
      if (rsize >= MIN_CHUNK_SIZE) { /* split dv */
        mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
        gm->dvsize = rsize;
        set_size_and_pinuse_of_free_chunk(r, rsize);
        set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
      }
      else { /* exhaust dv */
        size_t dvs = gm->dvsize;
        gm->dvsize = 0;
        gm->dv = 0;
        set_inuse_and_pinuse(gm, p, dvs);
      }
      mem = chunk2mem(p);
      check_malloced_chunk(gm, mem, nb);
      goto postaction;
    }

    else if (nb < gm->topsize) { /* Split top */
      size_t rsize = gm->topsize -= nb;
      mchunkptr p = gm->top;
      mchunkptr r = gm->top = chunk_plus_offset(p, nb);
      r->head = rsize | PINUSE_BIT;
      set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
      mem = chunk2mem(p);
      check_top_chunk(gm, gm->top);
      check_malloced_chunk(gm, mem, nb);
      goto postaction;
    }

    mem = sys_alloc(gm, nb);

  postaction:
    POSTACTION(gm);
    return mem;
  }

  return 0;
}
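
Because the dl* names are #defined to the standard names in this build, ordinary client code exercises the algorithm above. A minimal usage sketch:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
  char *buf = malloc(64);   /* small request: served from a smallbin, dv, or top */
  if (buf == NULL)          /* on failure, MALLOC_FAILURE_ACTION has set errno */
    return 1;
  strcpy(buf, "hello");
  puts(buf);
  free(buf);                /* dlfree() consolidates and re-bins the chunk */
  return 0;
}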

size_t dlmalloc_footprint ( void  )

Definition at line 4374 of file dlmalloc.c.

                                {
  return gm->footprint;
}

size_t dlmalloc_max_footprint ( void  )

Definition at line 4378 of file dlmalloc.c.

                                    {
  return gm->max_footprint;
}

int dlmalloc_trim ( size_t  pad)

Definition at line 4365 of file dlmalloc.c.

                              {
  int result = 0;
  if (!PREACTION(gm)) {
    result = sys_trim(gm, pad);
    POSTACTION(gm);
  }
  return result;
}

size_t dlmalloc_usable_size ( void *  mem)

Definition at line 4392 of file dlmalloc.c.

                                       {
  if (mem != 0) {
    mchunkptr p = mem2chunk(mem);
    if (cinuse(p))
      return chunksize(p) - overhead_for(p);
  }
  return 0;
}

int dlmallopt ( int  param_number,
int  value 
)

Definition at line 4401 of file dlmalloc.c.

                                           {
  return change_mparam(param_number, value);
}

void * dlmemalign ( size_t  alignment,
size_t  bytes 
)

Definition at line 4336 of file dlmalloc.c.

                                                 {
  return internal_memalign(gm, alignment, bytes);
}

void * dlpvalloc ( size_t  bytes)

Definition at line 4358 of file dlmalloc.c.

                              {
  size_t pagesz;
  init_mparams();
  pagesz = mparams.page_size;
  return dlmemalign(pagesz, (bytes + pagesz - SIZE_T_ONE) & ~(pagesz - SIZE_T_ONE));
}

void * dlrealloc ( void oldmem,
size_t  bytes 
)

Definition at line 4313 of file dlmalloc.c.

                                            {
  if (oldmem == 0)
    return dlmalloc(bytes);
#ifdef REALLOC_ZERO_BYTES_FREES
  if (bytes == 0) {
    dlfree(oldmem);
    return 0;
  }
#endif /* REALLOC_ZERO_BYTES_FREES */
  else {
#if ! FOOTERS
    mstate m = gm;
#else /* FOOTERS */
    mstate m = get_mstate_for(mem2chunk(oldmem));
    if (!ok_magic(m)) {
      USAGE_ERROR_ACTION(m, oldmem);
      return 0;
    }
#endif /* FOOTERS */
    return internal_realloc(m, oldmem, bytes);
  }
}

void * dlvalloc ( size_t  bytes)

Definition at line 4351 of file dlmalloc.c.

                             {
  size_t pagesz;
  init_mparams();
  pagesz = mparams.page_size;
  return dlmemalign(pagesz, bytes);
}

static int has_segment_link ( mstate  m,
msegmentptr  ss 
) [static]

Definition at line 2114 of file dlmalloc.c.

                                                      {
  msegmentptr sp = &m->seg;
  for (;;) {
    if ((char*)sp >= ss->base && (char*)sp < ss->base + ss->size)
      return 1;
    if ((sp = sp->next) == 0)
      return 0;
  }
}

static void** ialloc ( mstate  m,
size_t  n_elements,
size_t sizes,
int  opts,
void chunks[] 
) [static]

Definition at line 3935 of file dlmalloc.c.

                                     {
  /*
    This provides common support for independent_X routines, handling
    all of the combinations that can result.

    The opts arg has:
    bit 0 set if all elements are same size (using sizes[0])
    bit 1 set if elements should be zeroed
  */

  size_t    element_size;   /* chunksize of each element, if all same */
  size_t    contents_size;  /* total size of elements */
  size_t    array_size;     /* request size of pointer array */
  void*     mem;            /* malloced aggregate space */
  mchunkptr p;              /* corresponding chunk */
  size_t    remainder_size; /* remaining bytes while splitting */
  void**    marray;         /* either "chunks" or malloced ptr array */
  mchunkptr array_chunk;    /* chunk for malloced ptr array */
  flag_t    was_enabled;    /* to disable mmap */
  size_t    size;
  size_t    i;

  /* compute array length, if needed */
  if (chunks != 0) {
    if (n_elements == 0)
      return chunks; /* nothing to do */
    marray = chunks;
    array_size = 0;
  }
  else {
    /* if empty req, must still return chunk representing empty array */
    if (n_elements == 0)
      return (void**)internal_malloc(m, 0);
    marray = 0;
    array_size = request2size(n_elements * (sizeof(void*)));
  }

  /* compute total element size */
  if (opts & 0x1) { /* all-same-size */
    element_size = request2size(*sizes);
    contents_size = n_elements * element_size;
  }
  else { /* add up all the sizes */
    element_size = 0;
    contents_size = 0;
    for (i = 0; i != n_elements; ++i)
      contents_size += request2size(sizes[i]);
  }

  size = contents_size + array_size;

  /*
     Allocate the aggregate chunk.  First disable direct-mmapping so
     malloc won't use it, since we would not be able to later
     free/realloc space internal to a segregated mmap region.
  */
  was_enabled = use_mmap(m);
  disable_mmap(m);
  mem = internal_malloc(m, size - CHUNK_OVERHEAD);
  if (was_enabled)
    enable_mmap(m);
  if (mem == 0)
    return 0;

  if (PREACTION(m)) return 0;
  p = mem2chunk(mem);
  remainder_size = chunksize(p);

  assert(!is_mmapped(p));

  if (opts & 0x2) {       /* optionally clear the elements */
    memset((size_t*)mem, 0, remainder_size - SIZE_T_SIZE - array_size);
  }

  /* If not provided, allocate the pointer array as final part of chunk */
  if (marray == 0) {
    size_t  array_chunk_size;
    array_chunk = chunk_plus_offset(p, contents_size);
    array_chunk_size = remainder_size - contents_size;
    marray = (void**) (chunk2mem(array_chunk));
    set_size_and_pinuse_of_inuse_chunk(m, array_chunk, array_chunk_size);
    remainder_size = contents_size;
  }

  /* split out elements */
  for (i = 0; ; ++i) {
    marray[i] = chunk2mem(p);
    if (i != n_elements-1) {
      if (element_size != 0)
        size = element_size;
      else
        size = request2size(sizes[i]);
      remainder_size -= size;
      set_size_and_pinuse_of_inuse_chunk(m, p, size);
      p = chunk_plus_offset(p, size);
    }
    else { /* the final element absorbs any overallocation slop */
      set_size_and_pinuse_of_inuse_chunk(m, p, remainder_size);
      break;
    }
  }

#if DEBUG
  if (marray != chunks) {
    /* final element must have exactly exhausted chunk */
    if (element_size != 0) {
      assert(remainder_size == element_size);
    }
    else {
      assert(remainder_size == request2size(sizes[i]));
    }
    check_inuse_chunk(m, mem2chunk(marray));
  }
  for (i = 0; i != n_elements; ++i)
    check_inuse_chunk(m, mem2chunk(marray[i]));

#endif /* DEBUG */

  POSTACTION(m);
  return marray;
}
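
ialloc backs both independent_* entry points. A usage sketch for dlindependent_comalloc (assuming the default build, where it is #defined to independent_comalloc): three blocks of different sizes are carved from one underlying chunk, and each may later be freed individually.

#include <stdlib.h>

/* Prototype as exported by this file after the #define renaming. */
void** independent_comalloc(size_t n_elements, size_t sizes[], void* chunks[]);

int main(void) {
  size_t sizes[3] = { 16, 32, 64 };
  void  *blocks[3];
  if (independent_comalloc(3, sizes, blocks) == NULL)
    return 1;
  free(blocks[0]);   /* each element is an ordinary, individually freeable block */
  free(blocks[1]);
  free(blocks[2]);
  return 0;
}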

static void init_bins ( mstate  m) [static]

Definition at line 3237 of file dlmalloc.c.

                                {
  /* Establish circular links for smallbins */
  bindex_t i;
  for (i = 0; i < NSMALLBINS; ++i) {
    sbinptr bin = smallbin_at(m,i);
    bin->fd = bin->bk = bin;
  }
}

static int init_mparams ( void  ) [static]

Definition at line 2457 of file dlmalloc.c.

                              {
  if (mparams.page_size == 0) {
    size_t s;

    mparams.mmap_threshold = DEFAULT_MMAP_THRESHOLD;
    mparams.trim_threshold = DEFAULT_TRIM_THRESHOLD;
#if MORECORE_CONTIGUOUS
    mparams.default_mflags = USE_LOCK_BIT|USE_MMAP_BIT;
#else  /* MORECORE_CONTIGUOUS */
    mparams.default_mflags = USE_LOCK_BIT|USE_MMAP_BIT|USE_NONCONTIGUOUS_BIT;
#endif /* MORECORE_CONTIGUOUS */

#if (FOOTERS && !INSECURE)
    {
#if USE_DEV_RANDOM
      int fd;
      unsigned char buf[sizeof(size_t)];
      /* Try to use /dev/urandom, else fall back on using time */
      if ((fd = open("/dev/urandom", O_RDONLY)) >= 0 &&
          read(fd, buf, sizeof(buf)) == sizeof(buf)) {
        s = *((size_t *) buf);
        close(fd);
      }
      else
#endif /* USE_DEV_RANDOM */
        s = (size_t)(time(0) ^ (size_t)0x55555555U);

      s |= (size_t)8U;    /* ensure nonzero */
      s &= ~(size_t)7U;   /* improve chances of fault for bad values */

    }
#else /* (FOOTERS && !INSECURE) */
    s = (size_t)0x58585858U;
#endif /* (FOOTERS && !INSECURE) */
    ACQUIRE_MAGIC_INIT_LOCK();
    if (mparams.magic == 0) {
      mparams.magic = s;
      /* Set up lock for main malloc area */
      INITIAL_LOCK(&gm->mutex);
      gm->mflags = mparams.default_mflags;
    }
    RELEASE_MAGIC_INIT_LOCK();

#ifndef WIN32
    mparams.page_size = malloc_getpagesize;
    mparams.granularity = ((DEFAULT_GRANULARITY != 0)?
                           DEFAULT_GRANULARITY : mparams.page_size);
#else /* WIN32 */
    {
      SYSTEM_INFO system_info;
      GetSystemInfo(&system_info);
      mparams.page_size = system_info.dwPageSize;
      mparams.granularity = system_info.dwAllocationGranularity;
    }
#endif /* WIN32 */

    /* Sanity-check configuration:
       size_t must be unsigned and as wide as pointer type.
       ints must be at least 4 bytes.
       alignment must be at least 8.
       Alignment, min chunk size, and page size must all be powers of 2.
    */
    if ((sizeof(size_t) != sizeof(char*)) ||
        (MAX_SIZE_T < MIN_CHUNK_SIZE)  ||
        (sizeof(int) < 4)  ||
        (MALLOC_ALIGNMENT < (size_t)8U) ||
        ((MALLOC_ALIGNMENT    & (MALLOC_ALIGNMENT-SIZE_T_ONE))    != 0) ||
        ((MCHUNK_SIZE         & (MCHUNK_SIZE-SIZE_T_ONE))         != 0) ||
        ((mparams.granularity & (mparams.granularity-SIZE_T_ONE)) != 0) ||
        ((mparams.page_size   & (mparams.page_size-SIZE_T_ONE))   != 0))
      ABORT;
  }
  return 0;
}

static void init_top ( mstate  m,
mchunkptr  p,
size_t  psize 
) [static]

Definition at line 3222 of file dlmalloc.c.

                                                          {
  /* Ensure alignment */
  size_t offset = align_offset(chunk2mem(p));
  p = (mchunkptr)((char*)p + offset);
  psize -= offset;

  m->top = p;
  m->topsize = psize;
  p->head = psize | PINUSE_BIT;
  /* set size of fake trailing chunk holding overhead space only once */
  chunk_plus_offset(p, psize)->head = TOP_FOOT_SIZE;
  m->trim_check = mparams.trim_threshold; /* reset on each update */
}

static struct mallinfo internal_mallinfo ( mstate  m) [static, read]

Definition at line 2826 of file dlmalloc.c.

                                                   {
  struct mallinfo nm = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
  if (!PREACTION(m)) {
    check_malloc_state(m);
    if (is_initialized(m)) {
      size_t nfree = SIZE_T_ONE; /* top always free */
      size_t mfree = m->topsize + TOP_FOOT_SIZE;
      size_t sum = mfree;
      msegmentptr s = &m->seg;
      while (s != 0) {
        mchunkptr q = align_as_chunk(s->base);
        while (segment_holds(s, q) &&
               q != m->top && q->head != FENCEPOST_HEAD) {
          size_t sz = chunksize(q);
          sum += sz;
          if (!cinuse(q)) {
            mfree += sz;
            ++nfree;
          }
          q = next_chunk(q);
        }
        s = s->next;
      }

      nm.arena    = sum;
      nm.ordblks  = nfree;
      nm.hblkhd   = m->footprint - sum;
      nm.usmblks  = m->max_footprint;
      nm.uordblks = m->footprint - mfree;
      nm.fordblks = mfree;
      nm.keepcost = m->topsize;
    }

    POSTACTION(m);
  }
  return nm;

static void internal_malloc_stats ( mstate  m) [static]

Definition at line 2865 of file dlmalloc.c.

                                            {
  if (!PREACTION(m)) {
    size_t maxfp = 0;
    size_t fp = 0;
    size_t used = 0;
    check_malloc_state(m);
    if (is_initialized(m)) {
      msegmentptr s = &m->seg;
      maxfp = m->max_footprint;
      fp = m->footprint;
      used = fp - (m->topsize + TOP_FOOT_SIZE);

      while (s != 0) {
        mchunkptr q = align_as_chunk(s->base);
        while (segment_holds(s, q) &&
               q != m->top && q->head != FENCEPOST_HEAD) {
          if (!cinuse(q))
            used -= chunksize(q);
          q = next_chunk(q);
        }
        s = s->next;
      }
    }

    fprintf(stderr, "max system bytes = %10lu\n", (unsigned long)(maxfp));
    fprintf(stderr, "system bytes     = %10lu\n", (unsigned long)(fp));
    fprintf(stderr, "in use bytes     = %10lu\n", (unsigned long)(used));

    POSTACTION(m);
  }
}
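
This routine backs the public malloc_stats() through the dlmalloc_stats define above. A minimal call sketch, assuming this file's declarations are in scope:

  /* Dump the footprint counters printed by internal_malloc_stats above:
     max system bytes, system bytes, and in-use bytes, all to stderr. */
  static void dump_heap_stats(void) {
    malloc_stats();
  }
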
static void* internal_memalign ( mstate  m,
size_t  alignment,
size_t  bytes 
) [static]

Definition at line 3848 of file dlmalloc.c.

                                                                         {
  if (alignment <= MALLOC_ALIGNMENT)    /* Can just use malloc */
    return internal_malloc(m, bytes);
  if (alignment <  MIN_CHUNK_SIZE) /* must be at least a minimum chunk size */
    alignment = MIN_CHUNK_SIZE;
  if ((alignment & (alignment-SIZE_T_ONE)) != 0) {/* Ensure a power of 2 */
    size_t a = MALLOC_ALIGNMENT << 1;
    while (a < alignment) a <<= 1;
    alignment = a;
  }
  
  if (bytes >= MAX_REQUEST - alignment) {
    if (m != 0)  { /* Test isn't needed but avoids compiler warning */
      MALLOC_FAILURE_ACTION;
    }
  }
  else {
    size_t nb = request2size(bytes);
    size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
    char* mem = (char*)internal_malloc(m, req);
    if (mem != 0) {
      void* leader = 0;
      void* trailer = 0;
      mchunkptr p = mem2chunk(mem);

      if (PREACTION(m)) return 0;
      if ((((size_t)(mem)) % alignment) != 0) { /* misaligned */
        /*
          Find an aligned spot inside chunk.  Since we need to give
          back leading space in a chunk of at least MIN_CHUNK_SIZE, if
          the first calculation places us at a spot with less than
          MIN_CHUNK_SIZE leader, we can move to the next aligned spot.
          We've allocated enough total room so that this is always
          possible.
        */
        char* br = (char*)mem2chunk((size_t)(((size_t)(mem +
                                                       alignment -
                                                       SIZE_T_ONE)) &
                                             -alignment));
        char* pos = ((size_t)(br - (char*)(p)) >= MIN_CHUNK_SIZE)?
          br : br+alignment;
        mchunkptr newp = (mchunkptr)pos;
        size_t leadsize = pos - (char*)(p);
        size_t newsize = chunksize(p) - leadsize;

        if (is_mmapped(p)) { /* For mmapped chunks, just adjust offset */
          newp->prev_foot = p->prev_foot + leadsize;
          newp->head = (newsize|CINUSE_BIT);
        }
        else { /* Otherwise, give back leader, use the rest */
          set_inuse(m, newp, newsize);
          set_inuse(m, p, leadsize);
          leader = chunk2mem(p);
        }
        p = newp;
      }

      /* Give back spare room at the end */
      if (!is_mmapped(p)) {
        size_t size = chunksize(p);
        if (size > nb + MIN_CHUNK_SIZE) {
          size_t remainder_size = size - nb;
          mchunkptr remainder = chunk_plus_offset(p, nb);
          set_inuse(m, p, nb);
          set_inuse(m, remainder, remainder_size);
          trailer = chunk2mem(remainder);
        }
      }

      assert (chunksize(p) >= nb);
      assert((((size_t)(chunk2mem(p))) % alignment) == 0);
      check_inuse_chunk(m, p);
      POSTACTION(m);
      if (leader != 0) {
        internal_free(m, leader);
      }
      if (trailer != 0) {
        internal_free(m, trailer);
      }
      return chunk2mem(p);
    }
  }
  return 0;
}
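
memalign maps to internal_memalign through the dlmemalign define; a non-power-of-two alignment is rounded up to the next power of two by the loop above before any allocation. A minimal usage sketch (hypothetical helper, assuming this file's declarations are in scope):

  /* Request a 64-byte-aligned block; an alignment of 48 would be rounded
     up to 64 by the power-of-two loop above. */
  static void* get_aligned_buffer(void) {
    void* buf = memalign((size_t)64, (size_t)1000);
    return buf;   /* ((size_t)buf % 64) == 0 whenever buf != 0 */
  }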

static void* internal_realloc ( mstate  m,
void *  oldmem,
size_t  bytes 
) [static]

Definition at line 3777 of file dlmalloc.c.

                                                                    {
  if (bytes >= MAX_REQUEST) {
    MALLOC_FAILURE_ACTION;
    return 0;
  }
  if (!PREACTION(m)) {
    mchunkptr oldp = mem2chunk(oldmem);
    size_t oldsize = chunksize(oldp);
    mchunkptr next = chunk_plus_offset(oldp, oldsize);
    mchunkptr newp = 0;
    void* extra = 0;

    /* Try to either shrink or extend into top. Else malloc-copy-free */

    if (RTCHECK(ok_address(m, oldp) && ok_cinuse(oldp) &&
                ok_next(oldp, next) && ok_pinuse(next))) {
      size_t nb = request2size(bytes);
      if (is_mmapped(oldp))
        newp = mmap_resize(m, oldp, nb);
      else if (oldsize >= nb) { /* already big enough */
        size_t rsize = oldsize - nb;
        newp = oldp;
        if (rsize >= MIN_CHUNK_SIZE) {
          mchunkptr remainder = chunk_plus_offset(newp, nb);
          set_inuse(m, newp, nb);
          set_inuse(m, remainder, rsize);
          extra = chunk2mem(remainder);
        }
      }
      else if (next == m->top && oldsize + m->topsize > nb) {
        /* Expand into top */
        size_t newsize = oldsize + m->topsize;
        size_t newtopsize = newsize - nb;
        mchunkptr newtop = chunk_plus_offset(oldp, nb);
        set_inuse(m, oldp, nb);
        newtop->head = newtopsize |PINUSE_BIT;
        m->top = newtop;
        m->topsize = newtopsize;
        newp = oldp;
      }
    }
    else {
      USAGE_ERROR_ACTION(m, oldmem);
      POSTACTION(m);
      return 0;
    }

    POSTACTION(m);

    if (newp != 0) {
      if (extra != 0) {
        internal_free(m, extra);
      }
      check_inuse_chunk(m, newp);
      return chunk2mem(newp);
    }
    else {
      void* newmem = internal_malloc(m, bytes);
      if (newmem != 0) {
        size_t oc = oldsize - overhead_for(oldp);
        memcpy(newmem, oldmem, (oc < bytes)? oc : bytes);
        internal_free(m, oldmem);
      }
      return newmem;
    }
  }
  return 0;
}
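
internal_realloc implements the three outcomes visible through realloc (via the dlrealloc define): shrink in place, grow into the adjacent top chunk, or fall back to malloc-copy-free. A minimal usage sketch (hypothetical helper):

  #include <stdlib.h>

  /* Resize a buffer; realloc may shrink or extend it in place, or move it. */
  static void* grow_buffer(void* old, size_t newbytes) {
    void* p = realloc(old, newbytes);
    return (p != 0) ? p : old;   /* on failure the old block remains valid */
  }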

static void* mmap_alloc ( mstate  m,
size_t  nb 
) [static]

Definition at line 3158 of file dlmalloc.c.

                                             {
  size_t mmsize = granularity_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
  if (mmsize > nb) {     /* Check for wrap around 0 */
    char* mm = (char*)(DIRECT_MMAP(mmsize));
    if (mm != CMFAIL) {
      size_t offset = align_offset(chunk2mem(mm));
      size_t psize = mmsize - offset - MMAP_FOOT_PAD;
      mchunkptr p = (mchunkptr)(mm + offset);
      p->prev_foot = offset | IS_MMAPPED_BIT;
      (p)->head = (psize|CINUSE_BIT);
      mark_inuse_foot(m, p, psize);
      chunk_plus_offset(p, psize)->head = FENCEPOST_HEAD;
      chunk_plus_offset(p, psize+SIZE_T_SIZE)->head = 0;

      if (mm < m->least_addr)
        m->least_addr = mm;
      if ((m->footprint += mmsize) > m->max_footprint)
        m->max_footprint = m->footprint;
      assert(is_aligned(chunk2mem(p)));
      check_mmapped_chunk(m, p);
      return chunk2mem(p);
    }
  }
  return 0;
}
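
The mapped size is the request rounded up to the allocation granularity after adding room for alignment and the trailing fenceposts. A worked sketch of that computation (hypothetical helper, assuming 8-byte size_t, MALLOC_ALIGNMENT 8, and a 4096-byte granularity):

  /* Mirrors granularity_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK). */
  static size_t example_mmap_size(size_t nb, size_t granularity) {
    size_t raw = nb + 48 + 7;     /* SIX_SIZE_T_SIZES = 48, CHUNK_ALIGN_MASK = 7 */
    return (raw + granularity - 1) & ~(granularity - 1);
  }

  /* example_mmap_size(262144, 4096) == 266240: a 256 KiB request (the default
     mmap threshold) occupies 65 pages, covering alignment slack and the two
     trailing fencepost words written above. */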

static mchunkptr mmap_resize ( mstate  m,
mchunkptr  oldp,
size_t  nb 
) [static]

Definition at line 3185 of file dlmalloc.c.

                                                                  {
  size_t oldsize = chunksize(oldp);
  if (is_small(nb)) /* Can't shrink mmap regions below small size */
    return 0;
  /* Keep old chunk if big enough but not too big */
  if (oldsize >= nb + SIZE_T_SIZE &&
      (oldsize - nb) <= (mparams.granularity << 1))
    return oldp;
  else {
    size_t offset = oldp->prev_foot & ~IS_MMAPPED_BIT;
    size_t oldmmsize = oldsize + offset + MMAP_FOOT_PAD;
    size_t newmmsize = granularity_align(nb + SIX_SIZE_T_SIZES +
                                         CHUNK_ALIGN_MASK);
    char* cp = (char*)CALL_MREMAP((char*)oldp - offset,
                                  oldmmsize, newmmsize, 1);
    if (cp != CMFAIL) {
      mchunkptr newp = (mchunkptr)(cp + offset);
      size_t psize = newmmsize - offset - MMAP_FOOT_PAD;
      newp->head = (psize|CINUSE_BIT);
      mark_inuse_foot(m, newp, psize);
      chunk_plus_offset(newp, psize)->head = FENCEPOST_HEAD;
      chunk_plus_offset(newp, psize+SIZE_T_SIZE)->head = 0;

      if (cp < m->least_addr)
        m->least_addr = cp;
      if ((m->footprint += newmmsize - oldmmsize) > m->max_footprint)
        m->max_footprint = m->footprint;
      check_mmapped_chunk(m, newp);
      return newp;
    }
  }
  return 0;
}
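
Under the configuration above (HAVE_MREMAP 0, so CALL_MREMAP expands to MFAIL), only the "keep old chunk" path can succeed. A worked example of that window, assuming 8-byte size_t and a 4096-byte granularity:

  /* With oldsize = 300000 and granularity = 4096:
       keep the chunk when  nb + 8 <= 300000      (nb <= 299992)
       and                  300000 - nb <= 8192   (nb >= 291808).
     Any other non-small resize request returns 0 here, and the caller
     (internal_realloc) falls back to malloc-copy-free. */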

static void* prepend_alloc ( mstate  m,
char *  newbase,
char *  oldbase,
size_t  nb 
) [static]

Definition at line 3266 of file dlmalloc.c.

                                      {
  mchunkptr p = align_as_chunk(newbase);
  mchunkptr oldfirst = align_as_chunk(oldbase);
  size_t psize = (char*)oldfirst - (char*)p;
  mchunkptr q = chunk_plus_offset(p, nb);
  size_t qsize = psize - nb;
  set_size_and_pinuse_of_inuse_chunk(m, p, nb);

  assert((char*)oldfirst > (char*)q);
  assert(pinuse(oldfirst));
  assert(qsize >= MIN_CHUNK_SIZE);

  /* consolidate remainder with first chunk of old base */
  if (oldfirst == m->top) {
    size_t tsize = m->topsize += qsize;
    m->top = q;
    q->head = tsize | PINUSE_BIT;
    check_top_chunk(m, q);
  }
  else if (oldfirst == m->dv) {
    size_t dsize = m->dvsize += qsize;
    m->dv = q;
    set_size_and_pinuse_of_free_chunk(q, dsize);
  }
  else {
    if (!cinuse(oldfirst)) {
      size_t nsize = chunksize(oldfirst);
      unlink_chunk(m, oldfirst, nsize);
      oldfirst = chunk_plus_offset(oldfirst, nsize);
      qsize += nsize;
    }
    set_free_with_pinuse(q, qsize, oldfirst);
    insert_chunk(m, q, qsize);
    check_free_chunk(m, q);
  }

  check_malloced_chunk(m, chunk2mem(p), nb);
  return chunk2mem(p);
}

static size_t release_unused_segments ( mstate  m) [static]

Definition at line 3558 of file dlmalloc.c.

                                                {
  size_t released = 0;
  msegmentptr pred = &m->seg;
  msegmentptr sp = pred->next;
  while (sp != 0) {
    char* base = sp->base;
    size_t size = sp->size;
    msegmentptr next = sp->next;
    if (is_mmapped_segment(sp) && !is_extern_segment(sp)) {
      mchunkptr p = align_as_chunk(base);
      size_t psize = chunksize(p);
      /* Can unmap if first chunk holds entire segment and not pinned */
      if (!cinuse(p) && (char*)p + psize >= base + size - TOP_FOOT_SIZE) {
        tchunkptr tp = (tchunkptr)p;
        assert(segment_holds(sp, (char*)sp));
        if (p == m->dv) {
          m->dv = 0;
          m->dvsize = 0;
        }
        else {
          unlink_large_chunk(m, tp);
        }
        if (CALL_MUNMAP(base, size) == 0) {
          released += size;
          m->footprint -= size;
          /* unlink obsoleted record */
          sp = pred;
          sp->next = next;
        }
        else { /* back out if cannot unmap */
          insert_large_chunk(m, tp, psize);
        }
      }
    }
    pred = sp;
    sp = next;
  }
  return released;
}

static msegmentptr segment_holding ( mstate  m,
char *  addr 
) [static]

Definition at line 2103 of file dlmalloc.c.

                                                         {
  msegmentptr sp = &m->seg;
  for (;;) {
    if (addr >= sp->base && addr < sp->base + sp->size)
      return sp;
    if ((sp = sp->next) == 0)
      return 0;
  }
}
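
A minimal sketch of how this lookup is used within the file (hypothetical helper; callers pass addresses such as m->top or a chunk base):

  /* Nonzero iff addr lies inside one of m's segments,
     i.e. within [s->base, s->base + s->size) for some segment s. */
  static int owned_by_heap(mstate m, char* addr) {
    msegmentptr s = segment_holding(m, addr);
    return s != 0;
  }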

static void* sys_alloc ( mstate  m,
size_t  nb 
) [static]

Definition at line 3364 of file dlmalloc.c.

                                            {
  char* tbase = CMFAIL;
  size_t tsize = 0;
  flag_t mmap_flag = 0;

  init_mparams();

  /* Directly map large chunks */
  if (use_mmap(m) && nb >= mparams.mmap_threshold) {
    void* mem = mmap_alloc(m, nb);
    if (mem != 0)
      return mem;
  }

  /*
    Try getting memory in any of three ways (in most-preferred to
    least-preferred order):
    1. A call to MORECORE that can normally contiguously extend memory.
       (disabled if not MORECORE_CONTIGUOUS or not HAVE_MORECORE or
       main space is mmapped or a previous contiguous call failed)
    2. A call to MMAP new space (disabled if not HAVE_MMAP).
       Note that under the default settings, if MORECORE is unable to
       fulfill a request, and HAVE_MMAP is true, then mmap is
       used as a noncontiguous system allocator. This is a useful backup
       strategy for systems with holes in address spaces -- in this case
       sbrk cannot contiguously expand the heap, but mmap may be able to
       find space.
    3. A call to MORECORE that cannot usually contiguously extend memory.
       (disabled if not HAVE_MORECORE)
  */

  if (MORECORE_CONTIGUOUS && !use_noncontiguous(m)) {
    char* br = CMFAIL;
    msegmentptr ss = (m->top == 0)? 0 : segment_holding(m, (char*)m->top);
    size_t asize = 0;
    ACQUIRE_MORECORE_LOCK();

    if (ss == 0) {  /* First time through or recovery */
      char* base = (char*)CALL_MORECORE(0);
      if (base != CMFAIL) {
        asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
        /* Adjust to end on a page boundary */
        if (!is_page_aligned(base))
          asize += (page_align((size_t)base) - (size_t)base);
        /* Can't call MORECORE if size is negative when treated as signed */
        if (asize < HALF_MAX_SIZE_T &&
            (br = (char*)(CALL_MORECORE(asize))) == base) {
          tbase = base;
          tsize = asize;
        }
      }
    }
    else {
      /* Subtract out existing available top space from MORECORE request. */
      asize = granularity_align(nb - m->topsize + TOP_FOOT_SIZE + SIZE_T_ONE);
      /* Use mem here only if it did contiguously extend old space */
      if (asize < HALF_MAX_SIZE_T &&
          (br = (char*)(CALL_MORECORE(asize))) == ss->base+ss->size) {
        tbase = br;
        tsize = asize;
      }
    }

    if (tbase == CMFAIL) {    /* Cope with partial failure */
      if (br != CMFAIL) {    /* Try to use/extend the space we did get */
        if (asize < HALF_MAX_SIZE_T &&
            asize < nb + TOP_FOOT_SIZE + SIZE_T_ONE) {
          size_t esize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE - asize);
          if (esize < HALF_MAX_SIZE_T) {
            char* end = (char*)CALL_MORECORE(esize);
            if (end != CMFAIL)
              asize += esize;
            else {            /* Can't use; try to release */
              (void)CALL_MORECORE(-asize);
              br = CMFAIL;
            }
          }
        }
      }
      if (br != CMFAIL) {    /* Use the space we did get */
        tbase = br;
        tsize = asize;
      }
      else
        disable_contiguous(m); /* Don't try contiguous path in the future */
    }

    RELEASE_MORECORE_LOCK();
  }

  if (HAVE_MMAP && tbase == CMFAIL) {  /* Try MMAP */
    size_t req = nb + TOP_FOOT_SIZE + SIZE_T_ONE;
    size_t rsize = granularity_align(req);
    if (rsize > nb) { /* Fail if wraps around zero */
      char* mp = (char*)(CALL_MMAP(rsize));
      if (mp != CMFAIL) {
        tbase = mp;
        tsize = rsize;
        mmap_flag = IS_MMAPPED_BIT;
      }
    }
  }

  if (HAVE_MORECORE && tbase == CMFAIL) { /* Try noncontiguous MORECORE */
    size_t asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
    if (asize < HALF_MAX_SIZE_T) {
      char* br = CMFAIL;
      char* end = CMFAIL;
      ACQUIRE_MORECORE_LOCK();
      br = (char*)(CALL_MORECORE(asize));
      end = (char*)(CALL_MORECORE(0));
      RELEASE_MORECORE_LOCK();
      if (br != CMFAIL && end != CMFAIL && br < end) {
        size_t ssize = end - br;
        if (ssize > nb + TOP_FOOT_SIZE) {
          tbase = br;
          tsize = ssize;
        }
      }
    }
  }

  if (tbase != CMFAIL) {

    if ((m->footprint += tsize) > m->max_footprint)
      m->max_footprint = m->footprint;

    if (!is_initialized(m)) { /* first-time initialization */
      m->seg.base = m->least_addr = tbase;
      m->seg.size = tsize;
      set_segment_flags(&m->seg, mmap_flag);
      m->magic = mparams.magic;
      init_bins(m);
      if (is_global(m)) 
        init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE);
      else {
        /* Offset top by embedded malloc_state */
        mchunkptr mn = next_chunk(mem2chunk(m));
        init_top(m, mn, (size_t)((tbase + tsize) - (char*)mn) -TOP_FOOT_SIZE);
      }
    }

    else {
      /* Try to merge with an existing segment */
      msegmentptr sp = &m->seg;
      while (sp != 0 && tbase != sp->base + sp->size)
        sp = sp->next;
      if (sp != 0 &&
          !is_extern_segment(sp) &&
         check_segment_merge(sp, tbase, tsize) &&
          (get_segment_flags(sp) & IS_MMAPPED_BIT) == mmap_flag &&
          segment_holds(sp, m->top)) { /* append */
        sp->size += tsize;
        init_top(m, m->top, m->topsize + tsize);
      }
      else {
        if (tbase < m->least_addr)
          m->least_addr = tbase;
        sp = &m->seg;
        while (sp != 0 && sp->base != tbase + tsize)
          sp = sp->next;
        if (sp != 0 &&
            !is_extern_segment(sp) &&
           check_segment_merge(sp, tbase, tsize) &&
            (get_segment_flags(sp) & IS_MMAPPED_BIT) == mmap_flag) {
          char* oldbase = sp->base;
          sp->base = tbase;
          sp->size += tsize;
          return prepend_alloc(m, tbase, oldbase, nb);
        }
        else
          add_segment(m, tbase, tsize, mmap_flag);
      }
    }

    if (nb < m->topsize) { /* Allocate from new or extended top space */
      size_t rsize = m->topsize -= nb;
      mchunkptr p = m->top;
      mchunkptr r = m->top = chunk_plus_offset(p, nb);
      r->head = rsize | PINUSE_BIT;
      set_size_and_pinuse_of_inuse_chunk(m, p, nb);
      check_top_chunk(m, m->top);
      check_malloced_chunk(m, chunk2mem(p), nb);
      return chunk2mem(p);
    }
  }

  MALLOC_FAILURE_ACTION;
  return 0;
}
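
The contiguous branch above decides success by comparing the new break to the probed base. A minimal sketch of that probe (hypothetical helper, assuming a POSIX-style sbrk as selected by the MORECORE define above):

  #include <stdint.h>
  #include <unistd.h>

  /* Mirrors CALL_MORECORE(0) / CALL_MORECORE(asize) and the (br == base)
     test: sbrk returns the previous break, so equality means the new
     memory contiguously extends the probed base. */
  static int extend_contiguously(size_t asize, char** out_base) {
    char* base = (char*)sbrk(0);
    char* br;
    if (base == (char*)-1)
      return 0;
    br = (char*)sbrk((intptr_t)asize);
    if (br == (char*)-1)
      return 0;
    *out_base = base;
    return br == base;
  }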

static int sys_trim ( mstate  m,
size_t  pad 
) [static]

Definition at line 3598 of file dlmalloc.c.

                                          {
  size_t released = 0;
  if (pad < MAX_REQUEST && is_initialized(m)) {
    pad += TOP_FOOT_SIZE; /* ensure enough room for segment overhead */

    if (m->topsize > pad) {
      /* Shrink top space in granularity-size units, keeping at least one */
      size_t unit = mparams.granularity;
      size_t extra = ((m->topsize - pad + (unit - SIZE_T_ONE)) / unit -
                      SIZE_T_ONE) * unit;
      msegmentptr sp = segment_holding(m, (char*)m->top);

      if (!is_extern_segment(sp)) {
        if (is_mmapped_segment(sp)) {
          if (HAVE_MMAP &&
              sp->size >= extra &&
              !has_segment_link(m, sp)) { /* can't shrink if pinned */
            size_t newsize = sp->size - extra;
            /* Prefer mremap, fall back to munmap */
            if ((CALL_MREMAP(sp->base, sp->size, newsize, 0) != MFAIL) ||
                (CALL_MUNMAP(sp->base + newsize, extra) == 0)) {
              released = extra;
            }
          }
        }
        else if (HAVE_MORECORE) {
          if (extra >= HALF_MAX_SIZE_T) /* Avoid wrapping negative */
            extra = (HALF_MAX_SIZE_T) + SIZE_T_ONE - unit;
          ACQUIRE_MORECORE_LOCK();
          {
            /* Make sure end of memory is where we last set it. */
            char* old_br = (char*)(CALL_MORECORE(0));
            if (old_br == sp->base + sp->size) {
              char* rel_br = (char*)(CALL_MORECORE(-extra));
              char* new_br = (char*)(CALL_MORECORE(0));
              if (rel_br != CMFAIL && new_br < old_br)
                released = old_br - new_br;
            }
          }
          RELEASE_MORECORE_LOCK();
        }
      }

      if (released != 0) {
        sp->size -= released;
        m->footprint -= released;
        init_top(m, m->top, m->topsize - released);
        check_top_chunk(m, m->top);
      }
    }

    /* Unmap any unused mmapped segments */
    if (HAVE_MMAP) 
      released += release_unused_segments(m);

    /* On failure, disable autotrim to avoid repeated failed future calls */
    if (released == 0)
      m->trim_check = MAX_SIZE_T;
  }

  return (released != 0)? 1 : 0;
}
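
sys_trim backs the public malloc_trim through the dlmalloc_trim define above. A minimal usage sketch (hypothetical helper, assuming this file's declarations are in scope):

  /* Keep at least 64 KiB of slack in the top chunk; the return value is 1
     if any memory was released back via sbrk and/or munmap, else 0. */
  static int release_slack(void) {
    return malloc_trim((size_t)64 * 1024);
  }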

static void* tmalloc_large ( mstate  m,
size_t  nb 
) [static]

Definition at line 3664 of file dlmalloc.c.

                                                {
  tchunkptr v = 0;
  size_t rsize = -nb; /* Unsigned negation */
  tchunkptr t;
  bindex_t idx;
  compute_tree_index(nb, idx);

  if ((t = *treebin_at(m, idx)) != 0) {
    /* Traverse tree for this bin looking for node with size == nb */
    size_t sizebits = nb << leftshift_for_tree_index(idx);
    tchunkptr rst = 0;  /* The deepest untaken right subtree */
    for (;;) {
      tchunkptr rt;
      size_t trem = chunksize(t) - nb;
      if (trem < rsize) {
        v = t;
        if ((rsize = trem) == 0)
          break;
      }
      rt = t->child[1];
      t = t->child[(sizebits >> (SIZE_T_BITSIZE-SIZE_T_ONE)) & 1];
      if (rt != 0 && rt != t)
        rst = rt;
      if (t == 0) {
        t = rst; /* set t to least subtree holding sizes > nb */
        break;
      }
      sizebits <<= 1;
    }
  }

  if (t == 0 && v == 0) { /* set t to root of next non-empty treebin */
    binmap_t leftbits = left_bits(idx2bit(idx)) & m->treemap;
    if (leftbits != 0) {
      bindex_t i;
      binmap_t leastbit = least_bit(leftbits);
      compute_bit2idx(leastbit, i);
      t = *treebin_at(m, i);
    }
  }

  while (t != 0) { /* find smallest of tree or subtree */
    size_t trem = chunksize(t) - nb;
    if (trem < rsize) {
      rsize = trem;
      v = t;
    }
    t = leftmost_child(t);
  }

  /*  If dv is a better fit, return 0 so malloc will use it */
  if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
    if (RTCHECK(ok_address(m, v))) { /* split */
      mchunkptr r = chunk_plus_offset(v, nb);
      assert(chunksize(v) == rsize + nb);
      if (RTCHECK(ok_next(v, r))) {
        unlink_large_chunk(m, v);
        if (rsize < MIN_CHUNK_SIZE)
          set_inuse_and_pinuse(m, v, (rsize + nb));
        else {
          set_size_and_pinuse_of_inuse_chunk(m, v, nb);
          set_size_and_pinuse_of_free_chunk(r, rsize);
          insert_chunk(m, r, rsize);
        }
        return chunk2mem(v);
      }
    }
    CORRUPTION_ERROR_ACTION(m);
  }
  return 0;
}

static void* tmalloc_small ( mstate  m,
size_t  nb 
) [static]

Definition at line 3737 of file dlmalloc.c.

                                                {
  tchunkptr t, v;
  size_t rsize;
  bindex_t i;
  binmap_t leastbit = least_bit(m->treemap);
  compute_bit2idx(leastbit, i);

  v = t = *treebin_at(m, i);
  rsize = chunksize(t) - nb;

  while ((t = leftmost_child(t)) != 0) {
    size_t trem = chunksize(t) - nb;
    if (trem < rsize) {
      rsize = trem;
      v = t;
    }
  }

  if (RTCHECK(ok_address(m, v))) {
    mchunkptr r = chunk_plus_offset(v, nb);
    assert(chunksize(v) == rsize + nb);
    if (RTCHECK(ok_next(v, r))) {
      unlink_large_chunk(m, v);
      if (rsize < MIN_CHUNK_SIZE)
        set_inuse_and_pinuse(m, v, (rsize + nb));
      else {
        set_size_and_pinuse_of_inuse_chunk(m, v, nb);
        set_size_and_pinuse_of_free_chunk(r, rsize);
        replace_dv(m, r, rsize);
      }
      return chunk2mem(v);
    }
  }

  CORRUPTION_ERROR_ACTION(m);
  return 0;
}


Variable Documentation

int dev_zero_fd = -1 [static]

Definition at line 1311 of file dlmalloc.c.

struct malloc_params mparams [static]

Definition at line 2057 of file dlmalloc.c.

struct malloc_state _gm_ [static]

Definition at line 2060 of file dlmalloc.c.