Back to index

plt-scheme  4.2.1
Defines | Functions | Variables
allchblk.c File Reference
#include <stdio.h>
#include "private/gc_priv.h"

Go to the source code of this file.

Defines

#define MAX_BLACK_LIST_ALLOC   (2*HBLKSIZE)
#define UNIQUE_THRESHOLD   32
#define HUGE_THRESHOLD   256
#define FL_COMPRESSION   8
#define N_HBLK_FLS
#define INCR_FREE_BYTES(n, b)   GC_free_bytes[n] += (b);
#define FREE_ASSERT(e)   GC_ASSERT(e)
#define PHDR(hhdr)   HDR(hhdr -> hb_prev)
#define NHDR(hhdr)   HDR(hhdr -> hb_next)
#define IS_MAPPED(hhdr)   1
#define FL_UNKNOWN   -1

Functions

static GC_bool GC_enough_large_bytes_left (word bytes, int n)
int GC_hblk_fl_from_blocks (word blocks_needed)
void GC_print_hblkfreelist ()
int free_list_index_of (hdr *wanted)
void GC_dump_regions ()
static GC_bool setup_header (hdr *hhdr, word sz, int kind, unsigned char flags)
void GC_remove_from_fl (hdr *hhdr, int n)
struct hblk * GC_free_block_ending_at (struct hblk *h)
void GC_add_to_fl (struct hblk *h, hdr *hhdr)
struct hblk * GC_get_first_part (struct hblk *h, hdr *hhdr, word bytes, int index)
void GC_split_block (struct hblk *h, hdr *hhdr, struct hblk *n, hdr *nhdr, int index)
struct hblk * GC_allochblk_nth ()
struct hblk * GC_allochblk (word sz, int kind, unsigned flags)
struct hblk * GC_allochblk_nth (word sz, int kind, unsigned char flags, int n)
void GC_freehblk (struct hblk *hbp)

Variables

GC_bool GC_use_entire_heap = 0
struct hblk * GC_hblkfreelist [N_HBLK_FLS+1] = { 0 }
word GC_free_bytes [N_HBLK_FLS+1] = { 0 }
struct hblk * GC_freehblk_ptr = 0

Define Documentation

#define FL_COMPRESSION   8

Definition at line 40 of file allchblk.c.

#define FL_UNKNOWN   -1

Definition at line 252 of file allchblk.c.

#define FREE_ASSERT (   e)    GC_ASSERT(e)

Definition at line 73 of file allchblk.c.

#define HUGE_THRESHOLD   256

Definition at line 37 of file allchblk.c.

#define INCR_FREE_BYTES (   n,
  b 
)    GC_free_bytes[n] += (b);

Definition at line 71 of file allchblk.c.

#define IS_MAPPED (   hhdr)    1

Definition at line 99 of file allchblk.c.

#define MAX_BLACK_LIST_ALLOC   (2*HBLKSIZE)

Definition at line 30 of file allchblk.c.

#define N_HBLK_FLS
Value:

Definition at line 44 of file allchblk.c.

#define NHDR (   hhdr)    HDR(hhdr -> hb_next)

Definition at line 94 of file allchblk.c.

#define PHDR (   hhdr)    HDR(hhdr -> hb_prev)

Definition at line 93 of file allchblk.c.

#define UNIQUE_THRESHOLD   32

Definition at line 35 of file allchblk.c.


Function Documentation

int free_list_index_of ( hdr *wanted)

Definition at line 147 of file allchblk.c.

{
    /* Scan every large-block free list for the block whose header   */
    /* is "wanted".  Return the index of the list containing it, or  */
    /* -1 if it is not on any free list.  Debug/consistency helper.  */
    int list;

    for (list = 0; list <= N_HBLK_FLS; ++list) {
        struct hblk *blk = GC_hblkfreelist[list];

        while (blk != 0) {
            hdr *blkhdr = HDR(blk);

            if (blkhdr == wanted) {
                return list;
            }
            blk = blkhdr -> hb_next;
        }
    }
    return -1;  /* not found on any list */
}

Here is the caller graph for this function:

void GC_add_to_fl ( struct hblk h,
hdr hhdr 
)

Definition at line 333 of file allchblk.c.

{
    /* Insert block h (with header hhdr) at the head of the free     */
    /* list selected by its size.  Updates free-byte accounting and  */
    /* invalidates the block's object map.                           */
    int index = GC_hblk_fl_from_blocks(divHBLKSZ(hhdr -> hb_sz));
    struct hblk *second = GC_hblkfreelist[index];
    hdr * second_hdr;
#   ifdef GC_ASSERTIONS
      /* Neighbors of a newly freed block should never themselves be */
      /* free and mapped, since GC_freehblk coalesces such blocks.   */
      struct hblk *next = (struct hblk *)((word)h + hhdr -> hb_sz);
      hdr * nexthdr = HDR(next);
      struct hblk *prev = GC_free_block_ending_at(h);
      hdr * prevhdr = HDR(prev);
      GC_ASSERT(nexthdr == 0 || !HBLK_IS_FREE(nexthdr) || !IS_MAPPED(nexthdr));
      GC_ASSERT(prev == 0 || !HBLK_IS_FREE(prevhdr) || !IS_MAPPED(prevhdr));
#   endif
    /* Free blocks always span a whole number of heap blocks.        */
    GC_ASSERT(((hhdr -> hb_sz) & (HBLKSIZE-1)) == 0);
    GC_hblkfreelist[index] = h;
    INCR_FREE_BYTES(index, hhdr -> hb_sz);
    FREE_ASSERT(GC_free_bytes[index] <= GC_large_free_bytes)
    /* Link h in front of the old head; lists are doubly linked via  */
    /* the hb_next/hb_prev fields of each block's header.            */
    hhdr -> hb_next = second;
    hhdr -> hb_prev = 0;
    if (0 != second) {
      GET_HDR(second, second_hdr);
      second_hdr -> hb_prev = h;
    }
    GC_invalidate_map(hhdr);
}

Here is the call graph for this function:

Here is the caller graph for this function:

struct hblk* GC_allochblk ( word  sz,
int  kind,
unsigned  flags 
) [read]

Definition at line 552 of file allchblk.c.

{
    /* Allocate a heap block for objects of size sz words and the    */
    /* given kind.  Try each free list that could possibly satisfy   */
    /* the request, starting with the best-fitting (smallest) one,   */
    /* and return the first block found, or 0 on failure.            */
    int fl_index = GC_hblk_fl_from_blocks(OBJ_SZ_TO_BLOCKS(sz));

    while (fl_index <= N_HBLK_FLS) {
        struct hblk *hit = GC_allochblk_nth(sz, kind, flags, fl_index);

        if (hit != 0) {
            return hit;
        }
        ++fl_index;
    }
    return 0;   /* no suitable block on any free list */
}

Here is the call graph for this function:

Here is the caller graph for this function:

Here is the caller graph for this function:

struct hblk* GC_allochblk_nth ( word  sz,
int  kind,
unsigned char  flags,
int  n 
) [read]

Definition at line 572 of file allchblk.c.

{
    /* Attempt to satisfy an allocation of sz words (kind, flags) from */
    /* free list n only.  Handles splitting oversized blocks, black-   */
    /* list avoidance, and dropping fully blacklisted blocks.  Returns */
    /* the allocated block, or 0 if list n cannot satisfy the request. */
    register struct hblk *hbp;
    register hdr * hhdr;           /* Header corr. to hbp */
    register struct hblk *thishbp;
    register hdr * thishdr;        /* Header corr. to thishbp */
    signed_word size_needed;    /* number of bytes in requested objects */
    signed_word size_avail; /* bytes available in this block   */

    size_needed = HBLKSIZE * OBJ_SZ_TO_BLOCKS(sz);

    /* search for a big enough block in free list */
       hbp = GC_hblkfreelist[n];
       for(; 0 != hbp; hbp = hhdr -> hb_next) {
           GET_HDR(hbp, hhdr);
           size_avail = hhdr->hb_sz;
           if (size_avail < size_needed) continue;
           /* Avoid splitting a larger block when a collection looks  */
           /* due, unless the heuristics below say it is safe.        */
           if (size_avail != size_needed
              && !GC_use_entire_heap
              && !GC_dont_gc
              && USED_HEAP_SIZE >= GC_requested_heapsize
              && !TRUE_INCREMENTAL && GC_should_collect()) {
#             ifdef USE_MUNMAP
                  continue;
#             else
                  /* If we have enough large blocks left to cover any */
                  /* previous request for large blocks, we go ahead   */
                  /* and split.  Assuming a steady state, that should */
                  /* be safe.  It means that we can use the full      */
                  /* heap if we allocate only small objects.          */
                  if (!GC_enough_large_bytes_left(GC_large_allocd_bytes, n)) {
                    continue;
                  }
                  /* If we are deallocating lots of memory from       */
                  /* finalizers, fail and collect sooner rather       */
                  /* than later.                               */
                  if (WORDS_TO_BYTES(GC_finalizer_mem_freed)
                     > (GC_heapsize >> 4))  {
                    continue;
                  }
#             endif /* !USE_MUNMAP */
           }
           /* If the next heap block is obviously better, go on.      */
           /* This prevents us from disassembling a single large block */
           /* to get tiny blocks.                              */
           {
             signed_word next_size;
             
             thishbp = hhdr -> hb_next;
             if (thishbp != 0) {
              GET_HDR(thishbp, thishdr);
               next_size = (signed_word)(thishdr -> hb_sz);
               if (next_size < size_avail
                 && next_size >= size_needed
                 && !GC_is_black_listed(thishbp, (word)size_needed)) {
                 continue;
               }
             }
           }
           /* For collectable kinds (and large pointer-free requests) */
           /* steer around blacklisted sections of the block.         */
           if ( !IS_UNCOLLECTABLE(kind) &&
                (kind != PTRFREE || size_needed > MAX_BLACK_LIST_ALLOC)) {
             struct hblk * lasthbp = hbp;
             ptr_t search_end = (ptr_t)hbp + size_avail - size_needed;
             signed_word orig_avail = size_avail;
             signed_word eff_size_needed = ((flags & IGNORE_OFF_PAGE)?
                                          HBLKSIZE
                                          : size_needed);
             
             
             /* Advance lasthbp past blacklisted regions until a      */
             /* clean stretch (or the end of the block) is reached.   */
             while ((ptr_t)lasthbp <= search_end
                    && (thishbp = GC_is_black_listed(lasthbp,
                                                (word)eff_size_needed))
                      != 0) {
               lasthbp = thishbp;
             }
             size_avail -= (ptr_t)lasthbp - (ptr_t)hbp;
             thishbp = lasthbp;
             if (size_avail >= size_needed) {
               if (thishbp != hbp &&
                  0 != (thishdr = GC_install_header(thishbp))) {
                /* Make sure it's mapped before we mangle it. */
#                 ifdef USE_MUNMAP
                    if (!IS_MAPPED(hhdr)) {
                      GC_remap((ptr_t)hbp, hhdr -> hb_sz);
                      hhdr -> hb_flags &= ~WAS_UNMAPPED;
                    }
#                 endif
                 /* Split the block at thishbp */
                    GC_split_block(hbp, hhdr, thishbp, thishdr, n);
                /* Advance to thishbp */
                    hbp = thishbp;
                    hhdr = thishdr;
                    /* We must now allocate thishbp, since it may     */
                    /* be on the wrong free list.                     */
              }
             } else if (size_needed > (signed_word)BL_LIMIT
                        && orig_avail - size_needed
                         > (signed_word)BL_LIMIT) {
               /* Punt, since anything else risks unreasonable heap growth. */
              if (++GC_large_alloc_warn_suppressed
                  >= GC_large_alloc_warn_interval) {
              /* PLTSCHEME: rather not see this particular message (or setenv). */
#if 0
                 WARN("Repeated allocation of very large block "
                     "(appr. size %ld):\n"
                     "\tMay lead to memory leak and poor performance.\n",
                     size_needed);
#endif
                GC_large_alloc_warn_suppressed = 0;
              }
               size_avail = orig_avail;
             } else if (size_avail == 0 && size_needed == HBLKSIZE
                      && IS_MAPPED(hhdr)) {
              if (!GC_find_leak) {
                static unsigned count = 0;
                
                /* The block is completely blacklisted.  We need      */
                /* to drop some such blocks, since otherwise we spend */
                /* all our time traversing them if pointerfree */
                /* blocks are unpopular.                       */
                 /* A dropped block will be reconsidered at next GC.  */
                 if ((++count & 3) == 0) {
                   /* Allocate and drop the block in small chunks, to */
                   /* maximize the chance that we will recover some   */
                   /* later.                                          */
                    word total_size = hhdr -> hb_sz;
                     struct hblk * limit = hbp + divHBLKSZ(total_size);
                     struct hblk * h;
                    struct hblk * prev = hhdr -> hb_prev;
                     
                    GC_words_wasted += BYTES_TO_WORDS(total_size);
                    GC_large_free_bytes -= total_size;
                    GC_remove_from_fl(hhdr, n);
                     for (h = hbp; h < limit; h++) {
                       if (h == hbp || 0 != (hhdr = GC_install_header(h))) {
                         (void) setup_header(
                              hhdr,
                                     BYTES_TO_WORDS(HBLKSIZE),
                                     PTRFREE, 0); /* Can't fail */
                              if (GC_debugging_started) {
                                BZERO(h, HBLKSIZE);
                              }
                       }
                     }
                   /* Restore hbp to point at free block */
                    hbp = prev;
                    if (0 == hbp) {
                     /* Dropped block was the list head: retry list n. */
                     return GC_allochblk_nth(sz, kind, flags, n);
                    }
                    hhdr = HDR(hbp);
                 }
              }
             }
           }
           if( size_avail >= size_needed ) {
#             ifdef USE_MUNMAP
                if (!IS_MAPPED(hhdr)) {
                  GC_remap((ptr_t)hbp, hhdr -> hb_sz);
                  hhdr -> hb_flags &= ~WAS_UNMAPPED;
                }
#              endif
              /* hbp may be on the wrong freelist; the parameter n    */
              /* is important.                                 */
              hbp = GC_get_first_part(hbp, hhdr, size_needed, n);
              break;
           }
       }

    if (0 == hbp) return 0;
       
    /* Add it to map of valid blocks */
       if (!GC_install_counts(hbp, (word)size_needed)) return(0);
       /* This leaks memory under very rare conditions. */
              
    /* Set up header */
        if (!setup_header(hhdr, sz, kind, flags)) {
            GC_remove_counts(hbp, (word)size_needed);
            return(0); /* ditto */
        }

    /* Notify virtual dirty bit implementation that we are about to write.  */
    /* Ensure that pointerfree objects are not protected if it's avoidable. */
       GC_remove_protection(hbp, divHBLKSZ(size_needed),
                          (hhdr -> hb_descr == 0) /* pointer-free */);
        
    /* We just successfully allocated a block.  Restart count of      */
    /* consecutive failures.                                          */
    {
       extern unsigned GC_fail_count;
       
       GC_fail_count = 0;
    }

    GC_large_free_bytes -= size_needed;
    
    GC_ASSERT(IS_MAPPED(hhdr));
    return( hbp );
}

Here is the call graph for this function:

Definition at line 165 of file allchblk.c.

{
    /* Debug dump: print every contiguous heap region and each block */
    /* within it (free or in use), cross-checking free blocks        */
    /* against the free lists.                                       */
    unsigned i;
    ptr_t start, end;
    ptr_t p;
    size_t bytes;
    hdr *hhdr;
    for (i = 0; i < GC_n_heap_sects; ++i) {
       start = GC_heap_sects[i].hs_start;
       bytes = GC_heap_sects[i].hs_bytes;
       end = start + bytes;
       /* Merge in contiguous sections.   */
         while (i+1 < GC_n_heap_sects && GC_heap_sects[i+1].hs_start == end) {
           ++i;
           end = GC_heap_sects[i].hs_start + GC_heap_sects[i].hs_bytes;
         }
       GC_printf2("***Section from 0x%lx to 0x%lx\n", start, end);
       for (p = start; p < end;) {
           hhdr = HDR(p);
           GC_printf1("\t0x%lx ", (unsigned long)p);
           if (IS_FORWARDING_ADDR_OR_NIL(hhdr)) {
              /* No real header: skip one heap block and continue.    */
              GC_printf1("Missing header!!(%ld)\n", hhdr);
              p += HBLKSIZE;
              continue;
           }
           if (HBLK_IS_FREE(hhdr)) {
                int correct_index = GC_hblk_fl_from_blocks(
                                   divHBLKSZ(hhdr -> hb_sz));
               int actual_index;
              
              GC_printf1("\tfree block of size 0x%lx bytes",
                        (unsigned long)(hhdr -> hb_sz));
              if (IS_MAPPED(hhdr)) {
                  GC_printf0("\n");
              } else {
                  GC_printf0("(unmapped)\n");
              }
              /* Verify the block is on the free list its size implies. */
              actual_index = free_list_index_of(hhdr);
              if (-1 == actual_index) {
                  GC_printf1("\t\tBlock not on free list %ld!!\n",
                            correct_index);
              } else if (correct_index != actual_index) {
                  GC_printf2("\t\tBlock on list %ld, should be on %ld!!\n",
                            actual_index, correct_index);
              }
              p += hhdr -> hb_sz;
           } else {
              GC_printf1("\tused for blocks of size 0x%lx bytes\n",
                        (unsigned long)WORDS_TO_BYTES(hhdr -> hb_sz));
              p += HBLKSIZE * OBJ_SZ_TO_BLOCKS(hhdr -> hb_sz);
           }
       }
    }
}

Here is the call graph for this function:

static GC_bool GC_enough_large_bytes_left ( word  bytes,
int  n 
) [static]

Definition at line 59 of file allchblk.c.

  {
    /* Add "bytes" to the free bytes available on lists n and above  */
    /* (largest-index first) and report whether the running total    */
    /* ever exceeds the largest amount of large-object bytes that    */
    /* has so far been simultaneously allocated.                     */
    int list = N_HBLK_FLS;

    while (list >= n) {
        bytes += GC_free_bytes[list];
        if (bytes > GC_max_large_allocd_bytes) {
            return TRUE;
        }
        --list;
    }
    return FALSE;
  }

Here is the caller graph for this function:

Definition at line 301 of file allchblk.c.

{
    /* Return the free, mapped heap block that ends exactly at h,    */
    /* or 0 if there is none.  Used to find coalescing candidates.   */
    struct hblk * p = h - 1;
    hdr * phdr;

    GET_HDR(p, phdr);
    /* The header of the block just before h may be a forwarding     */
    /* address; follow it back to the start of its large block.      */
    while (0 != phdr && IS_FORWARDING_ADDR_OR_NIL(phdr)) {
       p = FORWARDED_ADDR(p,phdr);
       phdr = HDR(p);
    }
    if (0 != phdr) {
        if(HBLK_IS_FREE(phdr)) {
           return p;
       } else {
           return 0;
       }
    }
    /* No installed header there: fall back to scanning for the      */
    /* previous block and checking that it is free and ends at h.    */
    p = GC_prev_block(h - 1);
    if (0 != p) {
      phdr = HDR(p);
      if (HBLK_IS_FREE(phdr) && (ptr_t)p + phdr -> hb_sz == (ptr_t)h) {
       return p;
      }
    }
    return 0;
}

Here is the call graph for this function:

Here is the caller graph for this function:

Definition at line 785 of file allchblk.c.

{
/* Return heap block hbp to the large-block free lists, coalescing  */
/* with any adjacent free, mapped blocks.                           */
struct hblk *next, *prev;
hdr *hhdr, *prevhdr, *nexthdr;
signed_word size;


    GET_HDR(hbp, hhdr);
    /* Round the stored object size up to whole heap blocks; free    */
    /* blocks are always tracked in heap-block units.                */
    size = hhdr->hb_sz;
    size = HBLKSIZE * OBJ_SZ_TO_BLOCKS(size);
    GC_remove_counts(hbp, (word)size);
    hhdr->hb_sz = size;
#   ifdef USE_MUNMAP
      hhdr -> hb_last_reclaimed = (unsigned short)GC_gc_no;
#   endif
    
    /* Check for duplicate deallocation in the easy case */
      if (HBLK_IS_FREE(hhdr)) {
        GC_printf1("Duplicate large block deallocation of 0x%lx\n",
                 (unsigned long) hbp);
       ABORT("Duplicate large block deallocation");
      }

    GC_ASSERT(IS_MAPPED(hhdr));
    GC_invalidate_map(hhdr);
    next = (struct hblk *)((word)hbp + size);
    GET_HDR(next, nexthdr);
    prev = GC_free_block_ending_at(hbp);
    /* Coalesce with successor, if possible */
      if(0 != nexthdr && HBLK_IS_FREE(nexthdr) && IS_MAPPED(nexthdr)) {
       GC_remove_from_fl(nexthdr, FL_UNKNOWN);
       hhdr -> hb_sz += nexthdr -> hb_sz; 
       GC_remove_header(next);
      }
    /* Coalesce with predecessor, if possible. */
      if (0 != prev) {
       prevhdr = HDR(prev);
       if (IS_MAPPED(prevhdr)) {
         GC_remove_from_fl(prevhdr, FL_UNKNOWN);
         prevhdr -> hb_sz += hhdr -> hb_sz;
#        ifdef USE_MUNMAP
           prevhdr -> hb_last_reclaimed = (unsigned short)GC_gc_no;
#        endif
         GC_remove_header(hbp);
         /* The merged block now starts at prev.                     */
         hbp = prev;
         hhdr = prevhdr;
       }
      }
    /* FIXME: It is not clear we really always want to do these merges       */
    /* with -DUSE_MUNMAP, since it updates ages and hence prevents    */
    /* unmapping.                                              */

    GC_large_free_bytes += size;
    GC_add_to_fl(hbp, hhdr);    
}

Here is the call graph for this function:

Here is the caller graph for this function:

struct hblk* GC_get_first_part ( struct hblk h,
hdr hhdr,
word  bytes,
int  index 
) [read]

Definition at line 458 of file allchblk.c.

{
    /* Carve the first "bytes" bytes off free block h (already       */
    /* removed-from-list size-checked by the caller), returning them */
    /* and putting the remainder back on the appropriate free list.  */
    word total_size = hhdr -> hb_sz;
    struct hblk * rest;
    hdr * rest_hdr;

    GC_ASSERT((total_size & (HBLKSIZE-1)) == 0);
    GC_remove_from_fl(hhdr, index);
    if (total_size == bytes) return h;  /* exact fit: no remainder */
    rest = (struct hblk *)((word)h + bytes);
    rest_hdr = GC_install_header(rest);
    if (0 == rest_hdr) {
       /* This may be very bad news ... */
       WARN("Header allocation failed: Dropping block.\n", 0);
       return(0);
    }
    rest_hdr -> hb_sz = total_size - bytes;
    rest_hdr -> hb_flags = 0;
#   ifdef GC_ASSERTIONS
      /* Mark h not free, to avoid assertion about adjacent free blocks. */
        hhdr -> hb_map = 0;
#   endif
    GC_add_to_fl(rest, rest_hdr);
    return h;
}

Here is the call graph for this function:

Here is the caller graph for this function:

int GC_hblk_fl_from_blocks ( word  blocks_needed)

Definition at line 83 of file allchblk.c.

{
    /* Map a block count to its free-list index: one dedicated list  */
    /* per size up to UNIQUE_THRESHOLD, compressed buckets of        */
    /* FL_COMPRESSION sizes above that, and a single catch-all list  */
    /* (N_HBLK_FLS) for huge requests.                               */
    if (blocks_needed >= HUGE_THRESHOLD) {
        return N_HBLK_FLS;
    }
    if (blocks_needed <= UNIQUE_THRESHOLD) {
        return blocks_needed;
    }
    return UNIQUE_THRESHOLD
           + (blocks_needed - UNIQUE_THRESHOLD) / FL_COMPRESSION;
}

Here is the caller graph for this function:

Definition at line 103 of file allchblk.c.

{
    /* Debug dump: walk every free list, printing each block's       */
    /* address, size, and black-list status, then sanity-check the   */
    /* running total against GC_large_free_bytes.                    */
    struct hblk * h;
    word total_free = 0;
    hdr * hhdr;
    word sz;
    int i;
    
    for (i = 0; i <= N_HBLK_FLS; ++i) {
      h = GC_hblkfreelist[i];
#     ifdef USE_MUNMAP
        /* GC_free_bytes is not maintained with USE_MUNMAP.          */
        if (0 != h) GC_printf1("Free list %ld:\n",
                             (unsigned long)i);
#     else
        if (0 != h) GC_printf2("Free list %ld (Total size %ld):\n",
                             (unsigned long)i,
                            (unsigned long)GC_free_bytes[i]);
#     endif
      while (h != 0) {
        hhdr = HDR(h);
        sz = hhdr -> hb_sz;
       GC_printf2("\t0x%lx size %lu ", (unsigned long)h, (unsigned long)sz);
       total_free += sz;
        if (GC_is_black_listed(h, HBLKSIZE) != 0) {
             GC_printf0("start black listed\n");
        } else if (GC_is_black_listed(h, hhdr -> hb_sz) != 0) {
             GC_printf0("partially black listed\n");
        } else {
             GC_printf0("not black listed\n");
        }
        h = hhdr -> hb_next;
      }
    }
#   ifndef USE_MUNMAP
      if (total_free != GC_large_free_bytes) {
       GC_printf1("GC_large_free_bytes = %lu (INCONSISTENT!!)\n",
                 (unsigned long) GC_large_free_bytes);
      }
#   endif
    GC_printf1("Total of %lu bytes on free list\n", (unsigned long)total_free);
}

Here is the call graph for this function:

Here is the caller graph for this function:

void GC_remove_from_fl ( hdr hhdr,
int  n 
)

Definition at line 258 of file allchblk.c.

{
    /* Unlink hhdr's block from free list n, or — if n is FL_UNKNOWN */
    /* — from whichever list its size implies.  Updates free-byte    */
    /* accounting and repairs the doubly-linked list around it.      */
    int index;

    GC_ASSERT(((hhdr -> hb_sz) & (HBLKSIZE-1)) == 0);
#   ifndef USE_MUNMAP
      /* We always need index to maintain free counts.  */
      if (FL_UNKNOWN == n) {
          index = GC_hblk_fl_from_blocks(divHBLKSZ(hhdr -> hb_sz));
      } else {
         index = n;
      }
#   endif
    if (hhdr -> hb_prev == 0) {
        /* Block is the head of its list: with USE_MUNMAP the index  */
        /* is only needed here, so it is computed lazily.            */
#      ifdef USE_MUNMAP
         if (FL_UNKNOWN == n) {
            index = GC_hblk_fl_from_blocks(divHBLKSZ(hhdr -> hb_sz));
         } else {
           index = n;
         }
#      endif
       GC_ASSERT(HDR(GC_hblkfreelist[index]) == hhdr);
       GC_hblkfreelist[index] = hhdr -> hb_next;
    } else {
       hdr *phdr;
       GET_HDR(hhdr -> hb_prev, phdr);
       phdr -> hb_next = hhdr -> hb_next;
    }
    FREE_ASSERT(GC_free_bytes[index] >= hhdr -> hb_sz);
    INCR_FREE_BYTES(index, - (signed_word)(hhdr -> hb_sz));
    if (0 != hhdr -> hb_next) {
       hdr * nhdr;
       GC_ASSERT(!IS_FORWARDING_ADDR_OR_NIL(NHDR(hhdr)));
       GET_HDR(hhdr -> hb_next, nhdr);
       nhdr -> hb_prev = hhdr -> hb_prev;
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:

void GC_split_block ( struct hblk h,
hdr hhdr,
struct hblk n,
hdr nhdr,
int  index 
)

Definition at line 500 of file allchblk.c.

{
    /* Split free block h at address n: h keeps the first h_size     */
    /* bytes and n becomes a new free block holding the rest.  n     */
    /* takes h's place on free list "index"; h is then re-added to   */
    /* the list matching its new, smaller size.                      */
    word total_size = hhdr -> hb_sz;
    word h_size = (word)n - (word)h;
    struct hblk *prev = hhdr -> hb_prev;
    struct hblk *next = hhdr -> hb_next;

    /* Replace h with n on its freelist */
      nhdr -> hb_prev = prev;
      nhdr -> hb_next = next;
      nhdr -> hb_sz = total_size - h_size;
      nhdr -> hb_flags = 0;
      if (0 != prev) {
       HDR(prev) -> hb_next = n;
      } else {
        GC_hblkfreelist[index] = n;
      }
      if (0 != next) {
       HDR(next) -> hb_prev = n;
      }
      /* List "index" lost h_size bytes; GC_add_to_fl below credits  */
      /* them back to h's new list.                                  */
      INCR_FREE_BYTES(index, -(signed_word)h_size);
      FREE_ASSERT(GC_free_bytes[index] > 0);
#     ifdef GC_ASSERTIONS
       nhdr -> hb_map = 0;  /* Don't fail test for consecutive */
                            /* free blocks in GC_add_to_fl.           */
#     endif
#   ifdef USE_MUNMAP
      hhdr -> hb_last_reclaimed = (unsigned short)GC_gc_no;
#   endif
    hhdr -> hb_sz = h_size;
    GC_add_to_fl(h, hhdr);
    GC_invalidate_map(nhdr);
}

Here is the call graph for this function:

Here is the caller graph for this function:

static GC_bool setup_header ( hdr hhdr,
word  sz,
int  kind,
unsigned char  flags 
) [static]

Definition at line 225 of file allchblk.c.

{
    /* Initialize the header of a newly allocated block holding      */
    /* objects of sz words and the given kind/flags.  Returns FALSE  */
    /* only if the object map entry could not be allocated.          */
    register word descr;
    
    /* Add description of valid object pointers */
      if (!GC_add_map_entry(sz)) return(FALSE);
      hhdr -> hb_map = GC_obj_map[sz > MAXOBJSZ? 0 : sz];
      
    /* Set size, kind and mark proc fields */
      hhdr -> hb_sz = sz;
      hhdr -> hb_obj_kind = kind;
      hhdr -> hb_flags = flags;
      descr = GC_obj_kinds[kind].ok_descriptor;
      if (GC_obj_kinds[kind].ok_relocate_descr) descr += WORDS_TO_BYTES(sz);
      hhdr -> hb_descr = descr;
      
    /* Clear mark bits */
      GC_clear_hdr_marks(hhdr);
      
    /* Record the GC cycle in which the block was set up. */
    hhdr -> hb_last_reclaimed = (unsigned short)GC_gc_no;
    return(TRUE);
}

Here is the call graph for this function:

Here is the caller graph for this function:


Variable Documentation

Definition at line 51 of file allchblk.c.

Definition at line 775 of file allchblk.c.

Definition at line 47 of file allchblk.c.

Definition at line 21 of file allchblk.c.