/*
 * collect.inc -- GC marking loop, included multiple times with different
 * mode macros. From plt-scheme 4.2.1.
 * (Header reconstructed from documentation-browser navigation text:
 *  "Back to index / plt-scheme 4.2.1 / collect.inc".)
 */
00001 
00002 /* 
00003    This function can be compiled in three modes:
00004    STACK_TRACE - stack mode: pull off one set of set & end and push
00005        the contents. An offset is passed into the function; this
00006        offset is typically used for a second pass on a stack
00007        range to check for one-past-end pointers
00008    FOLLOW_INTERIOR - internals mode: trace with internals, but no 
00009        offset is passed in.
00010    otherwise - exact mode: don't follow interior pointers; this is
00011        the fastest mode
00012 
00013    Also #define:
00014     COLLECT - the name of the function
00015 */
00016 
00017 
/* Mode-selection plumbing for the three compilation modes described above. */
#ifdef STACK_TRACE
# define OFFSET_ARG int offset            /* stack mode takes an offset argument */
# define ADD_LOCAL_OFFSET(x) (x + offset) /* applied to every candidate pointer */
# define IF_OR_WHILE if                   /* process exactly one stack range */
# define FOLLOW_INTERIOR                  /* stack mode implies interior pointers */
#else
# define OFFSET_ARG /* empty */
# define ADD_LOCAL_OFFSET(x) x
# define IF_OR_WHILE while                /* drain the whole collect stack */
#endif

/* Statistics counters compile away entirely unless MARK_STATS is enabled. */
#if MARK_STATS
# define MARK_STATISTIC(x) x
#else
# define MARK_STATISTIC(x)
#endif

/* Interior mode accepts any address; exact mode requires the candidate
   to be PTR_ALIGNMENT-aligned before it is even looked up. */
#ifdef FOLLOW_INTERIOR
# define ALIGNED(x) 1
#else
# define ALIGNED(x) !(x & (PTR_ALIGNMENT - 1))
#endif

/* NOTE(review): both branches define ALLOW_TAIL_PUSH as 0, so the
   tail-push shortcut in the mark loop below is disabled in every mode;
   the #ifdef looks vestigial -- confirm before enabling either branch. */
#ifdef STACK_TRACE
# define ALLOW_TAIL_PUSH 0
#else
# define ALLOW_TAIL_PUSH 0
#endif
00046 
00047 static void COLLECT(OFFSET_ARG)
00048 {
00049 #ifndef SIXTY_FOUR_BIT_INTEGERS
00050   SectorPage **local_sector_pagetables;
00051 #endif
00052   unsigned long local_low_plausible;
00053   unsigned long local_high_plausible;
00054   int local_collect_stack_size;
00055   int local_collect_stack_count;
00056   unsigned long *local_collect_stack;
00057   long local_mem_use;
00058 
00059   /* Download globals into locals so they can be put in registers: */
00060 #ifndef SIXTY_FOUR_BIT_INTEGERS
00061   local_sector_pagetables = sector_pagetables;
00062 #endif
00063   local_low_plausible = low_plausible;
00064   local_high_plausible = high_plausible;
00065   local_collect_stack_size = collect_stack_size;
00066   local_collect_stack_count = collect_stack_count;
00067   local_collect_stack = collect_stack;
00068   local_mem_use = mem_use;
00069   
00070   IF_OR_WHILE (local_collect_stack_count) {
00071     unsigned long s, end;
00072 #if KEEP_DETAIL_PATH
00073     unsigned long source;
00074 #endif
00075 
00076 #if CHECK_SKIP_MARK_AT_FIRST
00077     if (local_collect_stack_count == collect_start_disable_mark_skip) {
00078       skip_mark_at_first = NULL;
00079       collect_start_disable_mark_skip = 0;
00080     }
00081 #endif
00082 
00083 #if ALLOW_TRACE_COUNT
00084     if (local_collect_stack_count == collect_start_tracing) {
00085       void *tracing_for_object;
00086       GC_count_tracer count_tracer;
00087       int size;
00088 
00089       tracing_for_object = (void *)POP_WAIT_TRACE();
00090       count_tracer = (GC_count_tracer)POP_WAIT_TRACE();
00091       size = POP_WAIT_TRACE();
00092 
00093       /* Push current trace onto the stack: */
00094       PUSH_TRACE(collect_end_tracing);
00095       PUSH_TRACE(collect_trace_count);
00096       PUSH_TRACE(count_tracer);
00097       PUSH_TRACE(tracing_for_object);
00098 
00099       collect_trace_count = size;
00100   
00101       collect_end_tracing = collect_start_tracing - COLLECT_STACK_FRAME_SIZE;
00102 
00103       collect_start_tracing = POP_WAIT_TRACE();
00104     }
00105 #endif
00106 
00107 # if KEEP_DETAIL_PATH
00108     source = local_collect_stack[--local_collect_stack_count];
00109 # endif
00110     end = local_collect_stack[--local_collect_stack_count];
00111     s = local_collect_stack[--local_collect_stack_count];
00112     
00113 #if ALLOW_TRACE_PATH
00114     if (collecting_with_trace_path) {
00115       PUSH_PATH_ELEM(collect_end_path_elem);
00116       PUSH_PATH_ELEM(s);
00117 # if KEEP_DETAIL_PATH
00118       PUSH_PATH_ELEM(source);
00119 # else
00120       PUSH_PATH_ELEM(0);
00121 # endif
00122       collect_end_path_elem = local_collect_stack_count;
00123     }
00124 #endif
00125 
00126     MARK_STATISTIC(num_pairs_stat++);
00127 
00128     while (s < end) {
00129       void *d = *(void **)INT_TO_PTR(s);
00130       unsigned long p = ADD_LOCAL_OFFSET(PTR_TO_INT(d));
00131 
00132       MARK_STATISTIC(num_checks_stat++);
00133 #ifdef FOLLOW_INTERIOR
00134       MARK_STATISTIC(num_interior_checks_stat++);
00135 #endif
00136       if (p >= local_low_plausible && p < local_high_plausible && ALIGNED(p)) {
00137 #ifdef SIXTY_FOUR_BIT_INTEGERS
00138         SectorPage *pagetable;
00139         DECL_SECTOR_PAGETABLES;
00140         FIND_SECTOR_PAGETABLES(p);
00141         if (sector_pagetables)
00142           pagetable = sector_pagetables[SECTOR_LOOKUP_PAGETABLE(p)];
00143         else
00144           pagetable = NULL;
00145 #else
00146        SectorPage *pagetable = local_sector_pagetables[SECTOR_LOOKUP_PAGETABLE(p)];
00147 #endif
00148 
00149        MARK_STATISTIC(num_plausibles_stat++);
00150 
00151        if (pagetable) {
00152          SectorPage *page = pagetable + SECTOR_LOOKUP_PAGEPOS(p);
00153          long kind = page->kind;
00154 
00155          MARK_STATISTIC(num_pages_stat++);
00156 
00157          if (kind == sector_kind_block) {
00158            /* Found common block: */
00159            BlockOfMemory *block = (BlockOfMemory *)INT_TO_PTR(page->start);
00160            unsigned long bstart = block->start;
00161 
00162            MARK_STATISTIC(num_blocks_stat++);
00163 
00164            if ((p >= bstart) && (p < block->top)) {
00165              int size = block->size;
00166              int pos = block->positions[(p - bstart) >> LOG_PTR_SIZE];
00167              unsigned long start = bstart + pos * size;
00168            
00169              MARK_STATISTIC(num_blockallocs_stat++);
00170 
00171 #ifndef FOLLOW_INTERIOR
00172              if (p == start)
00173 #endif
00174 #if CHECK_SKIP_MARK_AT_FIRST
00175               if (!skip_mark_at_first || !skip_mark_at_first((void *)p, size))
00176 #endif
00177                 {
00178                   int bpos;
00179                   unsigned char bit;
00180 #if DISTINGUISH_FREE_FROM_UNMARKED
00181                   unsigned char fbit;
00182 #endif
00183                   unsigned char freebyte;
00184        
00185                   MARK_STATISTIC(num_blockaligns_stat++);
00186 
00187                   bpos = POS_TO_UNMARK_INDEX(pos);
00188                   bit = POS_TO_UNMARK_BIT(pos);
00189 #if DISTINGUISH_FREE_FROM_UNMARKED
00190                   fbit = POS_TO_FREE_BIT(pos);
00191 #endif
00192 
00193                   freebyte = block->free[bpos];
00194               
00195                   if (NOT_MARKED(freebyte & bit) && _NOT_FREE(freebyte & fbit)) {
00196                     MARK_STATISTIC(num_blockmarks_stat++);
00197 #if ALLOW_TRACE_COUNT
00198                     if (collecting_with_trace_count) {
00199                      GC_count_tracer count_tracer;
00200                      if ((count_tracer = common_sets[block->set_no]->count_tracer)) {
00201                        void *o;
00202 #ifdef FOLLOW_INTERIOR
00203                        p = start;
00204 #endif
00205                        o = INT_TO_PTR(p);
00206                        if (block->atomic) {
00207                          void *s = o;
00208 #if PAD_BOUNDARY_BYTES
00209                          s = PAD_FORWARD(s);
00210 #endif
00211                          count_tracer(s, size); 
00212                          mem_traced += size;
00213                        } else {
00214                          /* Push new trace onto the stack: */
00215                          PUSH_WAIT_TRACE(collect_start_tracing);
00216                          PUSH_WAIT_TRACE(size);
00217                          PUSH_WAIT_TRACE(count_tracer);
00218                          PUSH_WAIT_TRACE(o);
00219                          collect_start_tracing = local_collect_stack_count + COLLECT_STACK_FRAME_SIZE;
00220                        }
00221                      } else
00222                        collect_trace_count += size;
00223                     }
00224 #endif
00225 #if ALLOW_TRACE_PATH
00226                     if (collecting_with_trace_path) {
00227                      GC_path_tracer path_tracer;
00228                      if ((path_tracer = common_sets[block->set_no]->path_tracer)) {
00229                        void *o;
00230 #ifdef FOLLOW_INTERIOR
00231                        p = start;
00232 #endif
00233                        o = INT_TO_PTR(p);
00234 # if PAD_BOUNDARY_BYTES
00235                        o = PAD_FORWARD(o);
00236 # endif
00237                        path_tracer(o, s, &collect_trace_path_stack);
00238                      }
00239                     }
00240 #endif
00241               
00242 #if PRINT && 0
00243 # ifdef FOLLOW_INTERIOR
00244                     if (diff % size)
00245                      FPRINTF(STDERR,
00246                             "inexact block: %d for %lx[%d], %d=(%d, %d) {%d} %lx -> %lx\n", 
00247                             diff % size, block, size, pos, bpos, bit,
00248                             freebyte, p, start);
00249 # endif
00250 #endif
00251                 
00252                     block->free[bpos] = freebyte ^ bit;
00253                 
00254                     local_mem_use += size;
00255                 
00256                     if (!block->atomic) {
00257                      MARK_STATISTIC(num_blockpushes_stat++);
00258 #ifdef FOLLOW_INTERIOR
00259                      p = start;
00260 #endif
00261 #if ALLOW_TAIL_PUSH
00262                      if (s + PTR_ALIGNMENT >= end) {
00263                        MARK_STATISTIC(num_blockpushes_tail_stat++);
00264                        s = p - PTR_ALIGNMENT;
00265                        end = p + size;
00266                      } else 
00267 #endif
00268                        LOCAL_PUSH_COLLECT(p, p + size, s);
00269                     }
00270               
00271 #if STAMP_AND_REMEMBER_SOURCE
00272                     if (!block->low_marker || (s < block->low_marker))
00273                      block->low_marker = s;
00274                     if (!block->high_marker || (s > block->high_marker))
00275                      block->high_marker = s;
00276 #endif
00277                   }
00278                 }
00279            }
00280          } else if (kind == sector_kind_chunk) {
00281            MemoryChunk *c = (MemoryChunk *)INT_TO_PTR(page->start);
00282            
00283            MARK_STATISTIC(num_chunks_stat++);
00284 
00285            if (((p == c->start) 
00286 #ifdef FOLLOW_INTERIOR
00287                || ((p > c->start) && (p < c->end))
00288 #endif
00289                )
00290               && !c->marked) {
00291              MARK_STATISTIC(num_chunkmarks_stat++);
00292 #if ALLOW_TRACE_COUNT
00293              if (collecting_with_trace_count) {
00294               GC_count_tracer count_tracer;
00295               int size = (c->end - c->start);
00296               if ((count_tracer = common_sets[c->set_no]->count_tracer)) {
00297                 void *o;
00298                 o = INT_TO_PTR(c->start);
00299                 if (c->atomic) {
00300                   void *s = o;
00301 #if PAD_BOUNDARY_BYTES
00302                   s = PAD_FORWARD(s);
00303 #endif
00304                   count_tracer(s, size); 
00305                   mem_traced += size;
00306                 } else {
00307                   /* Push new trace onto the stack: */
00308                   PUSH_WAIT_TRACE(collect_start_tracing);
00309                   PUSH_WAIT_TRACE(size);
00310                   PUSH_WAIT_TRACE(count_tracer);
00311                   PUSH_WAIT_TRACE(o);
00312                   collect_start_tracing = local_collect_stack_count + COLLECT_STACK_FRAME_SIZE;
00313                 }
00314               } else
00315                 collect_trace_count += size;
00316              }
00317 #endif
00318 #if ALLOW_TRACE_PATH
00319              if (collecting_with_trace_path) {
00320               GC_path_tracer path_tracer;
00321               if ((path_tracer = common_sets[c->set_no]->path_tracer)) {
00322                 void *o;
00323                 o = INT_TO_PTR(c->start);
00324 #if PAD_BOUNDARY_BYTES
00325                 o = PAD_FORWARD(o);
00326 #endif
00327                 path_tracer(o, s, &collect_trace_path_stack);
00328               }
00329              }
00330 #endif
00331 
00332 #if PRINT && 0
00333 # ifdef FOLLOW_INTERIOR
00334              if (p != c->start)
00335               FPRINTF(STDERR, "inexact chunk: %lx != %lx\n", p, c->start);
00336 # endif
00337 #endif
00338 #if PRINT && 0
00339              FPRINTF(STDERR,
00340                     "push %ld (%ld) from %ld\n",
00341                     p, (c->end - c->start), s);
00342 #endif
00343              c->marked = 1;
00344              local_mem_use += (c->end - c->start);
00345              if (!c->atomic) {
00346               LOCAL_PUSH_COLLECT(c->start, c->end, s);
00347              }
00348 #if STAMP_AND_REMEMBER_SOURCE
00349              c->marker = s;
00350 #endif
00351            }
00352          }
00353        }
00354       }
00355       s += PTR_ALIGNMENT;
00356     }
00357 
00358 #if ALLOW_TRACE_COUNT
00359     while (local_collect_stack_count == collect_end_tracing) {
00360       void *tracing_for_object, *s;
00361       GC_count_tracer count_tracer;
00362       
00363       tracing_for_object = (void *)POP_TRACE();
00364       count_tracer = (GC_count_tracer)POP_TRACE();
00365 
00366       s = tracing_for_object;
00367 #if PAD_BOUNDARY_BYTES
00368       s = PAD_FORWARD(s);
00369 #endif
00370       count_tracer(s, collect_trace_count);
00371       mem_traced += collect_trace_count;
00372 
00373       collect_trace_count = POP_TRACE();
00374       collect_end_tracing = POP_TRACE();
00375     }
00376 #endif
00377 #if ALLOW_TRACE_PATH
00378     if (collecting_with_trace_path) {
00379       while (PATH_ELEM_STACK_NONEMPTY() && (local_collect_stack_count == collect_end_path_elem)) {
00380        (void)POP_PATH_ELEM(); /* source */
00381        (void)POP_PATH_ELEM(); /* obj */
00382        collect_end_path_elem = POP_PATH_ELEM();
00383       }
00384     }
00385 #endif
00386   }
00387 
00388   /* Upload back into globals: */
00389   collect_stack_size = local_collect_stack_size;
00390   collect_stack_count =local_collect_stack_count;
00391   collect_stack = local_collect_stack;
00392   mem_use = local_mem_use;
00393 
00394 #if ALLOW_TRACE_COUNT && CHECK
00395 # ifndef STACK_TRACE
00396   if (collect_trace_stack.count)
00397     FPRINTF(STDERR, "BOO-BOO: trace stack not emty: %d\n", collect_trace_stack.count);
00398   if (collect_wait_trace_stack.count)
00399     FPRINTF(STDERR, "BOO-BOO: wait trace stack not emty: %d\n", collect_wait_trace_stack.count);
00400 # endif
00401 #endif
00402 }
00403 
/* Reset all per-inclusion configuration macros so this file can be
   #included again with a different mode (COLLECT name, STACK_TRACE /
   FOLLOW_INTERIOR settings). */
#undef ALIGNED
#undef OFFSET_ARG
#undef ADD_LOCAL_OFFSET
#undef IF_OR_WHILE
#undef STACK_TRACE
#undef FOLLOW_INTERIOR
#undef ALLOW_TAIL_PUSH
#undef COLLECT