lightning-sunbird  0.9+nobinonly
stubborn.c
/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, July 31, 1995 5:02 pm PDT */


#include "gc_priv.h"

# ifdef STUBBORN_ALLOC
/* Stubborn object (hard to change, nearly immutable) allocation. */
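/* In outline: a stubborn object's pointer fields may be written only   */
/* between its allocation by GC_malloc_stubborn and the matching        */
/* GC_end_stubborn_change, or after GC_change_stubborn reopens it.      */
/* Recording which pages of stubborn objects actually changed lets the  */
/* collector avoid rescanning pages that provably did not change.       */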

extern ptr_t GC_clear_stack();     /* in misc.c, behaves like identity */

#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))

/* Data structure representing immutable objects that   */
/* are still being initialized.                         */
/* This is a bit baroque in order to avoid acquiring    */
/* the lock twice for a typical allocation.             */

GC_PTR * GC_changing_list_start;

# ifdef THREADS
  VOLATILE GC_PTR * VOLATILE GC_changing_list_current;
# else
  GC_PTR * GC_changing_list_current;
# endif
        /* Points at last added element.  Also (ab)used for            */
        /* synchronization.  Updates and reads are assumed atomic.     */

GC_PTR * GC_changing_list_limit;
        /* Points at the last word of the buffer, which is always 0.   */
        /* All entries in (GC_changing_list_current,                   */
        /* GC_changing_list_limit] are 0.                              */
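        /* Layout sketch implied by the comments above (illustrative): */
        /*                                                             */
        /*   start                 current              limit          */
        /*     |                      |                   |            */
        /*     v                      v                   v            */
        /*   [ p0 ][ 0 ][ p2 ][ p3 ][ p4 ][ 0 ][ 0 ] ... [ 0 ]         */
        /*                                                             */
        /* Slots at or before current may hold pointers or 0 holes     */
        /* (cleared by GC_end_stubborn_change); every slot after       */
        /* current, and the word at limit itself, is 0.                */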


void GC_stubborn_init()
{
#   define INIT_SIZE 10

    GC_changing_list_start = (GC_PTR *)
                     GC_generic_malloc_inner(
                            (word)(INIT_SIZE * sizeof(GC_PTR)),
                            PTRFREE);
    if (GC_changing_list_start == 0) {
        GC_err_printf0("Insufficient space to start up\n");
        ABORT("GC_stubborn_init: out of space");
    }
    /* Zero the buffer only once we know the allocation succeeded. */
    BZERO(GC_changing_list_start,
          INIT_SIZE * sizeof(GC_PTR));
    GC_changing_list_current = GC_changing_list_start;
    GC_changing_list_limit = GC_changing_list_start + INIT_SIZE - 1;
    * GC_changing_list_limit = 0;
}

/* Compact and possibly grow GC_changing_list.  The old copy is       */
/* left alone.  Lock must be held.                                    */
/* When called, GC_changing_list_current == GC_changing_list_limit,   */
/* which is one past the current element.                             */
/* When we finish, GC_changing_list_current again points one past the */
/* last element.                                                      */
/* Invariant while this is running: GC_changing_list_current          */
/* points at a word containing 0.                                     */
/* Returns FALSE on failure.                                          */
GC_bool GC_compact_changing_list()
{
    register GC_PTR *p, *q;
    register word count = 0;
    word old_size = (char **)GC_changing_list_limit
                  - (char **)GC_changing_list_start + 1;
                  /* The casts are needed as a workaround for an Amiga bug */
    register word new_size = old_size;
    GC_PTR * new_list;

    for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
        if (*p != 0) count++;
    }
    if (2 * count > old_size) new_size = 2 * count;
    new_list = (GC_PTR *)
               GC_generic_malloc_inner(
                            new_size * sizeof(GC_PTR), PTRFREE);
               /* PTRFREE is a lie.  But we don't want the collector to  */
               /* consider these.  We do want the list itself to be      */
               /* collectable.                                           */
    if (new_list == 0) return(FALSE);
    BZERO(new_list, new_size * sizeof(GC_PTR));
    q = new_list;
    for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
        if (*p != 0) *q++ = *p;
    }
    GC_changing_list_start = new_list;
    GC_changing_list_limit = new_list + new_size - 1;
    GC_changing_list_current = q;
    return(TRUE);
}
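/* Worked example of the sizing rule above: with old_size == 10 and    */
/* count == 7 live entries, 2*count == 14 > 10, so the new buffer gets */
/* 14 slots; with count == 4, 2*count == 8 <= 10 and the list is       */
/* rebuilt at its old size, merely squeezing out the 0 holes.          */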

/* Add p to changing list.  Clear p on failure.  */
# define ADD_CHANGING(p) \
       {      \
           register struct hblk * h = HBLKPTR(p);       \
           register word index = PHT_HASH(h);    \
           \
           set_pht_entry_from_index(GC_changed_pages, index);  \
       }      \
       if (*GC_changing_list_current != 0 \
           && ++GC_changing_list_current == GC_changing_list_limit) { \
           if (!GC_compact_changing_list()) (p) = 0; \
       } \
       *GC_changing_list_current = p;
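/* Note on ADD_CHANGING: it first marks p's page in GC_changed_pages   */
/* (so the page will be rescanned), then appends p to the changing     */
/* list.  The slot under GC_changing_list_current is reused if it has  */
/* already been cleared; otherwise current advances, compacting (and   */
/* clearing p on failure, per the comment above) when it reaches the   */
/* sentinel word at GC_changing_list_limit.                            */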

void GC_change_stubborn(p)
GC_PTR p;
{
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    ADD_CHANGING(p);
    UNLOCK();
    ENABLE_SIGNALS();
}

void GC_end_stubborn_change(p)
GC_PTR p;
{
#   ifdef THREADS
      register VOLATILE GC_PTR * my_current = GC_changing_list_current;
#   else
      register GC_PTR * my_current = GC_changing_list_current;
#   endif
    register GC_bool tried_quick;
    DCL_LOCK_STATE;

    if (*my_current == p) {
        /* Hopefully the normal case.                                 */
        /* Compaction could not have been running when we started.    */
        *my_current = 0;
#       ifdef THREADS
          if (my_current == GC_changing_list_current) {
            /* Compaction can't have run in the interim.        */
            /* We got away with the quick and dirty approach.   */
            return;
          }
          tried_quick = TRUE;
#       else
          return;
#       endif
    } else {
        tried_quick = FALSE;
    }
    DISABLE_SIGNALS();
    LOCK();
    my_current = GC_changing_list_current;
    for (; my_current >= GC_changing_list_start; my_current--) {
        if (*my_current == p) {
            *my_current = 0;
            UNLOCK();
            ENABLE_SIGNALS();
            return;
        }
    }
    if (!tried_quick) {
        GC_err_printf1("Bad arg to GC_end_stubborn_change: 0x%lx\n",
                       (unsigned long)p);
        ABORT("Bad arg to GC_end_stubborn_change");
    }
    UNLOCK();
    ENABLE_SIGNALS();
}

/* Allocate lb bytes of composite (pointerful) data.   */
/* No pointer fields may be changed after a call to    */
/* GC_end_stubborn_change(p) where p is the value      */
/* returned by GC_malloc_stubborn.                     */
# ifdef __STDC__
    GC_PTR GC_malloc_stubborn(size_t lb)
# else
    GC_PTR GC_malloc_stubborn(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t *opp;
register word lw;
ptr_t result;
DCL_LOCK_STATE;

    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_sobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            result = GC_generic_malloc((word)lb, STUBBORN);
            goto record;
        }
        *opp = obj_link(op);
        obj_link(op) = 0;
        GC_words_allocd += lw;
        result = (GC_PTR) op;
        ADD_CHANGING(result);
        FASTUNLOCK();
        return((GC_PTR)result);
    } else {
        result = (GC_PTR)
                 GC_generic_malloc((word)lb, STUBBORN);
    }
record:
    DISABLE_SIGNALS();
    LOCK();
    ADD_CHANGING(result);
    UNLOCK();
    ENABLE_SIGNALS();
    return((GC_PTR)GC_clear_stack(result));
}
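
/* Usage sketch (illustrative; not part of the original file).  The    */
/* `cons' type and the two functions are hypothetical client code      */
/* showing the intended allocate/initialize/freeze protocol, plus      */
/* reopening an object with GC_change_stubborn.                        */
#if 0
typedef struct cons { struct cons * car; struct cons * cdr; } cons;

cons * make_cons(cons * a, cons * d)
{
    cons * c = (cons *)GC_malloc_stubborn(sizeof(cons));

    c -> car = a;       /* pointer stores are legal: c is still changing */
    c -> cdr = d;
    GC_end_stubborn_change((GC_PTR)c);  /* freeze: no pointer stores now */
    return(c);
}

void set_cdr(cons * c, cons * d)
{
    GC_change_stubborn((GC_PTR)c);      /* reopen before modifying */
    c -> cdr = d;
    GC_end_stubborn_change((GC_PTR)c);  /* freeze again */
}
#endif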


/* Functions analogous to GC_read_dirty and GC_page_was_dirty. */
/* Report pages on which stubborn objects were changed.        */
void GC_read_changed()
{
    register GC_PTR * p = GC_changing_list_start;
    register GC_PTR q;
    register struct hblk * h;
    register word index;

    if (p == 0) /* initializing */ return;
    BCOPY(GC_changed_pages, GC_prev_changed_pages,
          (sizeof GC_changed_pages));
    BZERO(GC_changed_pages, (sizeof GC_changed_pages));
    for (; p <= GC_changing_list_current; p++) {
        if ((q = *p) != 0) {
            h = HBLKPTR(q);
            index = PHT_HASH(h);
            set_pht_entry_from_index(GC_changed_pages, index);
        }
    }
}

GC_bool GC_page_was_changed(h)
struct hblk * h;
{
    register word index = PHT_HASH(h);

    return(get_pht_entry_from_index(GC_prev_changed_pages, index));
}
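
/* Presumably the collector calls GC_read_changed at the start of a    */
/* collection to snapshot (and reset) the changed-page set, then       */
/* consults GC_page_was_changed per block to decide whether pages of   */
/* stubborn objects must be rescanned, mirroring the GC_read_dirty /   */
/* GC_page_was_dirty virtual-dirty-bit protocol named above.           */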

/* Remove unreachable entries from changed list.  Should only be      */
/* called with mark bits consistent and lock held.                    */
void GC_clean_changing_list()
{
    register GC_PTR * p = GC_changing_list_start;
    register GC_PTR q;
    register ptr_t r;
    register unsigned long count = 0;
    register unsigned long dropped_count = 0;

    if (p == 0) /* initializing */ return;
    for (; p <= GC_changing_list_current; p++) {
        if ((q = *p) != 0) {
            count++;
            r = (ptr_t)GC_base(q);
            if (r == 0 || !GC_is_marked(r)) {
                *p = 0;
                dropped_count++;
            }
        }
    }
#   ifdef PRINTSTATS
      if (count > 0) {
        GC_printf2("%lu entries in changing list: reclaimed %lu\n",
                   (unsigned long)count, (unsigned long)dropped_count);
      }
#   endif
}

#else /* !STUBBORN_ALLOC */

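/* With STUBBORN_ALLOC disabled, the interface is kept so clients can  */
/* call it unconditionally, but it degenerates: GC_malloc_stubborn is  */
/* an ordinary GC_malloc, and the begin/end-change calls are no-ops,   */
/* so every page is simply treated as potentially changed.             */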
# ifdef __STDC__
    GC_PTR GC_malloc_stubborn(size_t lb)
# else
    GC_PTR GC_malloc_stubborn(lb)
    size_t lb;
# endif
{
    return(GC_malloc(lb));
}

/*ARGSUSED*/
void GC_end_stubborn_change(p)
GC_PTR p;
{
}

/*ARGSUSED*/
void GC_change_stubborn(p)
GC_PTR p;
{
}


#endif /* STUBBORN_ALLOC */