Generational GC. Initial version, may be buggy.

This commit is contained in:
Jesse D. McDonald 2010-04-08 01:12:23 -05:00
parent 075cbac672
commit 7fb083a5f9
3 changed files with 449 additions and 129 deletions

gc.c

@@ -3,6 +3,7 @@
#include <assert.h>
#include <ctype.h>
#include <inttypes.h>
#include <setjmp.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
@@ -27,21 +28,13 @@ gc_stats_t gc_stats;
/* Alignment must ensure each object has enough room to hold a forwarding object */
#define GC_ALIGNMENT ((size_t)(sizeof(object_t)))
/* Smaller block sizes allow for more precise GC, but require more memory for the bitmap. */
/* The block bitmap will be allocated at startup assuming a max. heap size of 2GB. */
/* The default of 128 KB/block should require a 2 KB bitmap. */
#define GC_DIRTY_BLOCK_SIZE (128UL << 10)
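/* Worked example: 2 GB / 128 KB = 16384 blocks; 16384 bits / 8 = 2048 bytes (2 KB) of dirty bits. */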
/****************************************************************************/
static char *gc_ranges[2];
static size_t gc_min_size;
static size_t gc_max_size;
static size_t gc_soft_limit;
static bool gc_enabled;
static int gc_current_range;
static char *gc_free_ptr;
static char *gc_range_end;
/* A convenient shorthand */
#define gc_other_range() (1-gc_current_range)
static value_t gc_weak_box_list;
static value_t gc_will_list;
static value_t gc_will_active_list;
@@ -343,51 +336,61 @@ builtin_fn_t *get_builtin_fn(value_t v)
return _get_builtin_fn(v);
}
/****************************************************************************/
/*************************** Common Collector Code **************************/
static bool gc_enabled;
static bool gc_in_gen0_collection;
static bool gc_in_gen1_collection;
/* Also used from Gen-0 code to track new Gen-1 objects */
static char *gc_gen1_ranges[2];
static size_t gc_gen1_min_size;
static size_t gc_gen1_max_size;
static size_t gc_gen1_soft_limit;
static int gc_gen1_current_range;
static char *gc_gen1_free_ptr;
static char *gc_gen1_range_end;
static size_t gc_gen1_max_blocks;
static uint32_t *gc_gen1_dirty_bits;
static char **gc_gen1_block_starts;
/* A convenient shorthand */
#define gc_gen1_other_range() (1-gc_gen1_current_range)
static inline size_t gc_align(size_t nbytes) __attribute__ ((const));
static int gc_range_of(void *object) __attribute__ ((const,unused));
static void transfer_object(value_t *value);
static size_t transfer_children(object_t *object);
static void _collect_garbage(size_t min_free);
static void transfer_roots(void);
static size_t transfer_children(object_t *obj);
static void process_weak_boxes(void);
static void process_wills(void);
static void update_weak_box_list(void);
static void gc_gen0_init(size_t gen0_size);
static void gc_gen1_init(size_t min_size, size_t max_size);
static int gen1_gc_range_of(void *object) __attribute__ ((const));
static size_t gc_gen1_block_of(void *obj) __attribute__ ((const));
static inline size_t gc_gen1_free_space(void);
static void *gc_alloc_gen1(size_t nbytes);
static void collect_gen1_garbage(size_t min_free);
static void gc_gen1_clear_dirty_bits(void);
static inline size_t gc_gen1_free_space(void)
{
return gc_gen1_range_end - gc_gen1_free_ptr;
}
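/* Round nbytes up to the next multiple of GC_ALIGNMENT. The mask trick assumes
 * GC_ALIGNMENT is a power of two (e.g. with GC_ALIGNMENT == 16, gc_align(17) == 32). */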
static inline size_t gc_align(size_t nbytes)
{
return ((nbytes + GC_ALIGNMENT - 1) & ~(GC_ALIGNMENT - 1));
}
static int gc_range_of(void *object)
{
if (((value_t)object >= (value_t)gc_ranges[0]) &&
((value_t)object < (value_t)gc_ranges[1]))
return 0;
if (((value_t)object >= (value_t)gc_ranges[1]) &&
((value_t)object < (value_t)gc_ranges[2]))
return 1;
return -1;
}
static inline size_t gc_free_space(void)
{
return gc_range_end - gc_free_ptr;
}
void gc_init(size_t min_size, size_t max_size)
{
assert(min_size <= max_size);
gc_ranges[0] = (char*)malloc(max_size);
gc_ranges[1] = (char*)malloc(max_size);
assert(gc_ranges[0] && gc_ranges[1]);
gc_current_range = 0;
gc_free_ptr = gc_ranges[gc_current_range];
gc_min_size = min_size;
gc_max_size = max_size;
gc_soft_limit = gc_min_size;
gc_range_end = gc_free_ptr + gc_soft_limit;
gc_gen0_init(min_size);
gc_gen1_init(min_size, max_size);
gc_weak_box_list = NIL;
gc_will_list = NIL;
@@ -400,46 +403,269 @@ void gc_init(size_t min_size, size_t max_size)
void clear_gc_stats(void)
{
gc_stats.collections = 0;
gc_stats.gen0_passes = 0;
gc_stats.gen1_passes = 0;
gc_stats.total_ns = 0;
gc_stats.total_freed = 0;
gc_stats.high_water = 0;
gc_stats.max_ns = 0;
}
/* Preconditions: nbytes pre-aligned a la gc_align(), and space exists. */
static inline void *_gc_alloc(size_t nbytes)
/****************************** Gen-0 Collector *****************************/
/* These private variables are exported ONLY for use by is_gen0_object(). */
char *gc_gen0_range;
size_t gc_gen0_size;
static char *gc_gen0_free_ptr;
static char *gc_gen0_range_end;
/* Used to signal that Gen-0 pass has been obviated by Gen-1 collection. */
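/* collect_gen1_garbage() longjmps here when a full collection is triggered in the middle of a Gen-0 pass. */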
static jmp_buf gc_gen0_end_ctx;
static inline size_t gc_gen0_free_space(void)
{
void *p = gc_free_ptr;
assert(nbytes == gc_align(nbytes));
assert(nbytes <= gc_free_space());
gc_free_ptr += nbytes;
return p;
return gc_gen0_range_end - gc_gen0_free_ptr;
}
static void gc_gen0_init(size_t gen0_size)
{
assert(gen0_size >= GC_ALIGNMENT);
gc_gen0_size = gen0_size;
gc_gen0_range = (char*)malloc(gc_gen0_size);
release_assert(gc_gen0_range);
gc_gen0_free_ptr = gc_gen0_range;
gc_gen0_range_end = gc_gen0_range + gc_gen0_size;
}
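/* Gen-0 pass: copy live Gen-0 objects into Gen-1, using the registered roots plus any dirty Gen-1 blocks as the root set. */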
static void collect_gen0_garbage(void)
{
#ifndef NO_STATS
size_t initial_free_space;
#ifndef NO_TIMING_STATS
struct timespec start_time;
clock_gettime(TIMING_CLOCK, &start_time);
#endif
initial_free_space = gc_gen0_free_space() + gc_gen1_free_space();
#endif
//debug(("Performing Gen-0 garbage collection pass...\n"));
assert(!gc_in_gen0_collection);
assert(!gc_in_gen1_collection);
gc_in_gen0_collection = true;
/* If we trigger a Gen-1 collection at any point then we are done. */
/* Full collection will pull in any current Gen-0 objects. */
if (setjmp(gc_gen0_end_ctx) == 0)
{
char *object_ptr = gc_gen1_free_ptr;
const size_t used_bytes = gc_gen1_free_ptr - gc_gen1_ranges[gc_gen1_current_range];
const int current_block_groups = (used_bytes + GC_DIRTY_BLOCK_SIZE - 1) / GC_DIRTY_BLOCK_SIZE;
int group;
int bit;
/* 1. Transfer Gen-0 roots (ignore Gen-1). */
transfer_roots();
/* 2. Locate and transfer Gen-0 references from dirty Gen-1 blocks. */
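/* A block is dirty iff WRITE_BARRIER() was invoked on a Gen-1 object within it since the last pass. */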
for (group = 0; group < current_block_groups; ++group)
{
for (bit = 0; bit < 32; ++bit)
{
if (gc_gen1_dirty_bits[group] & (1UL << bit))
{
/* Find first object in block */
const int block = group * 32 + bit;
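/* gc_gen1_block_starts[] holds the first object placed in each block (see gc_alloc_gen1), giving an object-aligned point to start scanning from. */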
char *block_obj = gc_gen1_block_starts[block];
/* For each object in block: transfer children */
do {
block_obj += gc_align(transfer_children((object_t*)block_obj));
} while (gen1_gc_range_of(block_obj) == block);
}
}
}
/* 3. Transfer Gen-0 children of objects newly moved to Gen-1 */
while (object_ptr < gc_gen1_free_ptr)
{
object_ptr += gc_align(transfer_children((object_t*)object_ptr));
}
/* These have to be examined after normal reachability has been determined */
process_weak_boxes();
process_wills();
/* Keep transferring until no more objects in the new range refer to the old one.
* This is so that values which are otherwise unreachable, but have finalizers which
* may be able to reach them, are not collected prematurely. process_wills() transfers
* the value of any will newly placed on the active list. Note that these values may
* be finalized in any order, and that any weak references have already been cleared. */
while (object_ptr < gc_gen1_free_ptr)
{
object_ptr += gc_align(transfer_children((object_t*)object_ptr));
}
update_weak_box_list();
/* 4. Reset Gen-0 range to 'empty' state. */
gc_gen1_clear_dirty_bits();
gc_gen0_free_ptr = gc_gen0_range;
#ifndef NO_STATS
#ifndef NO_TIMING_STATS
{
struct timespec end_time;
nsec_t nsec;
clock_gettime(TIMING_CLOCK, &end_time);
nsec = (end_time.tv_sec - start_time.tv_sec) * 1000000000LL;
nsec += (end_time.tv_nsec - start_time.tv_nsec);
gc_stats.total_ns += nsec;
if (nsec > gc_stats.max_ns)
gc_stats.max_ns = nsec;
}
#endif
gc_stats.total_freed -= initial_free_space;
gc_stats.total_freed += gc_gen0_free_space();
gc_stats.total_freed += gc_gen1_free_space();
++gc_stats.gen0_passes;
#endif
}
else
{
//debug(("Gen-0 pass was interrupted by Gen-1 (full) collection.\n"));
}
#ifndef NDEBUG
/* Clear old range, to make it easier to detect bugs. */
memset(gc_gen0_range, 0, gc_gen0_size);
#endif
gc_in_gen0_collection = false;
}
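/* Bump allocation: requests at least as large as the Gen-0 range go straight to Gen-1; everything else is carved from Gen-0, collecting first when Gen-0 is full. */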
void *gc_alloc(size_t nbytes)
{
nbytes = gc_align(nbytes);
if (nbytes > gc_free_space())
_collect_garbage(nbytes);
assert(!gc_in_gen0_collection);
assert(!gc_in_gen1_collection);
return _gc_alloc(nbytes);
if (nbytes >= gc_gen0_size)
{
//debug(("Allocating directly from Gen-0...\n"));
return gc_alloc_gen1(nbytes);
}
else
{
if (nbytes > gc_gen0_free_space())
collect_gen0_garbage();
assert(nbytes <= gc_gen0_free_space());
{
void *const p = gc_gen0_free_ptr;
gc_gen0_free_ptr += nbytes;
return p;
}
}
}
/****************************** Gen-1 Collector *****************************/
static int gen1_gc_range_of(void *object)
{
if (((value_t)object >= (value_t)gc_gen1_ranges[0]) &&
((value_t)object < (value_t)(gc_gen1_ranges[0] + gc_gen1_max_size)))
return 0;
if (((value_t)object >= (value_t)gc_gen1_ranges[1]) &&
((value_t)object < (value_t)(gc_gen1_ranges[1] + gc_gen1_max_size)))
return 1;
return -1;
}
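/* Gen-1 is a pair of copying-collector semispaces. Both ranges are reserved at max_size up front, but only gc_gen1_soft_limit bytes of the current range are used until the limit grows. */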
static void gc_gen1_init(size_t min_size, size_t max_size)
{
release_assert(min_size <= ((max_size+1)/2));
gc_gen1_ranges[0] = (char*)malloc(max_size);
gc_gen1_ranges[1] = (char*)malloc(max_size);
release_assert(gc_gen1_ranges[0] && gc_gen1_ranges[1]);
gc_gen1_current_range = 0;
gc_gen1_free_ptr = gc_gen1_ranges[gc_gen1_current_range];
gc_gen1_min_size = min_size;
gc_gen1_max_size = max_size;
gc_gen1_soft_limit = 2*gc_gen1_min_size;
gc_gen1_range_end = gc_gen1_free_ptr + gc_gen1_soft_limit;
{
gc_gen1_max_blocks = ((size_t)2 << 30) / GC_DIRTY_BLOCK_SIZE;
gc_gen1_dirty_bits = (uint32_t*)malloc(4 * ((gc_gen1_max_blocks + 31) / 32));
release_assert(gc_gen1_dirty_bits);
gc_gen1_block_starts = (char**)malloc(gc_gen1_max_blocks * sizeof(char*));
gc_gen1_clear_dirty_bits();
memset(gc_gen1_block_starts, 0, gc_gen1_max_blocks * sizeof(char*));
}
}
static void gc_gen1_clear_dirty_bits(void)
{
memset(gc_gen1_dirty_bits, 0, 4 * ((gc_gen1_max_blocks + 31) / 32));
}
static void *gc_alloc_gen1(size_t nbytes)
{
nbytes = gc_align(nbytes);
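/* Keep gc_gen0_size bytes of headroom beyond this request, so that a subsequent Gen-0 evacuation into Gen-1 does not immediately force a full collection. */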
if ((nbytes + gc_gen0_size) > gc_gen1_free_space())
collect_gen1_garbage(nbytes);
assert(nbytes <= gc_gen1_free_space());
{
void *const p = gc_gen1_free_ptr;
const size_t block = gc_gen1_block_of(p);
if (!gc_gen1_block_starts[block])
gc_gen1_block_starts[block] = (char*)p;
gc_gen1_free_ptr += nbytes;
return p;
}
}
/* Precondition: *value refers to an object (or pair). */
static void transfer_object(value_t *value)
{
if (is_object(*value))
/* During Gen-0 collection pass, leave Gen-1 objects alone. Always ignore non-objects. */
if (is_object(*value) && (gc_in_gen1_collection || is_gen0_object(*value)))
{
object_t *obj = _get_object(*value);
size_t nbytes;
void *newobj;
assert(gc_range_of(obj) != gc_current_range);
assert(is_gen0_object(*value) ||
(gen1_gc_range_of(obj) == gc_gen1_other_range()));
if (obj->tag == BROKEN_HEART)
{
if (gen1_gc_range_of(_get_object(obj->forward)) != gc_gen1_current_range)
{
/* Gen-0 object was transferred into old range; needs to move to current range */
transfer_object(&obj->forward);
}
/* Object has already been moved; just update the reference */
*value = obj->forward;
return;
@@ -476,7 +702,7 @@ static void transfer_object(value_t *value)
break;
}
newobj = _gc_alloc(gc_align(nbytes));
newobj = gc_alloc_gen1(nbytes);
memcpy(newobj, obj, nbytes);
/* Keep the original tag bits (pair or object) */
@@ -558,11 +784,11 @@ static size_t transfer_children(object_t *obj)
}
}
static void swap_gc_ranges(void)
static void swap_gen1_gc_ranges(void)
{
gc_current_range = gc_other_range();
gc_free_ptr = gc_ranges[gc_current_range];
gc_range_end = gc_free_ptr + gc_soft_limit;
gc_gen1_current_range = gc_gen1_other_range();
gc_gen1_free_ptr = gc_gen1_ranges[gc_gen1_current_range];
gc_gen1_range_end = gc_gen1_free_ptr + gc_gen1_soft_limit;
}
static void transfer_roots(void)
@@ -599,7 +825,7 @@ static void process_weak_boxes(void)
}
else
{
/* Box hasn't been moved yet, but may live on as the value of a will. */
/* Box hasn't been moved. Could be Gen-0 pass, or may live on as the value of a will. */
assert(is_weak_box(wb));
box = _get_weak_box(wb);
}
@@ -609,15 +835,21 @@ static void process_weak_boxes(void)
/* The value in the box is reachable; update w/ new location. */
box->value = _get_object(box->value)->forward;
}
else if (is_object(box->value))
else if (is_object(box->value) && (gc_in_gen1_collection || is_gen0_object(box->value)))
{
/* The value in the box is an unreachable object; change to #f. */
/* Note that an object is considered unreachable via weak box when it could be finalized,
* even though it will be kept alive until the finalizer(s) is/are removed from the 'active'
* list and the finalizer(s) itself/themselves may restore the object to a reachable state. */
/* This last behavior is not recommended. */
/*
* NOTE: An object is considered unreachable via weak box when it could be finalized, even
* though it will be kept alive until any finalizers are removed from the 'active' list.
* The finalizer(s) may restore the object to a reachable state, in which case it will not
* be collected--but the weak reference will remain broken.
*
* Restoring references to otherwise GC'able objects is not recommended.
*
* The only known alternative would have been to invoke the finalizer while other objects
* may still be able to access the object (and create new references) via the weak box.
*/
box->value = FALSE_VALUE;
}
@@ -650,7 +882,7 @@ static void process_wills(void)
/* Move on to this will's 'next' pointer */
will = &w->next;
}
else
else if (gc_in_gen1_collection || is_gen0_object(w->value))
{
assert(is_object(w->value));
@@ -679,6 +911,8 @@ static void update_weak_box_list(void)
{
if (is_broken_heart(*wb))
{
assert(gen1_gc_range_of(_get_object(_get_object(*wb)->forward)) == gc_gen1_current_range);
/* The box itself is reachable; need to update 'next' pointer to new location */
*wb = _get_object(*wb)->forward;
@@ -699,36 +933,51 @@ static void update_weak_box_list(void)
static void update_soft_limit(size_t min_free)
{
size_t bytes_used = gc_free_ptr - gc_ranges[gc_current_range];
size_t bytes_used = gc_gen1_free_ptr - gc_gen1_ranges[gc_gen1_current_range];
size_t min_limit = bytes_used + min_free;
size_t new_limit = (4 * min_limit) / 3;
size_t new_limit = 2 * min_limit;
if (gc_soft_limit > GC_DEFLATE_SIZE)
if (gc_gen1_soft_limit > GC_DEFLATE_SIZE)
{
size_t deflate_limit = gc_soft_limit - GC_DEFLATE_SIZE;
size_t deflate_limit = gc_gen1_soft_limit - GC_DEFLATE_SIZE;
if (new_limit < deflate_limit)
new_limit = deflate_limit;
}
if (new_limit > gc_max_size)
new_limit = gc_max_size;
else if (new_limit < gc_min_size)
new_limit = gc_min_size;
if (new_limit > gc_gen1_max_size)
new_limit = gc_gen1_max_size;
else if (new_limit < gc_gen1_min_size)
new_limit = gc_gen1_min_size;
gc_soft_limit = new_limit;
gc_gen1_soft_limit = new_limit;
/* Update end of range to reflect new limit */
gc_range_end = gc_ranges[gc_current_range] + gc_soft_limit;
gc_gen1_range_end = gc_gen1_ranges[gc_gen1_current_range] + gc_gen1_soft_limit;
#ifndef NO_STATS
if (gc_soft_limit > gc_stats.high_water)
if (gc_gen1_soft_limit > gc_stats.high_water)
{
gc_stats.high_water = gc_soft_limit;
gc_stats.high_water = gc_gen1_soft_limit;
}
#endif
}
static size_t gc_gen1_block_of(void *obj)
{
const intptr_t offset = (intptr_t)obj
- (intptr_t)gc_gen1_ranges[gc_gen1_current_range];
return (offset & ((intptr_t)2 << 30)) / GC_DIRTY_BLOCK_SIZE;
}
void _gc_mark_updated_gen1_object(value_t v)
{
const size_t block = gc_gen1_block_of(_get_object(v));
assert(is_object(v) && !is_gen0_object(v));
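/* Set bit (block % 32) of word (block / 32); the block will be rescanned for Gen-0 references on the next Gen-0 pass. */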
gc_gen1_dirty_bits[block / 32] |= (1UL << (block % 32));
}
static void _out_of_memory(void) __attribute__ ((noreturn));
static void _out_of_memory(void)
{
@@ -736,35 +985,41 @@ static void _out_of_memory(void)
abort();
}
static void _collect_garbage(size_t min_free)
static void collect_gen1_garbage(size_t min_free)
{
bool collected_garbage = false;
gc_in_gen1_collection = true;
if (gc_enabled)
{
char *object_ptr;
#ifndef NO_STATS
size_t initial_free_space = gc_gen0_free_space() + gc_gen1_free_space();
#ifndef NO_TIMING_STATS
struct timespec start_time;
clock_gettime(TIMING_CLOCK, &start_time);
#endif
gc_stats.total_freed -= gc_free_space();
++gc_stats.collections;
#endif
//debug(("Collecting garbage...\n"));
//debug(("Performing Gen-1 garbage collection pass...\n"));
swap_gc_ranges();
gc_enabled = false;
swap_gen1_gc_ranges();
/* Record the start of each Gen-1 block as we go. */
memset(gc_gen1_block_starts, 0, gc_gen1_max_blocks * sizeof(char*));
/* New "current" range is initially empty, old one is full */
object_ptr = gc_free_ptr;
object_ptr = gc_gen1_free_ptr;
/* Prime the pump */
transfer_roots();
/* Keep transferring until no more objects in the new range refer to the old one,
* other than pending wills and weak boxes. */
while (object_ptr < gc_free_ptr)
while (object_ptr < gc_gen1_free_ptr)
{
object_ptr += gc_align(transfer_children((object_t*)object_ptr));
}
@@ -778,7 +1033,7 @@ static void _collect_garbage(size_t min_free)
* may be able to reach them, are not collected prematurely. process_wills() transfers
* the value of any will newly placed on the active list. Note that these values may
* be finalized in any order, and that any weak references have already been cleared. */
while (object_ptr < gc_free_ptr)
while (object_ptr < gc_gen1_free_ptr)
{
object_ptr += gc_align(transfer_children((object_t*)object_ptr));
}
@@ -787,10 +1042,21 @@ static void _collect_garbage(size_t min_free)
#ifndef NDEBUG
/* Clear old range, to make it easier to detect bugs. */
memset(gc_ranges[gc_other_range()], 0, gc_soft_limit);
memset(gc_gen1_ranges[gc_gen1_other_range()], 0, gc_gen1_soft_limit);
#endif
/*
* Gen-0 should be empty at this point; all active objects
* have been moved to the Gen-1 memory region.
*/
gc_gen1_clear_dirty_bits();
gc_gen0_free_ptr = gc_gen0_range;
collected_garbage = true;
//debug(("Finished collection with %d bytes to spare (out of %d bytes).\n", gc_free_space(), gc_soft_limit));
//debug(("Finished collection with %d bytes to spare (out of %d bytes).\n", gc_gen1_free_space(), gc_gen1_soft_limit));
gc_enabled = true;
#ifndef NO_STATS
#ifndef NO_TIMING_STATS
@@ -808,17 +1074,19 @@ static void _collect_garbage(size_t min_free)
if (nsec > gc_stats.max_ns)
gc_stats.max_ns = nsec;
}
#endif
gc_stats.total_freed -= initial_free_space;
gc_stats.total_freed += gc_gen0_free_space();
gc_stats.total_freed += gc_gen1_free_space();
++gc_stats.gen1_passes;
#endif
}
#endif
gc_stats.total_freed += gc_free_space();
#endif
update_soft_limit(min_free + gc_gen0_size);
update_soft_limit(min_free);
if (gc_free_space() < min_free)
if (gc_gen1_free_space() < min_free)
{
size_t bytes_used = gc_free_ptr - gc_ranges[gc_current_range];
size_t bytes_used = gc_gen1_free_ptr - gc_gen1_ranges[gc_gen1_current_range];
size_t need_bytes = bytes_used + min_free;
/* If GC is disabled then we can't move anything, so reallocating is impossible. */
@@ -832,39 +1100,45 @@ static void _collect_garbage(size_t min_free)
debug(("Ran out of free memory; will try to allocate more...\n"));
do {
release_assert(gc_max_size < (SIZE_MAX/2));
gc_max_size *= 2;
} while (gc_max_size < need_bytes);
release_assert(gc_gen1_max_size < (SIZE_MAX/2));
gc_gen1_max_size *= 2;
} while (gc_gen1_max_size < need_bytes);
/* Reallocate the unused space. */
{
char *unused_range = gc_ranges[gc_other_range()];
gc_ranges[gc_other_range()] = (char*)malloc(gc_max_size);
char *unused_range = gc_gen1_ranges[gc_gen1_other_range()];
gc_gen1_ranges[gc_gen1_other_range()] = (char*)malloc(gc_gen1_max_size);
free(unused_range);
}
/* See if reallocation succeeded. */
if (!gc_ranges[gc_other_range()])
if (!gc_gen1_ranges[gc_gen1_other_range()])
_out_of_memory();
/* Move everything into the newly enlarged space.
* Will update the soft-limit. */
_collect_garbage(0);
collect_gen1_garbage(0);
/* Reallocate the other space, now unused. */
free(gc_ranges[gc_other_range()]);
gc_ranges[gc_other_range()] = (char*)malloc(gc_max_size);
free(gc_gen1_ranges[gc_gen1_other_range()]);
gc_gen1_ranges[gc_gen1_other_range()] = (char*)malloc(gc_gen1_max_size);
/* Ensure second reallocation succeeded. */
if (!gc_ranges[gc_other_range()])
if (!gc_gen1_ranges[gc_gen1_other_range()])
_out_of_memory();
}
gc_in_gen1_collection = false;
/* If Gen-1 was invoked within Gen-0, skip the rest: we're done. */
if (gc_in_gen0_collection && collected_garbage)
longjmp(gc_gen0_end_ctx, 1);
}
void collect_garbage(size_t min_free)
{
bool was_enabled = set_gc_enabled(true);
_collect_garbage(min_free);
collect_gen1_garbage(min_free);
set_gc_enabled(was_enabled);
}
@@ -1075,22 +1349,23 @@ void fprint_value(FILE *f, value_t v)
void fprint_gc_stats(FILE *f)
{
if (gc_stats.collections > 0)
if ((gc_stats.gen0_passes + gc_stats.gen1_passes) > 0)
{
const double total_time = gc_stats.total_ns / 1.0e9;
const double max_time = gc_stats.max_ns / 1.0e9;
fprintf(f, "GC: %lld bytes freed by %d GCs in %0.6f sec => %0.3f MB/sec.\n",
fprintf(f, "GC: %lld bytes freed by %d+%d GCs in %0.6f sec => %0.3f MB/sec.\n",
gc_stats.total_freed,
gc_stats.collections,
gc_stats.gen0_passes,
gc_stats.gen1_passes,
total_time,
(gc_stats.total_freed / total_time) / (1024*1024));
fprintf(f, "GC: Avg. time was %0.6f sec, max %0.6f.\n",
(total_time / gc_stats.collections), max_time);
(total_time / (gc_stats.gen0_passes + gc_stats.gen1_passes)), max_time);
fprintf(f, "GC: The soft-limit peaked at %d bytes out of %d allocated.\n",
gc_stats.high_water, gc_max_size);
gc_stats.high_water, gc_gen1_max_size);
}
else
{

gc.h

@@ -82,7 +82,7 @@ typedef void (builtin_fn_t)(struct interp_state *state);
/* Invoke this macro after creating any reference from a Gen-1 GC object to a Gen-0 object. */
/* If unsure, invoke the macro; at most there will be a slight cost in performance. */
/* Failing to invoke the macro before the next Gen-0 GC can lead to incorrect behavior. */
#define WRITE_BARRIER(gen1_value) ((void)0)
#define WRITE_BARRIER(value) ((void)_gc_write_barrier((value)))
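/* Illustrative use (sketch): if a Gen-1 pair p has its cdr overwritten with a possibly-Gen-0 value,
 * follow the store with WRITE_BARRIER(p) so that p's block is rescanned on the next Gen-0 pass. */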
typedef struct object
{
@@ -166,7 +166,8 @@ typedef unsigned long long llsize_t;
typedef struct gc_stats
{
int collections;
int gen0_passes;
int gen1_passes;
nsec_t total_ns;
llsize_t total_freed;
size_t high_water;
@@ -392,6 +393,7 @@ void unregister_gc_root(gc_root_t *root);
void *gc_alloc(size_t nbytes);
void collect_garbage(size_t min_free);
bool set_gc_enabled(bool enable);
void _gc_mark_updated_gen1_object(value_t v);
void fprint_value(FILE *f, value_t v);
void fprint_gc_stats(FILE *f);
@@ -406,6 +408,28 @@ static inline void print_gc_stats(void)
fprint_gc_stats(stderr);
}
static inline bool is_gen0_object(value_t v)
{
/* These private variables are exported ONLY for use by this inline function. */
extern char *gc_gen0_range;
extern size_t gc_gen0_size;
const char *const obj = (const char*)_get_object(v);
return is_object(v)
&& (obj >= (char*)gc_gen0_range)
&& ((obj - (char*)gc_gen0_range) < gc_gen0_size);
}
/* Don't call this directly; use the WRITE_BARRIER macro. */
static inline void _gc_write_barrier(value_t v)
{
if (is_object(v) && !is_gen0_object(v))
{
_gc_mark_updated_gen1_object(v);
}
}
/* Implements the release_assert() macro */
void _release_assert(bool expr, const char *str, const char *file, int line);
/* To be provided by the main application */


@@ -34,8 +34,20 @@ void out_of_memory(void)
int main(int argc, char **argv)
{
srand((unsigned int)time(NULL));
#ifdef __linux__
{
FILE *f = fopen("/dev/urandom", "rb");
if (f)
{
unsigned int seed;
if (fread(&seed, sizeof(seed), 1, f)==1)
srand(seed);
fclose(f);
}
}
#endif
gc_init(256*1024, 1024*1024);
gc_init(12*1024*1024, 64*1024*1024);
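/* 12 MB Gen-0 range; Gen-1 soft limit starts at 24 MB, with a 64 MB (growable) maximum per range. */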
builtin_init();
interpreter_init();
@@ -142,9 +154,17 @@ static void test_weak_boxes_and_wills(void)
static void test_garbage_collection(bool keep_going)
{
gc_root_t root;
gc_root_t root2;
int count = 0;
register_gc_root(&root, NIL);
register_gc_root(&root2, NIL);
/* Construct a large, static tree w/ many links. */
for (int i = 0; i < 1000000; ++i)
{
root2.value = cons(root2.value, root2.value);
}
while (1)
{
@@ -156,7 +176,7 @@ static void test_garbage_collection(bool keep_going)
}
else
{
switch (r & 7)
switch (r & 15)
{
case 0:
root.value = cons(fixnum_value(rand()), root.value);
@@ -173,19 +193,19 @@ static void test_garbage_collection(bool keep_going)
WRITE_BARRIER(_CDR(root.value));
break;
case 4:
case 5:
case 6:
case 7:
{
value_t s = make_vector(4, FALSE_VALUE);
_get_vector(s)->elements[r & 3] = root.value;
root.value = s;
}
break;
default:
(void)cons(make_box(NIL), cons(NIL, cons(NIL, NIL)));
break;
}
}
if (++count >= 50000000)
if (++count >= 80000000)
{
print_gc_stats();
nl();
@@ -199,6 +219,7 @@ static void test_garbage_collection(bool keep_going)
}
unregister_gc_root(&root);
unregister_gc_root(&root2);
}
static void test_reader(void)