path: root/gc.c
author    Peter Zhu <peter@peterzhu.ca>    2021-03-12 19:36:58 +0000
committer Peter Zhu <peter@peterzhu.ca>    2021-03-24 14:31:10 -0400
commit    b25361f7319cac953145d9d15e2e673e560ec3d9 (patch)
tree      6df9c6dba3398bb5fb6bb429ddec632512160b0b /gc.c
parent    f9f13a4f6d8be706b17efc089c28f7bc617ef549 (diff)
Change heap walking to be safe for object allocation
Notes:
    Merged: https://github.com/ruby/ruby/pull/4263
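In essence, the commit snapshots the eden heap's page list into a buffer
before walking it, so a callback that allocates objects (and thereby adds
pages to the list) can no longer make the walk loop forever. Below is a
minimal, self-contained sketch of that pattern; every name in it (node,
push, visit) is hypothetical and stands in for Ruby's heap pages and
each_obj_callback, and the snap[i] != n check mirrors the
pages[i] != page check in objspace_each_objects_try below:

    /* Sketch of the snapshot-walk pattern this commit applies to gc.c.
     * All names here are hypothetical; this is not Ruby's actual API. */
    #include <stdio.h>
    #include <stdlib.h>

    struct node { int value; struct node *next; };

    static struct node *head, *tail;

    static void
    push(int value)
    {
        struct node *n = calloc(1, sizeof(*n));
        if (!n) abort();
        n->value = value;
        if (tail) tail->next = n; else head = n;
        tail = n;
    }

    /* Callback that mutates the list mid-walk, like an each_obj_callback
     * that allocates objects and thereby adds heap pages. */
    static void
    visit(struct node *n)
    {
        printf("visiting %d\n", n->value);
        push(n->value + 100); /* grows the list while we are walking it */
    }

    int
    main(void)
    {
        for (int i = 0; i < 3; i++) push(i);

        /* Snapshot the list into a buffer before walking it. */
        size_t count = 0;
        for (struct node *n = head; n; n = n->next) count++;
        struct node **snap = malloc(count * sizeof(*snap));
        if (!snap) abort();
        size_t i = 0;
        for (struct node *n = head; n; n = n->next) snap[i++] = n;

        /* Walk the live list, visiting only snapshotted nodes. Nodes
         * appended by visit() are absent from the buffer, so the walk
         * terminates even though the list grew. */
        struct node *n = head;
        for (i = 0; i < count; i++) {
            if (n == NULL) break;       /* reached the end of the live list */
            if (snap[i] != n) continue; /* snapshot entry no longer present */
            visit(n);
            n = n->next;
        }
        free(snap);
        return 0;
    }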
Diffstat (limited to 'gc.c')
-rw-r--r--    gc.c    130
1 file changed, 86 insertions(+), 44 deletions(-)
diff --git a/gc.c b/gc.c
index f7deedcbcd..8218f88d0d 100644
--- a/gc.c
+++ b/gc.c
@@ -3216,53 +3216,67 @@ Init_gc_stress(void)
typedef int each_obj_callback(void *, void *, size_t, void *);
-static void objspace_each_objects(rb_objspace_t *objspace, each_obj_callback *callback, void *data);
+static void objspace_each_objects(rb_objspace_t *objspace, each_obj_callback *callback, void *data, bool protected);
static void objspace_reachable_objects_from_root(rb_objspace_t *, void (func)(const char *, VALUE, void *), void *);
-struct each_obj_args {
+struct each_obj_data {
rb_objspace_t *objspace;
+ bool reenable_incremental;
+
each_obj_callback *callback;
void *data;
+
+ struct heap_page **pages;
+ size_t pages_count;
};
-static void
-objspace_each_objects_without_setup(rb_objspace_t *objspace, each_obj_callback *callback, void *data)
+static VALUE
+objspace_each_objects_ensure(VALUE arg)
{
- size_t i;
- struct heap_page *page;
- RVALUE *pstart = NULL, *pend;
-
- i = 0;
- while (i < heap_allocated_pages) {
- while (0 < i && pstart < heap_pages_sorted[i-1]->start) i--;
- while (i < heap_allocated_pages && heap_pages_sorted[i]->start <= pstart) i++;
- if (heap_allocated_pages <= i) break;
-
- page = heap_pages_sorted[i];
-
- pstart = page->start;
- pend = pstart + page->total_slots;
+ struct each_obj_data *data = (struct each_obj_data *)arg;
+ rb_objspace_t *objspace = data->objspace;
- if ((*callback)(pstart, pend, sizeof(RVALUE), data)) {
- break;
- }
+ /* Reenable incremental GC */
+ if (data->reenable_incremental) {
+ objspace->flags.dont_incremental = FALSE;
}
-}
-static VALUE
-objspace_each_objects_protected(VALUE arg)
-{
- struct each_obj_args *args = (struct each_obj_args *)arg;
- objspace_each_objects_without_setup(args->objspace, args->callback, args->data);
+ /* Free pages buffer */
+ struct heap_page **pages = data->pages;
+ GC_ASSERT(pages);
+ free(pages);
+
return Qnil;
}
static VALUE
-incremental_enable(VALUE _)
+objspace_each_objects_try(VALUE arg)
{
- rb_objspace_t *objspace = &rb_objspace;
+ struct each_obj_data *data = (struct each_obj_data *)arg;
+ rb_objspace_t *objspace = data->objspace;
+ struct heap_page **pages = data->pages;
+ size_t pages_count = data->pages_count;
+
+ struct heap_page *page = list_top(&heap_eden->pages, struct heap_page, page_node);
+ for (size_t i = 0; i < pages_count; i++) {
+        /* If we have reached the end of the linked list, then there are
+         * no more pages, so break. */
+ if (page == NULL) break;
+
+ /* If this page does not match the one in the buffer, then move to
+ * the next page in the buffer. */
+ if (pages[i] != page) continue;
+
+ RVALUE *pstart = page->start;
+ RVALUE *pend = pstart + page->total_slots;
+
+ if ((*data->callback)(pstart, pend, sizeof(RVALUE), data->data)) {
+ break;
+ }
+
+ page = list_next(&heap_eden->pages, page, page_node);
+ }
- objspace->flags.dont_incremental = FALSE;
return Qnil;
}
@@ -3305,30 +3319,58 @@ incremental_enable(VALUE _)
void
rb_objspace_each_objects(each_obj_callback *callback, void *data)
{
- objspace_each_objects(&rb_objspace, callback, data);
+ objspace_each_objects(&rb_objspace, callback, data, TRUE);
}
static void
-objspace_each_objects(rb_objspace_t *objspace, each_obj_callback *callback, void *data)
+objspace_each_objects(rb_objspace_t *objspace, each_obj_callback *callback, void *data, bool protected)
{
- int prev_dont_incremental = objspace->flags.dont_incremental;
+ /* Disable incremental GC */
+ bool reenable_incremental = FALSE;
+ if (protected) {
+ reenable_incremental = !objspace->flags.dont_incremental;
- gc_rest(objspace);
- objspace->flags.dont_incremental = TRUE;
-
- if (prev_dont_incremental) {
- objspace_each_objects_without_setup(objspace, callback, data);
+ gc_rest(objspace);
+ objspace->flags.dont_incremental = TRUE;
}
- else {
- struct each_obj_args args = {objspace, callback, data};
- rb_ensure(objspace_each_objects_protected, (VALUE)&args, incremental_enable, Qnil);
+
+ /* Create pages buffer */
+ size_t size = size_mul_or_raise(heap_allocated_pages, sizeof(struct heap_page *), rb_eRuntimeError);
+ struct heap_page **pages = malloc(size);
+ if (!pages) rb_memerror();
+
+    /* Set up the pages buffer by iterating over all pages in the current
+     * eden heap. This gives us a snapshot of the heap taken before the
+     * callback is run over each page in the buffer, so it is safe for the
+     * callback to allocate objects without the possibility of entering an
+     * infinite loop. */
+ struct heap_page *page;
+ size_t pages_count = 0;
+ list_for_each(&heap_eden->pages, page, page_node) {
+ pages[pages_count] = page;
+ pages_count++;
}
+ GC_ASSERT(pages_count <= heap_allocated_pages);
+
+ /* Run the callback */
+ struct each_obj_data each_obj_data = {
+ .objspace = objspace,
+ .reenable_incremental = reenable_incremental,
+
+ .callback = callback,
+ .data = data,
+
+ .pages = pages,
+ .pages_count = pages_count
+ };
+ rb_ensure(objspace_each_objects_try, (VALUE)&each_obj_data,
+ objspace_each_objects_ensure, (VALUE)&each_obj_data);
}
void
rb_objspace_each_objects_without_setup(each_obj_callback *callback, void *data)
{
- objspace_each_objects_without_setup(&rb_objspace, callback, data);
+ objspace_each_objects(&rb_objspace, callback, data, FALSE);
}
struct os_each_struct {
@@ -7133,7 +7175,7 @@ gc_verify_internal_consistency_(rb_objspace_t *objspace)
/* check relations */
- objspace_each_objects_without_setup(objspace, verify_internal_consistency_i, &data);
+ objspace_each_objects(objspace, verify_internal_consistency_i, &data, FALSE);
if (data.err_count != 0) {
#if RGENGC_CHECK_MODE >= 5
@@ -9545,7 +9587,7 @@ gc_verify_compaction_references(rb_execution_context_t *ec, VALUE self, VALUE do
gc_start_internal(ec, self, Qtrue, Qtrue, Qtrue, Qtrue);
objspace_reachable_objects_from_root(objspace, root_obj_check_moved_i, NULL);
- objspace_each_objects(objspace, heap_check_moved_i, NULL);
+ objspace_each_objects(objspace, heap_check_moved_i, NULL, TRUE);
return gc_compact_stats(ec, self);
}
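
For context, here is a sketch of how a caller might use the entry point
this commit makes allocation-safe. The each_obj_callback signature and the
nonzero-return-stops-iteration convention are taken from the diff above;
count_slots_i and count_heap_slots are illustrative names, not functions
that exist in gc.c:

    /* Illustrative only: a counting callback matching the
     * each_obj_callback typedef above. vstart/vend delimit one heap
     * page's slots; stride is sizeof(RVALUE). Returning nonzero would
     * stop the iteration early. */
    static int
    count_slots_i(void *vstart, void *vend, size_t stride, void *data)
    {
        size_t *count = (size_t *)data;
        for (char *p = (char *)vstart; p < (char *)vend; p += stride) {
            (*count)++;
        }
        return 0; /* keep iterating over the remaining pages */
    }

    static size_t
    count_heap_slots(void)
    {
        size_t count = 0;
        /* After this commit, the callback may safely allocate objects:
         * pages created during the walk are not in the snapshot buffer. */
        rb_objspace_each_objects(count_slots_i, &count);
        return count;
    }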