summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--ChangeLog46
-rw-r--r--array.c35
-rw-r--r--bignum.c2
-rw-r--r--complex.c2
-rw-r--r--debug.c4
-rw-r--r--gc.c819
-rw-r--r--include/ruby/ruby.h142
-rw-r--r--internal.h7
-rw-r--r--marshal.c2
-rw-r--r--node.h14
-rw-r--r--numeric.c2
-rw-r--r--object.c2
-rw-r--r--random.c2
-rw-r--r--range.c2
-rw-r--r--rational.c2
-rw-r--r--string.c30
-rw-r--r--variable.c5
-rw-r--r--vm_insnhelper.c2
18 files changed, 985 insertions, 135 deletions
diff --git a/ChangeLog b/ChangeLog
index dfab373..27a77ea 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,49 @@
+Tue May 14 02:47:30 2013 Koichi Sasada <ko1@atdot.net>
+
+ * gc.c: support RGENGC. [ruby-trunk - Feature #8339]
+ See this ticket about RGENGC.
+
+ * gc.c: Add several flags:
+ * RGENGC_DEBUG: if >0, then prints debug information.
+ * RGENGC_CHECK_MODE: if >0, add assertions.
+ * RGENGC_PROFILE: if >0, add profiling features.
+ check GC.stat and GC::Profiler.
+
+ * include/ruby/ruby.h: disable RGENGC by default (USE_RGENGC == 0).
+
+ * array.c: add write barriers for T_ARRAY and generate sunny objects.
+
+ * include/ruby/ruby.h (RARRAY_PTR_USE): added. Use this macro if
+ you want to access raw pointers. If you modify the contents which
+ pointer pointed, then you need to care write barrier.
+
+ * bignum.c, marshal.c, random.c: generate T_BIGNUM sunny objects.
+
+ * complex.c, include/ruby/ruby.h: add write barriers for T_COMPLEX
+ and generate sunny objects.
+
+ * rational.c (nurat_s_new_internal), include/ruby/ruby.h: add write
+ barriers for T_RATIONAL and generate sunny objects.
+
+ * internal.h: add write barriers for RBasic::klass.
+
+ * numeric.c (rb_float_new_in_heap): generate sunny T_FLOAT objects.
+
+ * object.c (rb_class_allocate_instance), range.c:
+ generate sunny T_OBJECT objects.
+
+ * string.c: add write barriers for T_STRING and generate sunny objects.
+
+ * variable.c: add write barriers for ivars.
+
+ * vm_insnhelper.c (vm_setivar): ditto.
+
+ * include/ruby/ruby.h, debug.c: use two flags
+ FL_WB_PROTECTED and FL_OLDGEN.
+
+ * node.h (NODE_FL_CREF_PUSHED_BY_EVAL, NODE_FL_CREF_OMOD_SHARED):
+ move flag bits.
+
Tue May 14 01:54:48 2013 Koichi Sasada <ko1@atdot.net>
* gc.c: remove rb_objspace_t::marked_num.
diff --git a/array.c b/array.c
index f605af2..f5f6d6f 100644
--- a/array.c
+++ b/array.c
@@ -128,10 +128,12 @@ memfill(register VALUE *mem, register long size, register VALUE val)
#define ARY_SHARED(ary) (assert(ARY_SHARED_P(ary)), RARRAY(ary)->as.heap.aux.shared)
#define ARY_SET_SHARED(ary, value) do { \
- assert(!ARY_EMBED_P(ary)); \
- assert(ARY_SHARED_P(ary)); \
- assert(ARY_SHARED_ROOT_P(value)); \
- RARRAY(ary)->as.heap.aux.shared = (value); \
+ const VALUE _ary_ = (ary); \
+ const VALUE _value_ = (value); \
+ assert(!ARY_EMBED_P(_ary_)); \
+ assert(ARY_SHARED_P(_ary_)); \
+ assert(ARY_SHARED_ROOT_P(_value_)); \
+ OBJ_WRITE(_ary_, &RARRAY(_ary_)->as.heap.aux.shared, _value_); \
} while (0)
#define RARRAY_SHARED_ROOT_FLAG FL_USER5
#define ARY_SHARED_ROOT_P(ary) (FL_TEST((ary), RARRAY_SHARED_ROOT_FLAG))
@@ -370,7 +372,7 @@ rb_ary_shared_with_p(VALUE ary1, VALUE ary2)
static VALUE
ary_alloc(VALUE klass)
{
- NEWOBJ_OF(ary, struct RArray, klass, T_ARRAY);
+ NEWOBJ_OF(ary, struct RArray, klass, T_ARRAY | (RGENGC_WB_PROTECTED_ARRAY ? FL_WB_PROTECTED : 0));
FL_SET_EMBED((VALUE)ary);
ARY_SET_EMBED_LEN((VALUE)ary, 0);
@@ -409,6 +411,14 @@ ary_new(VALUE klass, long capa)
ARY_SET_PTR(ary, ALLOC_N(VALUE, capa));
ARY_SET_CAPA(ary, capa);
ARY_SET_HEAP_LEN(ary, 0);
+
+ /* NOTE: `ary' can be old because the following sequence is possible.
+ * (1) ary = ary_alloc();
+ * (2) GC (for (3)) -> promote ary
+ * (3) ALLOC_N(VALUE, capa)
+ * So we force ary to remain a young object.
+ */
+ RBASIC(ary)->flags &= ~FL_OLDGEN;
}
return ary;
@@ -455,7 +465,9 @@ rb_ary_new4(long n, const VALUE *elts)
ary = rb_ary_new2(n);
if (n > 0 && elts) {
- MEMCPY(RARRAY_PTR(ary), elts, VALUE, n);
+ RARRAY_PTR_USE(ary, ptr, {
+ MEMCPY(ptr, elts, VALUE, n); /* new array is not old gen */
+ });
ARY_SET_LEN(ary, n);
}
@@ -512,7 +524,7 @@ ary_make_shared(VALUE ary)
return ary;
}
else {
- NEWOBJ_OF(shared, struct RArray, 0, T_ARRAY);
+ NEWOBJ_OF(shared, struct RArray, 0, T_ARRAY | (RGENGC_WB_PROTECTED_ARRAY ? FL_WB_PROTECTED : 0));
FL_UNSET_EMBED(shared);
ARY_SET_LEN((VALUE)shared, ARY_CAPA(ary));
@@ -649,8 +661,8 @@ rb_ary_initialize(int argc, VALUE *argv, VALUE ary)
rb_ary_modify(ary);
if (argc == 0) {
- if (ARY_OWNS_HEAP_P(ary) && RARRAY_PTR(ary)) {
- xfree(RARRAY_PTR(ary));
+ if (ARY_OWNS_HEAP_P(ary) && RARRAY_RAWPTR(ary) != 0) {
+ xfree(RARRAY_RAWPTR(ary));
}
rb_ary_unshare_safe(ary);
FL_SET_EMBED(ary);
@@ -690,7 +702,10 @@ rb_ary_initialize(int argc, VALUE *argv, VALUE ary)
}
}
else {
- memfill(RARRAY_PTR(ary), len, val);
+ RARRAY_PTR_USE(ary, ptr, {
+ memfill(ptr, len, val);
+ });
+ OBJ_WRITTEN(ary, Qundef, val);
ARY_SET_LEN(ary, len);
}
return ary;
diff --git a/bignum.c b/bignum.c
index ee9d3dd..0b9573f 100644
--- a/bignum.c
+++ b/bignum.c
@@ -166,7 +166,7 @@ rb_big_resize(VALUE big, long len)
static VALUE
bignew_1(VALUE klass, long len, int sign)
{
- NEWOBJ_OF(big, struct RBignum, klass, T_BIGNUM);
+ NEWOBJ_OF(big, struct RBignum, klass, T_BIGNUM | (RGENGC_WB_PROTECTED_BIGNUM ? FL_WB_PROTECTED : 0));
RBIGNUM_SET_SIGN(big, sign?1:0);
if (len <= RBIGNUM_EMBED_LEN_MAX) {
RBASIC(big)->flags |= RBIGNUM_EMBED_FLAG;
diff --git a/complex.c b/complex.c
index 8420c19..14869e0 100644
--- a/complex.c
+++ b/complex.c
@@ -313,7 +313,7 @@ k_complex_p(VALUE x)
inline static VALUE
nucomp_s_new_internal(VALUE klass, VALUE real, VALUE imag)
{
- NEWOBJ_OF(obj, struct RComplex, klass, T_COMPLEX);
+ NEWOBJ_OF(obj, struct RComplex, klass, T_COMPLEX | (RGENGC_WB_PROTECTED_COMPLEX ? FL_WB_PROTECTED : 0));
RCOMPLEX_SET_REAL(obj, real);
RCOMPLEX_SET_IMAG(obj, imag);
diff --git a/debug.c b/debug.c
index 4909a45..e7dc6fc 100644
--- a/debug.c
+++ b/debug.c
@@ -32,8 +32,8 @@ const union {
RUBY_ENC_CODERANGE_7BIT = ENC_CODERANGE_7BIT,
RUBY_ENC_CODERANGE_VALID = ENC_CODERANGE_VALID,
RUBY_ENC_CODERANGE_BROKEN = ENC_CODERANGE_BROKEN,
- RUBY_FL_RESERVED1 = FL_RESERVED1,
- RUBY_FL_RESERVED2 = FL_RESERVED2,
+ RUBY_FL_WB_PROTECTED = FL_WB_PROTECTED,
+ RUBY_FL_OLDGEN = FL_OLDGEN,
RUBY_FL_FINALIZE = FL_FINALIZE,
RUBY_FL_TAINT = FL_TAINT,
RUBY_FL_UNTRUSTED = FL_UNTRUSTED,
diff --git a/gc.c b/gc.c
index cf02d33..35c152a 100644
--- a/gc.c
+++ b/gc.c
@@ -25,6 +25,7 @@
#include "ruby_atomic.h"
#include "probes.h"
#include <stdio.h>
+#include <stdarg.h>
#include <setjmp.h>
#include <sys/types.h>
#include <assert.h>
@@ -95,6 +96,37 @@ static ruby_gc_params_t initial_params = {
#define nomem_error GET_VM()->special_exceptions[ruby_error_nomemory]
+#if USE_RGENGC
+/* RGENGC_DEBUG:
+ * 1:
+ * 2: remember set operation
+ * 3: mark
+ * 4:
+ * 5: sweep
+ */
+#ifndef RGENGC_DEBUG
+#define RGENGC_DEBUG 0
+#endif
+
+/* RGENGC_CHECK_MODE
+ * 0:
+ * 1: enable assertions
+ * 2: enable bits check (for debugging)
+ */
+#ifndef RGENGC_CHECK_MODE
+#define RGENGC_CHECK_MODE 0
+#endif
+
+#ifndef RGENGC_PROFILE
+#define RGENGC_PROFILE 0
+#endif
+
+#else /* USE_RGENGC */
+#define RGENGC_DEBUG 0
+#define RGENGC_CHECK_MODE 0
+#define RGENGC_PROFILE 0
+#endif
+
#ifndef GC_PROFILE_MORE_DETAIL
#define GC_PROFILE_MORE_DETAIL 0
#endif
@@ -167,7 +199,6 @@ typedef struct RVALUE {
struct heaps_slot {
struct heaps_header *header;
- uintptr_t *bits;
RVALUE *freelist;
struct heaps_slot *next;
struct heaps_slot *prev;
@@ -176,7 +207,10 @@ struct heaps_slot {
struct heaps_header {
struct heaps_slot *base;
- uintptr_t *bits;
+ uintptr_t *mark_bits;
+#if USE_RGENGC
+ uintptr_t *rememberset_bits;
+#endif
RVALUE *start;
RVALUE *end;
size_t limit;
@@ -254,6 +288,18 @@ typedef struct rb_objspace {
size_t size;
double invoke_time;
+#if USE_RGENGC
+ size_t minor_gc_count;
+ size_t major_gc_count;
+#ifdef RGENGC_PROFILE
+ size_t generated_sunny_object_count;
+ size_t generated_shady_object_count;
+ size_t shade_operation_count;
+ size_t remembered_sunny_object_count;
+ size_t remembered_shady_object_count;
+#endif /* RGENGC_PROFILE */
+#endif /* USE_RGENGC */
+
#if GC_PROFILE_MORE_DETAIL
double gc_sweep_start_time; /* temporary profiling space */
#endif
@@ -268,6 +314,16 @@ typedef struct rb_objspace {
void *data;
void (*mark_func)(VALUE v, void *data);
} *mark_func_data;
+
+ struct {
+ int during_minor_gc;
+ int parent_object_is_promoted;
+
+ /* for check mode */
+ VALUE parent_object;
+ VALUE interesting_object;
+ } rgengc;
+
} rb_objspace_t;
#if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
@@ -318,7 +374,8 @@ int *ruby_initial_gc_stress_ptr = &rb_objspace.gc_stress;
#define HEAP_HEADER(p) ((struct heaps_header *)(p))
#define GET_HEAP_HEADER(x) (HEAP_HEADER((uintptr_t)(x) & ~(HEAP_ALIGN_MASK)))
#define GET_HEAP_SLOT(x) (GET_HEAP_HEADER(x)->base)
-#define GET_HEAP_BITMAP(x) (GET_HEAP_HEADER(x)->bits)
+#define GET_HEAP_MARK_BITS(x) (GET_HEAP_HEADER(x)->mark_bits)
+#define GET_HEAP_REMEMBERSET_BITS(x) (GET_HEAP_HEADER(x)->rememberset_bits)
#define NUM_IN_SLOT(p) (((uintptr_t)(p) & HEAP_ALIGN_MASK)/sizeof(RVALUE))
#define BITMAP_INDEX(p) (NUM_IN_SLOT(p) / (sizeof(uintptr_t) * CHAR_BIT))
#define BITMAP_OFFSET(p) (NUM_IN_SLOT(p) & ((sizeof(uintptr_t) * CHAR_BIT)-1))
@@ -374,6 +431,49 @@ static inline void gc_prof_sweep_slot_timer_start(rb_objspace_t *);
static inline void gc_prof_sweep_slot_timer_stop(rb_objspace_t *);
static inline void gc_prof_set_malloc_info(rb_objspace_t *);
+static const char *obj_type_name(VALUE obj);
+
+#if USE_RGENGC
+static int rgengc_remembered(rb_objspace_t *objspace, VALUE obj);
+static void rgengc_remember(rb_objspace_t *objspace, VALUE obj);
+static void rgengc_rememberset_clear(rb_objspace_t *objspace);
+static size_t rgengc_rememberset_mark(rb_objspace_t *objspace);
+
+#define FL_TEST2(x,f) ((RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) ? (rb_bug("FL_TEST2: SPECIAL_CONST"), 0) : FL_TEST_RAW((x),(f)))
+#define FL_SET2(x,f) do {if (RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) rb_bug("FL_SET2: SPECIAL_CONST"); RBASIC(x)->flags |= (f);} while (0)
+#define FL_UNSET2(x,f) do {if (RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) rb_bug("FL_UNSET2: SPECIAL_CONST"); RBASIC(x)->flags &= ~(f);} while (0)
+
+#define RVALUE_SUNNY(x) FL_TEST2((x), FL_WB_PROTECTED)
+#define RVALUE_SHADY(x) (!RVALUE_SUNNY(x))
+#define RVALUE_PROMOTED(x) FL_TEST2((x), FL_OLDGEN)
+
+#define RVALUE_PROMOTE(x) FL_SET2((x), FL_OLDGEN)
+#define RVALUE_DEMOTE(x) FL_UNSET2((x), FL_OLDGEN)
+#endif
+
+static void
+rgengc_report_body(int level, rb_objspace_t *objspace, const char *fmt, ...)
+{
+ if (level <= RGENGC_DEBUG) {
+ char buf[1024];
+ FILE *out = stderr;
+ va_list args;
+ const char *status = " ";
+
+ if (during_gc) {
+ status = objspace->rgengc.during_minor_gc ? "-" : "+";
+ }
+
+ va_start(args, fmt);
+ vsnprintf(buf, 1024, fmt, args);
+ va_end(args);
+
+ fprintf(out, "%s|", status);
+ fputs(buf, out);
+ }
+}
+
+#define rgengc_report if (RGENGC_DEBUG) rgengc_report_body
/*
--------------------------- ObjectSpace -----------------------------
@@ -420,7 +520,10 @@ rb_objspace_free(rb_objspace_t *objspace)
if (objspace->heap.sorted) {
size_t i;
for (i = 0; i < heaps_used; ++i) {
- free(objspace->heap.sorted[i]->bits);
+ free(objspace->heap.sorted[i]->mark_bits);
+#if USE_RGENGC
+ free(objspace->heap.sorted[i]->rememberset_bits);
+#endif
aligned_free(objspace->heap.sorted[i]);
}
free(objspace->heap.sorted);
@@ -461,8 +564,8 @@ allocate_sorted_heaps(rb_objspace_t *objspace, size_t next_heaps_length)
rb_memerror();
}
- for (i = 0; i < add; i++) {
- bits = (struct heaps_free_bitmap *)malloc(HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
+ for (i = 0; i < add * (USE_RGENGC ? 2 : 1) /* mark bits and rememberset bits */; i++) {
+ bits = (struct heaps_free_bitmap *)malloc(HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
if (bits == 0) {
during_gc = 0;
rb_memerror();
@@ -487,6 +590,23 @@ unlink_free_heap_slot(rb_objspace_t *objspace, struct heaps_slot *slot)
slot->free_next = NULL;
}
+static uintptr_t *
+alloc_bitmap(rb_objspace_t *objspace)
+{
+ uintptr_t *bits = (uintptr_t *)objspace->heap.free_bitmap;
+ assert(objspace->heap.free_bitmap != NULL);
+ objspace->heap.free_bitmap = objspace->heap.free_bitmap->next;
+ memset(bits, 0, HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
+ return bits;
+}
+
+static void
+free_bitmap(rb_objspace_t *objspace, uintptr_t *bits)
+{
+ ((struct heaps_free_bitmap *)(bits))->next = objspace->heap.free_bitmap;
+ objspace->heap.free_bitmap = (struct heaps_free_bitmap *)bits;
+}
+
static void
assign_heap_slot(rb_objspace_t *objspace)
{
@@ -545,11 +665,10 @@ assign_heap_slot(rb_objspace_t *objspace)
objspace->heap.sorted[hi]->end = (p + objs);
objspace->heap.sorted[hi]->base = heaps;
objspace->heap.sorted[hi]->limit = objs;
- assert(objspace->heap.free_bitmap != NULL);
- heaps->bits = (uintptr_t *)objspace->heap.free_bitmap;
- objspace->heap.sorted[hi]->bits = (uintptr_t *)objspace->heap.free_bitmap;
- objspace->heap.free_bitmap = objspace->heap.free_bitmap->next;
- memset(heaps->bits, 0, HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
+ objspace->heap.sorted[hi]->mark_bits = alloc_bitmap(objspace);
+#if USE_RGENGC
+ objspace->heap.sorted[hi]->rememberset_bits = alloc_bitmap(objspace);
+#endif
pend = p + objs;
if (lomem == 0 || lomem > p) lomem = p;
if (himem < pend) himem = pend;
@@ -557,6 +676,7 @@ assign_heap_slot(rb_objspace_t *objspace)
while (p < pend) {
p->as.free.flags = 0;
+ rgengc_report(3, objspace, "assign_heap_slot: %p (%s) is added to freelist\n", p, obj_type_name((VALUE)p));
p->as.free.next = heaps->freelist;
heaps->freelist = p;
p++;
@@ -624,6 +744,9 @@ set_heaps_increment(rb_objspace_t *objspace)
heaps_inc = next_heaps_length - heaps_used;
+ rgengc_report(5, objspace, "set_heaps_increment: heaps_length: %d, next_heaps_length: %d, heaps_inc: %d\n",
+ heaps_length, next_heaps_length, heaps_inc);
+
if (next_heaps_length > heaps_length) {
allocate_sorted_heaps(objspace, next_heaps_length);
heaps_length = next_heaps_length;
@@ -633,6 +756,8 @@ set_heaps_increment(rb_objspace_t *objspace)
static int
heaps_increment(rb_objspace_t *objspace)
{
+ rgengc_report(5, objspace, "heaps_increment: heaps_inc: %d\n", heaps_inc);
+
if (heaps_inc > 0) {
assign_heap_slot(objspace);
heaps_inc--;
@@ -654,7 +779,8 @@ newobj(VALUE klass, VALUE flags)
}
if (UNLIKELY(ruby_gc_stress && !ruby_disable_gc_stress)) {
- if (!garbage_collect(objspace)) {
+ /* if (!garbage_collect(objspace)) { */
+ if (!gc_prepare_free_objects(objspace)) {
during_gc = 0;
rb_memerror();
}
@@ -670,9 +796,14 @@ newobj(VALUE klass, VALUE flags)
obj = (VALUE)objspace->heap.free_slots->freelist;
objspace->heap.free_slots->freelist = RANY(obj)->as.free.next;
if (objspace->heap.free_slots->freelist == NULL) {
- unlink_free_heap_slot(objspace, objspace->heap.free_slots);
+ unlink_free_heap_slot(objspace, objspace->heap.free_slots);
}
+#if RGENGC_PROFILE
+ if (flags & FL_WB_PROTECTED) objspace->profile.generated_sunny_object_count++;
+ else objspace->profile.generated_shady_object_count++;
+#endif
+
MEMZERO((void*)obj, RVALUE, 1);
#ifdef GC_DEBUG
RANY(obj)->file = rb_sourcefile();
@@ -680,6 +811,14 @@ newobj(VALUE klass, VALUE flags)
#endif
objspace->total_allocated_object_num++;
+ rgengc_report(5, objspace, "newobj: %p (%s)\n", (void *)obj, obj_type_name(obj));
+
+#if USE_RGENGC && RGENGC_CHECK_MODE
+ if (RBASIC(obj)->flags) rb_bug("newobj: flags of %p (%s) is not zero (%-8lx).\n", (void *)obj, obj_type_name(obj), RBASIC(obj)->flags);
+ if (RVALUE_PROMOTED(obj)) rb_bug("newobj: %p (%s) is promoted.\n", (void *)obj, obj_type_name(obj));
+ if (rgengc_remembered(objspace, (VALUE)obj)) rb_bug("newobj: %p (%s) is remembered.\n", (void *)obj, obj_type_name(obj));
+#endif
+
return obj;
}
@@ -834,6 +973,7 @@ add_slot_local_freelist(rb_objspace_t *objspace, RVALUE *p)
slot = GET_HEAP_SLOT(p);
p->as.free.next = slot->freelist;
slot->freelist = p;
+ rgengc_report(3, objspace, "add_slot_local_freelist: %p (%s) is added to freelist\n", p, obj_type_name((VALUE)p));
return slot;
}
@@ -862,9 +1002,10 @@ free_unused_heaps(rb_objspace_t *objspace)
for (i = j = 1; j < heaps_used; i++) {
if (objspace->heap.sorted[i]->limit == 0) {
struct heaps_header* h = objspace->heap.sorted[i];
- ((struct heaps_free_bitmap *)(h->bits))->next =
- objspace->heap.free_bitmap;
- objspace->heap.free_bitmap = (struct heaps_free_bitmap *)h->bits;
+ free_bitmap(objspace, h->mark_bits);
+#if USE_RGENGC
+ free_bitmap(objspace, h->rememberset_bits);
+#endif
if (!last) {
last = objspace->heap.sorted[i];
}
@@ -1527,7 +1668,8 @@ rb_objspace_call_finalizer(rb_objspace_t *objspace)
while (p < pend) {
if (BUILTIN_TYPE(p) == T_DATA &&
DATA_PTR(p) && RANY(p)->as.data.dfree &&
- !rb_obj_is_thread((VALUE)p) && !rb_obj_is_mutex((VALUE)p) &&
+ !rb_obj_is_thread((VALUE)p) &&
+ !rb_obj_is_mutex((VALUE)p) &&
!rb_obj_is_fiber((VALUE)p)) {
p->as.free.flags = 0;
if (RTYPEDDATA_P(p)) {
@@ -1587,7 +1729,7 @@ is_swept_object(rb_objspace_t *objspace, VALUE ptr)
static inline int
is_dead_object(rb_objspace_t *objspace, VALUE ptr)
{
- if (!is_lazy_sweeping(objspace) || MARKED_IN_BITMAP(GET_HEAP_BITMAP(ptr), ptr))
+ if (!is_lazy_sweeping(objspace) || MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(ptr), ptr))
return FALSE;
if (!is_swept_object(objspace, ptr))
return TRUE;
@@ -1872,7 +2014,7 @@ lazy_sweep_enable(void)
static void
gc_clear_slot_bits(struct heaps_slot *slot)
{
- memset(slot->bits, 0, HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
+ memset(slot->header->mark_bits, 0, HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
}
static size_t
@@ -1881,8 +2023,43 @@ objspace_live_num(rb_objspace_t *objspace)
return objspace->total_allocated_object_num - objspace->total_freed_object_num;
}
-static void
-slot_sweep_body(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
+static inline int
+living_object_p(rb_objspace_t *objspace, VALUE p, uintptr_t *bits, const int during_minor_gc)
+{
+#if USE_RGENGC
+ int reason = 0;
+
+ if (during_minor_gc) {
+ if (MARKED_IN_BITMAP(bits, p)) {
+ reason = 1;
+ }
+ else if (RVALUE_PROMOTED(p)) {
+ reason = 2;
+ }
+ }
+ else {
+ if (MARKED_IN_BITMAP(bits, p)) {
+ reason = 1;
+ }
+ }
+
+ if (RGENGC_DEBUG && reason > 0) {
+ rgengc_report(5, objspace, "living_object_p: %p (%s) is living (%s).\n",
+ (void *)p, obj_type_name(p), reason == 1 ? "marked" : "promoted");
+ }
+
+ if (RGENGC_CHECK_MODE && reason == 0 && rgengc_remembered(objspace, p)) {
+ rb_bug("living_object_p: %p (%s) is remembered, but not marked.\n", (void *)p, obj_type_name(p));
+ }
+
+ return reason != 0;
+#else /* USE_RGENGC */
+ return MARKED_IN_BITMAP(bits, p) != 0;
+#endif
+}
+
+static inline void
+slot_sweep_body(rb_objspace_t *objspace, struct heaps_slot *sweep_slot, const int during_minor_gc)
{
size_t empty_num = 0, freed_num = 0, final_num = 0;
RVALUE *p, *pend;
@@ -1890,12 +2067,21 @@ slot_sweep_body(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
int deferred;
uintptr_t *bits;
+ rgengc_report(3, objspace, "slot_sweep_body: start.\n");
+
p = sweep_slot->header->start; pend = p + sweep_slot->header->limit;
- bits = GET_HEAP_BITMAP(p);
+ bits = GET_HEAP_MARK_BITS(p);
while (p < pend) {
- if ((!(MARKED_IN_BITMAP(bits, p))) && BUILTIN_TYPE(p) != T_ZOMBIE) {
+ if (!living_object_p(objspace, (VALUE)p, bits, during_minor_gc) && BUILTIN_TYPE(p) != T_ZOMBIE) {
if (p->as.basic.flags) {
- if ((deferred = obj_free(objspace, (VALUE)p)) ||
+ rgengc_report(3, objspace, "slot_sweep_body: free %p (%s)\n", p, obj_type_name((VALUE)p));
+
+#if USE_RGENGC && RGENGC_CHECK_MODE
+ if (objspace->rgengc.during_minor_gc && RVALUE_PROMOTED(p)) rb_bug("slot_sweep_body: %p (%s) is promoted.\n", p, obj_type_name((VALUE)p));
+ if (rgengc_remembered(objspace, (VALUE)p)) rb_bug("slot_sweep_body: %p (%s) is remembered.\n", p, obj_type_name((VALUE)p));
+#endif
+
+ if ((deferred = obj_free(objspace, (VALUE)p)) ||
(FL_TEST(p, FL_FINALIZE))) {
if (!deferred) {
p->as.free.flags = T_ZOMBIE;
@@ -1911,6 +2097,7 @@ slot_sweep_body(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
p->as.free.flags = 0;
p->as.free.next = sweep_slot->freelist;
sweep_slot->freelist = p;
+ rgengc_report(3, objspace, "slot_sweep_body: %p (%s) is added to freelist\n", p, obj_type_name((VALUE)p));
freed_num++;
}
}
@@ -1950,24 +2137,50 @@ slot_sweep_body(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
RUBY_VM_SET_FINALIZER_INTERRUPT(th);
}
}
+
+ rgengc_report(3, objspace, "slot_sweep_body: end.\n");
}
+#if USE_RGENGC
+static void
+slot_sweep_minor(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
+{
+ slot_sweep_body(objspace, sweep_slot, TRUE);
+}
+
+static void
+slot_sweep_major(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
+{
+ slot_sweep_body(objspace, sweep_slot, FALSE);
+}
+#endif
+
static void
slot_sweep(rb_objspace_t *objspace, struct heaps_slot *sweep_slot)
{
gc_prof_sweep_slot_timer_start(objspace);
- slot_sweep_body(objspace, sweep_slot);
+
+#if USE_RGENGC
+ if (objspace->rgengc.during_minor_gc) {
+ slot_sweep_minor(objspace, sweep_slot);
+ }
+ else {
+ slot_sweep_major(objspace, sweep_slot);
+ }
+#else
+ slot_sweep_body(objspace, sweep_slot, FALSE);
+#endif
+
gc_prof_sweep_slot_timer_stop(objspace);
}
-
static int
ready_to_gc(rb_objspace_t *objspace)
{
if (dont_gc || during_gc) {
if (!has_free_object) {
if (!heaps_increment(objspace)) {
- set_heaps_increment(objspace);
+ set_heaps_increment(objspace);
heaps_increment(objspace);
}
}
@@ -2002,6 +2215,8 @@ after_gc_sweep(rb_objspace_t *objspace)
size_t inc;
gc_prof_set_malloc_info(objspace);
+ rgengc_report(5, objspace, "after_gc_sweep: objspace->heap.free_num: %d, objspace->heap.free_min: %d\n",
+ objspace->heap.free_num, objspace->heap.free_min);
if (objspace->heap.free_num < objspace->heap.free_min) {
set_heaps_increment(objspace);
heaps_increment(objspace);
@@ -2048,7 +2263,25 @@ rest_sweep(rb_objspace_t *objspace)
}
}
-static void gc_marks(rb_objspace_t *objspace);
+static void gc_marks(rb_objspace_t *objspace, int minor_gc);
+
+static void
+gc_sweep(rb_objspace_t *objspace)
+{
+ struct heaps_slot *next;
+
+ before_gc_sweep(objspace);
+
+ while (objspace->heap.sweep_slots) {
+ next = objspace->heap.sweep_slots->next;
+ slot_sweep(objspace, objspace->heap.sweep_slots);
+ objspace->heap.sweep_slots = next;
+ }
+
+ after_gc_sweep(objspace);
+
+ during_gc = 0;
+}
static int
gc_prepare_free_objects(rb_objspace_t *objspace)
@@ -2072,8 +2305,8 @@ gc_prepare_free_objects(rb_objspace_t *objspace)
if (objspace->heap.sweep_slots) {
res = lazy_sweep(objspace);
- if (res) {
- gc_prof_set_malloc_info(objspace);
+ if (res) {
+ gc_prof_set_malloc_info(objspace);
gc_prof_timer_stop(objspace, Qfalse);
return res;
}
@@ -2085,46 +2318,31 @@ gc_prepare_free_objects(rb_objspace_t *objspace)
}
}
- gc_marks(objspace);
+ gc_marks(objspace, TRUE);
before_gc_sweep(objspace);
if (!(res = lazy_sweep(objspace))) {
/* there is no freespace after slot_sweep() */
while (1) {
+ /* There is no empty RVALUE spaces */
+ /* TODO: [RGENGC] Should do major GC before adding heaps */
+
set_heaps_increment(objspace);
heaps_increment(objspace);
- if (has_free_object) {
- res = TRUE;
- during_gc = 0;
+ if (has_free_object) {
+ res = TRUE;
+ during_gc = 0;
break;
}
- }
+ }
}
gc_prof_timer_stop(objspace, Qtrue);
return res;
}
-static void
-gc_sweep(rb_objspace_t *objspace)
-{
- struct heaps_slot *next;
-
- before_gc_sweep(objspace);
-
- while (objspace->heap.sweep_slots) {
- next = objspace->heap.sweep_slots->next;
- slot_sweep(objspace, objspace->heap.sweep_slots);
- objspace->heap.sweep_slots = next;
- }
-
- after_gc_sweep(objspace);
-
- during_gc = 0;
-}
-
/* Marking stack */
static void push_mark_stack(mark_stack_t *, VALUE);
@@ -2261,6 +2479,7 @@ init_mark_stack(mark_stack_t *stack)
/* Marking */
#define MARK_IN_BITMAP(bits, p) (bits[BITMAP_INDEX(p)] = bits[BITMAP_INDEX(p)] | ((uintptr_t)1 << BITMAP_OFFSET(p)))
+#define CLEAR_IN_BITMAP(bits, p) (bits[BITMAP_INDEX(p)] = bits[BITMAP_INDEX(p)] & ~((uintptr_t)1 << BITMAP_OFFSET(p)))
#ifdef __ia64
@@ -2529,7 +2748,6 @@ mark_current_machine_context(rb_objspace_t *objspace, rb_thread_t *th)
/* This assumes that all registers are saved into the jmp_buf (and stack) */
rb_setjmp(save_regs_gc_mark.j);
- SET_STACK_END;
GET_STACK_BOUNDS(stack_start, stack_end, 1);
mark_locations_array(objspace, save_regs_gc_mark.v, numberof(save_regs_gc_mark.v));
@@ -2572,10 +2790,18 @@ rb_gc_mark_maybe(VALUE obj)
}
static int
+gc_marked(rb_objspace_t *objspace, VALUE ptr)
+{
+ register uintptr_t *bits = GET_HEAP_MARK_BITS(ptr);
+ if (MARKED_IN_BITMAP(bits, ptr)) return 1;
+ return 0;
+}
+
+static int
gc_mark_ptr(rb_objspace_t *objspace, VALUE ptr)
{
- register uintptr_t *bits = GET_HEAP_BITMAP(ptr);
- if (MARKED_IN_BITMAP(bits, ptr)) return 0;
+ register uintptr_t *bits = GET_HEAP_MARK_BITS(ptr);
+ if (gc_marked(objspace, ptr)) return 0;
MARK_IN_BITMAP(bits, ptr);
return 1;
}
@@ -2597,14 +2823,78 @@ rb_objspace_markable_object_p(VALUE obj)
return markable_object_p(/* now it doesn't use &rb_objspace */ 0, obj);
}
+static const char *
+obj_type_name(VALUE obj)
+{
+ switch (TYPE(obj)) {
+#define TYPE_NAME(t) case (t): return #t;
+ TYPE_NAME(T_NONE);
+ TYPE_NAME(T_OBJECT);
+ TYPE_NAME(T_CLASS);
+ TYPE_NAME(T_MODULE);
+ TYPE_NAME(T_FLOAT);
+ TYPE_NAME(T_STRING);
+ TYPE_NAME(T_REGEXP);
+ TYPE_NAME(T_ARRAY);
+ TYPE_NAME(T_HASH);
+ TYPE_NAME(T_STRUCT);
+ TYPE_NAME(T_BIGNUM);
+ TYPE_NAME(T_FILE);
+ TYPE_NAME(T_MATCH);
+ TYPE_NAME(T_COMPLEX);
+ TYPE_NAME(T_RATIONAL);
+ TYPE_NAME(T_NIL);
+ TYPE_NAME(T_TRUE);
+ TYPE_NAME(T_FALSE);
+ TYPE_NAME(T_SYMBOL);
+ TYPE_NAME(T_FIXNUM);
+ TYPE_NAME(T_UNDEF);
+ TYPE_NAME(T_NODE);
+ TYPE_NAME(T_ICLASS);
+ TYPE_NAME(T_ZOMBIE);
+ case T_DATA:
+ if (rb_objspace_data_type_name(obj)) {
+ return rb_objspace_data_type_name(obj);
+ }
+ return "T_DATA";
+#undef TYPE_NAME
+ }
+ return "unknown";
+}
+
+static void
+rgengc_check_shady(rb_objspace_t *objspace, VALUE obj)
+{
+#if USE_RGENGC
+ if (RGENGC_CHECK_MODE > 1) {
+ if (objspace->rgengc.interesting_object == obj) {
+ if (FIXNUM_P(objspace->rgengc.parent_object)) {
+ fprintf(stderr, "rgengc_check_shady: points %p (%s) is pointed at line %d\n",
+ (void *)obj, obj_type_name(obj), FIX2INT(objspace->rgengc.parent_object));
+ }
+ else {
+ fprintf(stderr, "rgengc_check_shady: %p (%s) points %p (%s)\n",
+ (void *)objspace->rgengc.parent_object, obj_type_name(objspace->rgengc.parent_object),
+ (void *)obj, obj_type_name(obj));
+ }
+ }
+ }
+
+ if (objspace->rgengc.parent_object_is_promoted &&
+ RVALUE_SHADY(obj) && !rgengc_remembered(objspace, obj)) {
+ RVALUE_DEMOTE(obj);
+ rgengc_remember(objspace, obj);
+ }
+#endif
+}
+
static void
gc_mark(rb_objspace_t *objspace, VALUE ptr)
{
- if (!markable_object_p(objspace, ptr)) {
- return;
- }
+ if (!markable_object_p(objspace, ptr)) return;
if (LIKELY(objspace->mark_func_data == 0)) {
+ rgengc_check_shady(objspace, ptr);
if (!gc_mark_ptr(objspace, ptr)) return; /* already marked */
push_mark_stack(&objspace->mark_stack, ptr);
}
@@ -2624,20 +2914,60 @@ gc_mark_children(rb_objspace_t *objspace, VALUE ptr)
{
register RVALUE *obj = RANY(ptr);
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = (VALUE)ptr;
+
goto marking; /* skip */
again:
if (LIKELY(objspace->mark_func_data == 0)) {
obj = RANY(ptr);
if (!markable_object_p(objspace, ptr)) return;
+ rgengc_check_shady(objspace, ptr);
if (!gc_mark_ptr(objspace, ptr)) return; /* already marked */
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = (VALUE)ptr;
}
else {
gc_mark(objspace, ptr);
return;
}
+#if USE_RGENGC
+ if (RGENGC_CHECK_MODE && RVALUE_SHADY(obj) && RVALUE_PROMOTED(obj)) {
+ rb_bug("gc_mark_children: (0) %p (%s) is shady and promoted.\n", (void *)obj, obj_type_name((VALUE)obj));
+ }
+#endif /* USE_RGENGC */
+
marking:
+
+#if USE_RGENGC
+ if (RGENGC_CHECK_MODE && RVALUE_SHADY(obj) && RVALUE_PROMOTED(obj)) {
+ rb_bug("gc_mark_children: (1) %p (%s) is shady and promoted.\n", (void *)obj, obj_type_name((VALUE)obj));
+ }
+
+ if (objspace->rgengc.during_minor_gc) {
+ /* only minor gc skip marking promoted objects */
+ if (RVALUE_PROMOTED(obj)) {
+ rgengc_report(3, objspace, "gc_mark_children: %p (%s) was promoted.\n", obj, obj_type_name((VALUE)obj));
+ return; /* old gen */
+ }
+ }
+
+ /* minor/major common */
+ if (RVALUE_SUNNY(obj)) {
+ RVALUE_PROMOTE(obj); /* Sunny object can be promoted to OLDGEN object */
+ rgengc_report(3, objspace, "gc_mark_children: promote %p (%s).\n", (void *)obj, obj_type_name((VALUE)obj));
+ objspace->rgengc.parent_object_is_promoted = TRUE;
+ }
+ else {
+ rgengc_report(3, objspace, "gc_mark_children: do not promote shady %p (%s).\n", (void *)obj, obj_type_name((VALUE)obj));
+ objspace->rgengc.parent_object_is_promoted = FALSE;
+ }
+
+ if (RGENGC_CHECK_MODE && RVALUE_SHADY(obj) && RVALUE_PROMOTED(obj)) {
+ rb_bug("gc_mark_children: (2) %p (%s) is shady and promoted.\n", (void *)obj, obj_type_name((VALUE)obj));
+ }
+#endif /* USE_RGENGC */
+
if (FL_TEST(obj, FL_EXIVAR)) {
rb_mark_generic_ivar(ptr);
}
@@ -2817,7 +3147,7 @@ gc_mark_children(rb_objspace_t *objspace, VALUE ptr)
}
else {
long i, len = RARRAY_LEN(obj);
- VALUE *ptr = RARRAY_PTR(obj);
+ VALUE *ptr = RARRAY_RAWPTR(obj);
for (i=0; i < len; i++) {
gc_mark(objspace, *ptr++);
}
@@ -2927,52 +3257,339 @@ gc_mark_stacked_objects(rb_objspace_t *objspace)
}
static void
-gc_marks(rb_objspace_t *objspace)
+gc_marks_body(rb_objspace_t *objspace, rb_thread_t *th)
{
struct gc_list *list;
- rb_thread_t *th = GET_THREAD();
- struct mark_func_data_struct *prev_mark_func_data;
- prev_mark_func_data = objspace->mark_func_data;
- objspace->mark_func_data = 0;
+ /* start marking */
+ rgengc_report(1, objspace, "gc_marks_body: start.\n");
- gc_prof_mark_timer_start(objspace);
- objspace->count++;
+#if USE_RGENGC
+ if (objspace->rgengc.during_minor_gc) {
+ objspace->profile.minor_gc_count++;
+ rgengc_rememberset_mark(objspace);
+ }
+ else {
+ objspace->profile.major_gc_count++;
+ rgengc_rememberset_clear(objspace);
+ }
+#endif
SET_STACK_END;
-
th->vm->self ? rb_gc_mark(th->vm->self) : rb_vm_mark(th->vm);
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
mark_tbl(objspace, finalizer_table);
+
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
mark_current_machine_context(objspace, th);
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_gc_mark_symbols();
+
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_gc_mark_encodings();
/* mark protected global variables */
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
for (list = global_List; list; list = list->next) {
rb_gc_mark_maybe(*list->varptr);
}
+
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_mark_end_proc();
+
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_gc_mark_global_tbl();
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
mark_tbl(objspace, rb_class_tbl);
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
/* mark generic instance variables for special constants */
rb_mark_generic_ivar_tbl();
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_gc_mark_parser();
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = INT2FIX(__LINE__);
rb_gc_mark_unlinked_live_method_entries(th->vm);
+ if (RGENGC_CHECK_MODE > 1) objspace->rgengc.parent_object = Qundef;
+
/* marking-loop */
gc_mark_stacked_objects(objspace);
+ /* cleanup */
gc_prof_mark_timer_stop(objspace);
+ rgengc_report(1, objspace, "gc_marks_body: end.\n");
+}
+
+static void
+gc_marks_test(rb_objspace_t *objspace, rb_thread_t *th)
+{
+#if USE_RGENGC
+ size_t i;
+ uintptr_t **prev_bitmaps = (uintptr_t **)malloc(sizeof(uintptr_t *) * heaps_used * 2);
+ uintptr_t *temp_bitmaps = (uintptr_t *)malloc((HEAP_BITMAP_LIMIT * sizeof(uintptr_t)) * heaps_used * 2);
+
+ rgengc_report(1, objspace, "gc_marks_test: test-full-gc\n");
+
+ if (prev_bitmaps == 0 || temp_bitmaps == 0) {
+ rb_bug("gc_marks_test: not enough memory to test.\n");
+ }
+ memset(temp_bitmaps, 0, (HEAP_BITMAP_LIMIT * sizeof(uintptr_t)) * heaps_used * 2);
+
+ /* swap with temporary bitmaps */
+ for (i=0; i<heaps_used; i++) {
+ prev_bitmaps[2*i+0] = objspace->heap.sorted[i]->mark_bits;
+ prev_bitmaps[2*i+1] = objspace->heap.sorted[i]->rememberset_bits;
+ objspace->heap.sorted[i]->mark_bits = &temp_bitmaps[(2*i+0)*HEAP_BITMAP_LIMIT];
+ objspace->heap.sorted[i]->rememberset_bits = &temp_bitmaps[(2*i+1)*HEAP_BITMAP_LIMIT];
+ }
+
+ /* run major (full) gc with temporary mark/rememberset */
+ objspace->rgengc.parent_object_is_promoted = FALSE;
+ objspace->rgengc.parent_object = Qundef;
+ objspace->rgengc.during_minor_gc = FALSE; /* major/full GC with temporary bitmaps */
+ gc_marks_body(objspace, th);
+
+ /* check & restore */
+ for (i=0; i<heaps_used; i++) {
+ uintptr_t *minor_mark_bits = prev_bitmaps[2*i+0];
+ uintptr_t *minor_rememberset_bits = prev_bitmaps[2*i+1];
+ uintptr_t *major_mark_bits = objspace->heap.sorted[i]->mark_bits;
+ /* uintptr_t *major_rememberset_bits = objspace->heap.sorted[i]->rememberset_bits; */
+ RVALUE *p = objspace->heap.sorted[i]->start;
+ RVALUE *pend = p + objspace->heap.sorted[i]->limit;
+
+ while (p < pend) {
+ if (MARKED_IN_BITMAP(major_mark_bits, p) && /* should be lived */
+ !MARKED_IN_BITMAP(minor_mark_bits, p) &&
+ !RVALUE_PROMOTED(p)) {
+
+ fprintf(stderr, "gc_marks_test: %p (%s) is living, but not marked && not promoted.\n", p, obj_type_name((VALUE)p));
+ objspace->rgengc.interesting_object = (VALUE)p;
+ gc_marks_test(objspace, th);
+ rb_bug("gc_marks_test (again): %p (%s) is living, but not marked && not promoted.\n", p, obj_type_name((VALUE)p));
+ }
+ p++;
+ }
+ objspace->heap.sorted[i]->mark_bits = minor_mark_bits;
+ objspace->heap.sorted[i]->rememberset_bits = minor_rememberset_bits;
+ }
+ free(prev_bitmaps);
+ free(temp_bitmaps);
+
+ objspace->rgengc.during_minor_gc = TRUE;
+#endif
+}
+
+static void
+gc_marks(rb_objspace_t *objspace, int minor_gc)
+{
+ struct mark_func_data_struct *prev_mark_func_data;
+ rb_thread_t *th = GET_THREAD();
+
+ /* setup marking */
+ prev_mark_func_data = objspace->mark_func_data;
+ objspace->mark_func_data = 0;
+
+ gc_prof_mark_timer_start(objspace);
+ objspace->count++;
+
+ SET_STACK_END;
+
+ if (USE_RGENGC) {
+ objspace->rgengc.parent_object_is_promoted = FALSE;
+ objspace->rgengc.parent_object = Qundef;
+ objspace->rgengc.during_minor_gc = minor_gc;
+ }
+
+ gc_marks_body(objspace, th);
+
+ if (RGENGC_CHECK_MODE > 1 && minor_gc) {
+ gc_marks_test(objspace, th);
+ }
objspace->mark_func_data = prev_mark_func_data;
}
+/* RGENGC */
+
+#if USE_RGENGC
+
+/* bit operations */
+
+static int
+rgengc_remembersetbits_get(rb_objspace_t *objspace, VALUE obj)
+{
+ uintptr_t *bits = GET_HEAP_REMEMBERSET_BITS(obj);
+ return MARKED_IN_BITMAP(bits, obj) ? 1 : 0;
+}
+
+static void
+rgengc_remembersetbits_set(rb_objspace_t *objspace, VALUE obj)
+{
+ uintptr_t *bits = GET_HEAP_REMEMBERSET_BITS(obj);
+ MARK_IN_BITMAP(bits, obj);
+}
+
+/* wb, etc */
+
+static void
+rgengc_remember(rb_objspace_t *objspace, VALUE obj)
+{
+ if (RGENGC_CHECK_MODE && RVALUE_PROMOTED(obj)) {
+ rb_bug("rgengc_remember: %p (%s) is promoted object",
+ (void *)obj, obj_type_name(obj));
+ }
+
+ rgengc_report(0, objspace, "rgengc_remember: %p (%s, %s) %s\n", (void *)obj, obj_type_name(obj),
+ RVALUE_SUNNY(obj) ? "sunny" : "shady",
+ rgengc_remembersetbits_get(objspace, obj) ? "was already remembered" : "is remembered now");
+
+ if (RGENGC_PROFILE) {
+ if (!rgengc_remembered(objspace, obj)) {
+ if (RVALUE_SUNNY(obj)) objspace->profile.remembered_sunny_object_count++;
+ else objspace->profile.remembered_shady_object_count++;
+ }
+ }
+
+ rgengc_remembersetbits_set(objspace, obj);
+}
+
+static int
+rgengc_remembered(rb_objspace_t *objspace, VALUE obj)
+{
+ int result = rgengc_remembersetbits_get(objspace, obj);
+ rgengc_report(6, objspace, "gc_remembered: %p (%s) => %d\n", (void *)obj, obj_type_name(obj), result);
+ return result;
+}
+
+static size_t
+rgengc_rememberset_mark(rb_objspace_t *objspace)
+{
+ size_t i;
+ size_t mark_cnt = 0, clear_cnt = 0, skip_cnt = 0;
+ RVALUE *p, *pend;
+ uintptr_t *bits;
+
+ for (i=0; i<heaps_used; i++) {
+ if (0 /* TODO: optimization - skip it if there are no remembered objects */) {
+ skip_cnt++;
+ continue;
+ }
+
+ p = objspace->heap.sorted[i]->start; pend = p + objspace->heap.sorted[i]->limit;
+ bits = GET_HEAP_REMEMBERSET_BITS(p);
+
+ while (p < pend) {
+ if (MARKED_IN_BITMAP(bits, p)) {
+ gc_mark(objspace, (VALUE)p);
+ rgengc_report(2, objspace, "rgengc_rememberset_mark: mark %p (%s)\n", p, obj_type_name((VALUE)p));
+
+ if (RGENGC_CHECK_MODE) mark_cnt++;
+
+ if (RVALUE_SUNNY(p)) {
+ rgengc_report(2, objspace, "rgengc_rememberset_mark: clear %p (%s)\n", p, obj_type_name((VALUE)p));
+ CLEAR_IN_BITMAP(bits, p);
+ if (RGENGC_CHECK_MODE) clear_cnt++;
+ }
+ }
+ p++;
+ }
+ }
+
+ if (RGENGC_CHECK_MODE && mark_cnt < clear_cnt) rb_bug("rgengc_rememberset_mark: mark_cnt (%"PRIdSIZE") < clear_cnt (%"PRIdSIZE")", mark_cnt, clear_cnt);
+ rgengc_report(2, objspace, "rgengc_rememberset_mark: mark_cnt: %"PRIdSIZE", clear_cnt: %"PRIdSIZE", skip_cnt: %"PRIdSIZE"\n", mark_cnt, clear_cnt, skip_cnt);
+
+ return mark_cnt - clear_cnt; /* total count of objects in remember set */
+}
+
+static void
+rgengc_rememberset_clear(rb_objspace_t *objspace)
+{
+ size_t i;
+
+ for (i=0; i<heaps_used; i++) {
+ uintptr_t *bits = objspace->heap.sorted[i]->rememberset_bits;
+ memset(bits, 0, HEAP_BITMAP_LIMIT * sizeof(uintptr_t));
+ }
+}
+
+/* RGENGC: APIs */
+
+void
+rb_gc_writebarrier(VALUE a, VALUE b)
+{
+ rb_objspace_t *objspace = &rb_objspace;
+
+ if (RGENGC_CHECK_MODE) {
+ if (!RVALUE_PROMOTED(a)) rb_bug("rb_gc_wb: referer object %p (%s) is not promoted.\n", (void *)a, obj_type_name(a));
+ if (RVALUE_PROMOTED(b)) rb_bug("rb_gc_wb: refered object %p (%s) is promoted.\n", (void *)b, obj_type_name(b));
+ }
+
+ if (!rgengc_remembered(objspace, a)) {
+ rgengc_report(2, objspace, "rb_gc_wb: %p (%s) -> %p (%s)\n",
+ (void *)a, obj_type_name(a), (void *)b, obj_type_name(b));
+
+ /* need to sweep all slots before demote */
+ /* TODO: check delayed sweeping slot or not
+ * if delayed sweepling slot, then mark it
+ * else demote simple
+ */
+ rest_sweep(objspace);
+
+ RVALUE_DEMOTE(a);
+ rgengc_remember(objspace, a);
+ }
+}
+
+void
+rb_gc_giveup_promoted_writebarrier(VALUE obj)
+{
+ rb_objspace_t *objspace = &rb_objspace;
+
+ if (RGENGC_CHECK_MODE) {
+ if (!RVALUE_PROMOTED(obj)) rb_bug("rb_gc_giveup_promoted_writebarrier: called on non-promoted object");
+ if (RVALUE_SUNNY(obj)) rb_bug("rb_gc_giveup_promoted_writebarrier: called on sunny object");
+ }
+
+ rgengc_report(2, objspace, "rb_gc_giveup_writebarrier: %p (%s)%s\n", (void *)obj, obj_type_name(obj),
+ rgengc_remembered(objspace, obj) ? " (already remembered)" : "");
+
+ /* need to sweep all slots before demote */
+ /* TODO: check delayed sweeping slot or not
+ * if delayed sweepling slot, then mark it
+ * else demote simple
+ */
+ rest_sweep(objspace);
+
+ RVALUE_DEMOTE(obj);
+ rgengc_remember(objspace, obj);
+
+#if RGENGC_PROFILE
+ objspace->profile.shade_operation_count++;
+#endif
+}
+
+#endif /* USE_RGENGC */
+
+/* RGENGC analysis information */
+
+VALUE
+rb_obj_rgengc_writebarrier_protected_p(VALUE obj)
+{
+ return OBJ_WB_PROTECTED(obj) ? Qtrue : Qfalse;
+}
+
+VALUE
+rb_obj_rgengc_promoted_p(VALUE obj)
+{
+ return OBJ_PROMOTED(obj) ? Qtrue : Qfalse;
+}
+
/* GC */
void
@@ -2981,8 +3598,12 @@ rb_gc_force_recycle(VALUE p)
rb_objspace_t *objspace = &rb_objspace;
struct heaps_slot *slot;
+#if USE_RGENGC
+ CLEAR_IN_BITMAP(GET_HEAP_REMEMBERSET_BITS(p), p);
+#endif
+
objspace->total_freed_object_num++;
- if (MARKED_IN_BITMAP(GET_HEAP_BITMAP(p), p)) {
+ if (MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(p), p)) {
add_slot_local_freelist(objspace, (RVALUE *)p);
}
else {
@@ -3055,7 +3676,7 @@ garbage_collect(rb_objspace_t *objspace)
rest_sweep(objspace);
during_gc++;
- gc_marks(objspace);
+ gc_marks(objspace, FALSE);
gc_sweep(objspace);
@@ -3188,21 +3809,44 @@ gc_stat(int argc, VALUE *argv, VALUE self)
static VALUE sym_heap_used, sym_heap_length, sym_heap_increment;
static VALUE sym_heap_live_num, sym_heap_free_num, sym_heap_final_num;
static VALUE sym_total_allocated_object, sym_total_freed_object;
+#if USE_RGENGC
+ static VALUE sym_minor_gc_count, sym_major_gc_count;
+#if RGENGC_PROFILE
+ static VALUE sym_generated_sunny_object_count, sym_generated_shady_object_count;
+ static VALUE sym_shade_operation_count;
+ static VALUE sym_remembered_sunny_object_count, sym_remembered_shady_object_count;
+#endif /* RGENGC_PROFILE */
+#endif /* USE_RGENGC */
+
if (sym_count == 0) {
- sym_count = ID2SYM(rb_intern_const("count"));
- sym_heap_used = ID2SYM(rb_intern_const("heap_used"));
- sym_heap_length = ID2SYM(rb_intern_const("heap_length"));
- sym_heap_increment = ID2SYM(rb_intern_const("heap_increment"));
- sym_heap_live_num = ID2SYM(rb_intern_const("heap_live_num"));
- sym_heap_free_num = ID2SYM(rb_intern_const("heap_free_num"));
- sym_heap_final_num = ID2SYM(rb_intern_const("heap_final_num"));
- sym_total_allocated_object = ID2SYM(rb_intern_const("total_allocated_object"));
- sym_total_freed_object = ID2SYM(rb_intern_const("total_freed_object"));
+#define S(s) sym_##s = ID2SYM(rb_intern_const(#s))
+ S(count);
+ S(heap_used);
+ S(heap_length);
+ S(heap_increment);
+ S(heap_live_num);
+ S(heap_free_num);
+ S(heap_final_num);
+ S(total_allocated_object);
+ S(total_freed_object);
+#if USE_RGENGC
+ S(minor_gc_count);
+ S(major_gc_count);
+#if RGENGC_PROFILE
+ S(generated_sunny_object_count);
+ S(generated_shady_object_count);
+ S(shade_operation_count);
+ S(remembered_sunny_object_count);
+ S(remembered_shady_object_count);
+#endif /* RGENGC_PROFILE */
+#endif /* USE_RGENGC */
+#undef S
}
if (rb_scan_args(argc, argv, "01", &hash) == 1) {
- if (!RB_TYPE_P(hash, T_HASH))
- rb_raise(rb_eTypeError, "non-hash given");
+ if (!RB_TYPE_P(hash, T_HASH)) {
+ rb_raise(rb_eTypeError, "non-hash given");
+ }
}
if (hash == Qnil) {
@@ -3221,6 +3865,17 @@ gc_stat(int argc, VALUE *argv, VALUE self)
rb_hash_aset(hash, sym_heap_final_num, SIZET2NUM(objspace->heap.final_num));
rb_hash_aset(hash, sym_total_allocated_object, SIZET2NUM(objspace->total_allocated_object_num));
rb_hash_aset(hash, sym_total_freed_object, SIZET2NUM(objspace->total_freed_object_num));
+#if USE_RGENGC
+ rb_hash_aset(hash, sym_minor_gc_count, SIZET2NUM(objspace->profile.minor_gc_count));
+ rb_hash_aset(hash, sym_major_gc_count, SIZET2NUM(objspace->profile.major_gc_count));
+#if RGENGC_PROFILE
+ rb_hash_aset(hash, sym_generated_sunny_object_count, SIZET2NUM(objspace->profile.generated_sunny_object_count));
+ rb_hash_aset(hash, sym_generated_shady_object_count, SIZET2NUM(objspace->profile.generated_shady_object_count));
+ rb_hash_aset(hash, sym_shade_operation_count, SIZET2NUM(objspace->profile.shade_operation_count));
+ rb_hash_aset(hash, sym_remembered_sunny_object_count, SIZET2NUM(objspace->profile.remembered_sunny_object_count));
+ rb_hash_aset(hash, sym_remembered_shady_object_count, SIZET2NUM(objspace->profile.remembered_shady_object_count));
+#endif /* RGENGC_PROFILE */
+#endif /* USE_RGENGC */
return hash;
}
@@ -4415,7 +5070,7 @@ rb_gcdebug_print_obj_condition(VALUE obj)
return;
}
fprintf(stderr, "marked?: %s\n",
- MARKED_IN_BITMAP(GET_HEAP_BITMAP(obj), obj) ? "true" : "false");
+ MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(obj), obj) ? "true" : "false");
if (is_lazy_sweeping(objspace)) {
fprintf(stderr, "lazy sweeping?: true\n");
fprintf(stderr, "swept?: %s\n",
diff --git a/include/ruby/ruby.h b/include/ruby/ruby.h
index 3e6909d..1d37b8d 100644
--- a/include/ruby/ruby.h
+++ b/include/ruby/ruby.h
@@ -681,6 +681,32 @@ VALUE rb_obj_setup(VALUE obj, VALUE klass, VALUE type);
if (FL_TEST((obj), FL_EXIVAR)) rb_copy_generic_ivar((VALUE)(dup),(VALUE)(obj));\
} while (0)
+#ifndef USE_RGENGC
+#define USE_RGENGC 0
+#endif
+
+#ifndef RGENGC_WB_PROTECTED_ARRAY
+#define RGENGC_WB_PROTECTED_ARRAY 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_STRING
+#define RGENGC_WB_PROTECTED_STRING 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_OBJECT
+#define RGENGC_WB_PROTECTED_OBJECT 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_FLOAT
+#define RGENGC_WB_PROTECTED_FLOAT 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_COMPLEX
+#define RGENGC_WB_PROTECTED_COMPLEX 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_RATIONAL
+#define RGENGC_WB_PROTECTED_RATIONAL 0
+#endif
+#ifndef RGENGC_WB_PROTECTED_BIGNUM
+#define RGENGC_WB_PROTECTED_BIGNUM 0
+#endif
+
struct RBasic {
VALUE flags;
const VALUE klass;
@@ -891,14 +917,32 @@ struct RArray {
(long)((RBASIC(a)->flags >> RARRAY_EMBED_LEN_SHIFT) & \
(RARRAY_EMBED_LEN_MASK >> RARRAY_EMBED_LEN_SHIFT)) : \
RARRAY(a)->as.heap.len)
-#define RARRAY_PTR(a) \
- ((RBASIC(a)->flags & RARRAY_EMBED_FLAG) ? \
- RARRAY(a)->as.ary : \
- RARRAY(a)->as.heap.ptr)
+
#define RARRAY_LENINT(ary) rb_long2int(RARRAY_LEN(ary))
-#define RARRAY_AREF(a, i) (RARRAY_PTR(a)[i])
-#define RARRAY_ASET(a, i, v) do {RARRAY_PTR(a)[i] = (v);} while (0)
+/* DO NOT USE THIS MACRO DIRECTLY */
+#define RARRAY_RAWPTR(a) \
+ ((RBASIC(a)->flags & RARRAY_EMBED_FLAG) ? \
+ RARRAY(a)->as.ary : \
+ RARRAY(a)->as.heap.ptr)
+
+#define RARRAY_PTR_USE_START(a) RARRAY_RAWPTR(a)
+#define RARRAY_PTR_USE_END(a) /* */
+
+#define RARRAY_PTR_USE(ary, ptr_name, expr) do { \
+ const VALUE _ary = (ary); \
+ VALUE *ptr_name = RARRAY_PTR_USE_START(_ary); \
+ expr; \
+ RARRAY_PTR_USE_END(_ary); \
+} while (0)
+
+#define RARRAY_AREF(a, i) (RARRAY_RAWPTR(a)[i])
+#define RARRAY_ASET(a, i, v) do { \
+ const VALUE _ary_ = (a); \
+ OBJ_WRITE(_ary_, &RARRAY_RAWPTR(_ary_)[i], (v)); \
+} while (0)
+
+#define RARRAY_PTR(a) RARRAY_RAWPTR(RGENGC_WB_PROTECTED_ARRAY ? OBJ_WB_GIVEUP((VALUE)a) : ((VALUE)a))
struct RRegexp {
struct RBasic basic;
@@ -935,8 +979,8 @@ struct RRational {
const VALUE den;
};
-#define RRATIONAL_SET_NUM(rat, n) (*((VALUE *)(&((struct RRational *)(rat))->num)) = (n))
-#define RRATIONAL_SET_DEN(rat, d) (*((VALUE *)(&((struct RRational *)(rat))->den)) = (d))
+#define RRATIONAL_SET_NUM(rat, n) OBJ_WRITE((rat), ((VALUE *)(&((struct RRational *)(rat))->num)),(n))
+#define RRATIONAL_SET_DEN(rat, d) OBJ_WRITE((rat), ((VALUE *)(&((struct RRational *)(rat))->den)),(d))
struct RComplex {
struct RBasic basic;
@@ -944,8 +988,8 @@ struct RComplex {
const VALUE imag;
};
-#define RCOMPLEX_SET_REAL(cmp, r) (*((VALUE *)(&((struct RComplex *)(cmp))->real)) = (r))
-#define RCOMPLEX_SET_IMAG(cmp, i) (*((VALUE *)(&((struct RComplex *)(cmp))->imag)) = (i))
+#define RCOMPLEX_SET_REAL(cmp, r) OBJ_WRITE((cmp), ((VALUE *)(&((struct RComplex *)(cmp))->real)),(r))
+#define RCOMPLEX_SET_IMAG(cmp, i) OBJ_WRITE((cmp), ((VALUE *)(&((struct RComplex *)(cmp))->imag)),(i))
struct RData {
struct RBasic basic;
@@ -1108,8 +1152,8 @@ struct RBignum {
#define RCOMPLEX(obj) (R_CAST(RComplex)(obj))
#define FL_SINGLETON FL_USER0
-#define FL_RESERVED1 (((VALUE)1)<<5)
-#define FL_RESERVED2 (((VALUE)1)<<6) /* will be used in the future GC */
+#define FL_WB_PROTECTED (((VALUE)1)<<5)
+#define FL_OLDGEN (((VALUE)1)<<6)
#define FL_FINALIZE (((VALUE)1)<<7)
#define FL_TAINT (((VALUE)1)<<8)
#define FL_UNTRUSTED (((VALUE)1)<<9)
@@ -1142,7 +1186,8 @@ struct RBignum {
#define SPECIAL_CONST_P(x) (IMMEDIATE_P(x) || !RTEST(x))
#define FL_ABLE(x) (!SPECIAL_CONST_P(x) && BUILTIN_TYPE(x) != T_NODE)
-#define FL_TEST(x,f) (FL_ABLE(x)?(RBASIC(x)->flags&(f)):0)
+#define FL_TEST_RAW(x,f) (RBASIC(x)->flags&(f))
+#define FL_TEST(x,f) (FL_ABLE(x)?FL_TEST_RAW((x),(f)):0)
#define FL_ANY(x,f) FL_TEST((x),(f))
#define FL_ALL(x,f) (FL_TEST((x),(f)) == (f))
#define FL_SET(x,f) do {if (FL_ABLE(x)) RBASIC(x)->flags |= (f);} while (0)
@@ -1162,6 +1207,77 @@ struct RBignum {
#define OBJ_FROZEN(x) (!!(FL_ABLE(x)?(RBASIC(x)->flags&(FL_FREEZE)):(FIXNUM_P(x)||FLONUM_P(x))))
#define OBJ_FREEZE(x) FL_SET((x), FL_FREEZE)
+#if USE_RGENGC
+#define OBJ_PROMOTED(x) (SPECIAL_CONST_P(x) ? 0 : FL_TEST_RAW((x), FL_OLDGEN))
+#define OBJ_WB_PROTECTED(x) (SPECIAL_CONST_P(x) ? 1 : FL_TEST_RAW((x), FL_WB_PROTECTED))
+#define OBJ_WB_GIVEUP(x) rb_obj_wb_giveup(x, __FILE__, __LINE__)
+
+void rb_gc_writebarrier(VALUE a, VALUE b);
+void rb_gc_giveup_promoted_writebarrier(VALUE obj);
+
+#else /* USE_RGENGC */
+#define OBJ_PROMOTED(x) 0
+#define OBJ_WB_PROTECTED(x) 0
+#define OBJ_WB_GIVEUP(x) rb_obj_wb_giveup(x, __FILE__, __LINE__)
+#endif
+#define OBJ_SHADE(x) OBJ_WB_GIVEUP(x) /* RGENGC terminology; available with and without USE_RGENGC */
+
+#define OBJ_WRITE(a, slot, b) rb_obj_write((VALUE)(a), (slot), (VALUE)(b), __FILE__, __LINE__)
+#define OBJ_WRITTEN(a, oldv, b) rb_obj_written((VALUE)(a), (VALUE)(oldv), (VALUE)(b), __FILE__, __LINE__)
+
+static inline VALUE
+rb_obj_wb_giveup(VALUE x, const char *filename, int line)
+{
+#ifdef RGENGC_LOGGING_WB_GIVEUP
+ RGENGC_LOGGING_WB_GIVEUP(x, filename, line);
+#endif
+
+#if USE_RGENGC
+ /* `x' should be an RVALUE object */
+ if (FL_TEST_RAW((x), FL_WB_PROTECTED)) {
+ RBASIC(x)->flags &= ~FL_WB_PROTECTED;
+
+ if (FL_TEST_RAW((x), FL_OLDGEN)) {
+ rb_gc_giveup_promoted_writebarrier(x);
+ }
+ }
+#endif
+ return x;
+}
+
+static inline VALUE
+rb_obj_written(VALUE a, VALUE oldv, VALUE b, const char *filename, int line)
+{
+#ifdef RGENGC_LOGGING_OBJ_WRITTEN
+ RGENGC_LOGGING_OBJ_WRITTEN(a, oldv, b, filename, line);
+#endif
+
+#if USE_RGENGC
+ /* `a' should be an RVALUE object */
+ if (FL_TEST_RAW((a), FL_OLDGEN) &&
+ !SPECIAL_CONST_P(b) && !FL_TEST_RAW((b), FL_OLDGEN)) {
+ rb_gc_writebarrier(a, b);
+ }
+#endif
+
+ return a;
+}
+
+static inline VALUE
+rb_obj_write(VALUE a, VALUE *slot, VALUE b, const char *filename, int line)
+{
+#ifdef RGENGC_LOGGING_WRITE
+ RGENGC_LOGGING_WRITE(a, slot, b, filename, line);
+#endif
+
+ *slot = b;
+
+#if USE_RGENGC
+ rb_obj_written(a, Qundef /* ignore `oldv' now */, b, filename, line);
+#endif
+ return a;
+}
+
#if SIZEOF_INT < SIZEOF_LONG
# define INT2NUM(v) INT2FIX((int)(v))
# define UINT2NUM(v) LONG2FIX((unsigned int)(v))
diff --git a/internal.h b/internal.h
index 6f2091c..b0c36a8 100644
--- a/internal.h
+++ b/internal.h
@@ -81,7 +81,7 @@ RCLASS_SUPER(VALUE c)
static inline VALUE
RCLASS_SET_SUPER(VALUE a, VALUE b) {
- RCLASS_EXT(a)->super = b;
+ OBJ_WRITE(a, &RCLASS_EXT(a)->super, b);
return b;
}
@@ -239,7 +239,10 @@ struct RBasicRaw {
#define RBASIC_CLEAR_CLASS(obj) (((struct RBasicRaw *)((VALUE)(obj)))->klass = 0)
#define RBASIC_SET_CLASS_RAW(obj, cls) (((struct RBasicRaw *)((VALUE)(obj)))->klass = (cls))
-#define RBASIC_SET_CLASS(obj, cls) do {((struct RBasicRaw *)(obj))->klass = cls; } while (0)
+#define RBASIC_SET_CLASS(obj, cls) do { \
+ VALUE _obj_ = (obj); \
+ OBJ_WRITE(_obj_, &((struct RBasicRaw *)(_obj_))->klass, cls); \
+} while (0)
/* parse.y */
VALUE rb_parser_get_yydebug(VALUE);
diff --git a/marshal.c b/marshal.c
index c948a1d..464ef15 100644
--- a/marshal.c
+++ b/marshal.c
@@ -1613,7 +1613,7 @@ r_object0(struct load_arg *arg, int *ivp, VALUE extmod)
BDIGIT *digits;
VALUE data;
- NEWOBJ_OF(big, struct RBignum, rb_cBignum, T_BIGNUM);
+ NEWOBJ_OF(big, struct RBignum, rb_cBignum, T_BIGNUM | (RGENGC_WB_PROTECTED_BIGNUM ? FL_WB_PROTECTED : 0));
RBIGNUM_SET_SIGN(big, (r_byte(arg) == '+'));
len = r_long(arg);
data = r_bytes0(len * 2, arg);
diff --git a/node.h b/node.h
index 3d915c5..57b1bc7 100644
--- a/node.h
+++ b/node.h
@@ -265,10 +265,16 @@ typedef struct RNode {
#define RNODE(obj) (R_CAST(RNode)(obj))
-/* 0..4:T_TYPES, 5:reserved, 6:NODE_FL_CREF_OMOD_SHARED, 7:NODE_FL_NEWLINE */
-#define NODE_FL_NEWLINE (((VALUE)1)<<7)
-#define NODE_FL_CREF_PUSHED_BY_EVAL NODE_FL_NEWLINE
-#define NODE_FL_CREF_OMOD_SHARED (((VALUE)1)<<6)
+/* FL : 0..4: T_TYPES, 5: KEEP_WB, 6: OLDGEN, 7: FINALIZE, 8: TAINT, 9: UNTRUSTERD, 10: EXIVAR, 11: FREEZE */
+/* NODE_FL: 0..4: T_TYPES, 5: KEEP_WB, 6: OLDGEN, 7: NODE_FL_NEWLINE|NODE_FL_CREF_PUSHED_BY_EVAL,
+ * 8..14: nd_type,
+ * 15..: nd_line or
+ * 15: NODE_FL_CREF_PUSHED_BY_EVAL
+ * 16: NODE_FL_CREF_OMOD_SHARED
+ */
+#define NODE_FL_NEWLINE (((VALUE)1)<<7)
+#define NODE_FL_CREF_PUSHED_BY_EVAL (((VALUE)1)<<15)
+#define NODE_FL_CREF_OMOD_SHARED (((VALUE)1)<<16)
#define NODE_TYPESHIFT 8
#define NODE_TYPEMASK (((VALUE)0x7f)<<NODE_TYPESHIFT)
diff --git a/numeric.c b/numeric.c
index 69cc719..89544dc 100644
--- a/numeric.c
+++ b/numeric.c
@@ -637,7 +637,7 @@ num_to_int(VALUE num)
VALUE
rb_float_new_in_heap(double d)
{
- NEWOBJ_OF(flt, struct RFloat, rb_cFloat, T_FLOAT);
+ NEWOBJ_OF(flt, struct RFloat, rb_cFloat, T_FLOAT | (RGENGC_WB_PROTECTED_FLOAT ? FL_WB_PROTECTED : 0));
flt->float_value = d;
OBJ_FREEZE(flt);
diff --git a/object.c b/object.c
index e15d89f..44cab14 100644
--- a/object.c
+++ b/object.c
@@ -1800,7 +1800,7 @@ rb_obj_alloc(VALUE klass)
static VALUE
rb_class_allocate_instance(VALUE klass)
{
- NEWOBJ_OF(obj, struct RObject, klass, T_OBJECT);
+ NEWOBJ_OF(obj, struct RObject, klass, T_OBJECT | (RGENGC_WB_PROTECTED_OBJECT ? FL_WB_PROTECTED : 0));
return (VALUE)obj;
}
diff --git a/random.c b/random.c
index cb5fd57..2a72d01 100644
--- a/random.c
+++ b/random.c
@@ -540,7 +540,7 @@ make_seed_value(const void *ptr)
{
const long len = DEFAULT_SEED_LEN/SIZEOF_BDIGITS;
BDIGIT *digits;
- NEWOBJ_OF(big, struct RBignum, rb_cBignum, T_BIGNUM);
+ NEWOBJ_OF(big, struct RBignum, rb_cBignum, T_BIGNUM | (RGENGC_WB_PROTECTED_BIGNUM ? FL_WB_PROTECTED : 0));
RBIGNUM_SET_SIGN(big, 1);
rb_big_resize((VALUE)big, len + 1);
diff --git a/range.c b/range.c
index d3ead78..df4ad47 100644
--- a/range.c
+++ b/range.c
@@ -1211,7 +1211,7 @@ static VALUE
range_dumper(VALUE range)
{
VALUE v;
- NEWOBJ_OF(m, struct RObject, rb_cObject, T_OBJECT);
+ NEWOBJ_OF(m, struct RObject, rb_cObject, T_OBJECT | (RGENGC_WB_PROTECTED_OBJECT ? FL_WB_PROTECTED : 0));
v = (VALUE)m;
diff --git a/rational.c b/rational.c
index b7a3ee6..93a5377 100644
--- a/rational.c
+++ b/rational.c
@@ -368,7 +368,7 @@ f_lcm(VALUE x, VALUE y)
inline static VALUE
nurat_s_new_internal(VALUE klass, VALUE num, VALUE den)
{
- NEWOBJ_OF(obj, struct RRational, klass, T_RATIONAL);
+ NEWOBJ_OF(obj, struct RRational, klass, T_RATIONAL | (RGENGC_WB_PROTECTED_RATIONAL ? FL_WB_PROTECTED : 0));
RRATIONAL_SET_NUM(obj, num);
RRATIONAL_SET_DEN(obj, den);
diff --git a/string.c b/string.c
index b6adfef..f02e83f 100644
--- a/string.c
+++ b/string.c
@@ -68,7 +68,6 @@ VALUE rb_cSymbol;
if (FL_TEST((s),STR_NOEMBED)) FL_UNSET((s),(ELTS_SHARED|STR_ASSOC));\
} while (0)
-
#define STR_SET_NOEMBED(str) do {\
FL_SET((str), STR_NOEMBED);\
STR_SET_EMBED_LEN((str), 0);\
@@ -119,6 +118,11 @@ VALUE rb_cSymbol;
}\
} while (0)
+#define STR_SET_SHARED(str, shared_str) do { \
+ OBJ_WRITE((str), &RSTRING(str)->as.heap.aux.shared, (shared_str)); \
+ FL_SET((str), ELTS_SHARED); \
+} while (0)
+
#define is_ascii_string(str) (rb_enc_str_coderange(str) == ENC_CODERANGE_7BIT)
#define is_broken_string(str) (rb_enc_str_coderange(str) == ENC_CODERANGE_BROKEN)
@@ -375,7 +379,7 @@ rb_str_capacity(VALUE str)
static inline VALUE
str_alloc(VALUE klass)
{
- NEWOBJ_OF(str, struct RString, klass, T_STRING);
+ NEWOBJ_OF(str, struct RString, klass, T_STRING | (RGENGC_WB_PROTECTED_STRING ? FL_WB_PROTECTED : 0));
str->as.heap.ptr = 0;
str->as.heap.len = 0;
@@ -649,8 +653,7 @@ str_replace_shared_without_enc(VALUE str2, VALUE str)
FL_SET(str2, STR_NOEMBED);
RSTRING(str2)->as.heap.len = RSTRING_LEN(str);
RSTRING(str2)->as.heap.ptr = RSTRING_PTR(str);
- RSTRING(str2)->as.heap.aux.shared = str;
- FL_SET(str2, ELTS_SHARED);
+ STR_SET_SHARED(str2, str);
}
return str2;
}
@@ -699,12 +702,10 @@ str_new4(VALUE klass, VALUE str)
if (STR_SHARED_P(str)) {
VALUE shared = RSTRING(str)->as.heap.aux.shared;
assert(OBJ_FROZEN(shared));
- FL_SET(str2, ELTS_SHARED);
- RSTRING(str2)->as.heap.aux.shared = shared;
+ STR_SET_SHARED(str2, shared); /* TODO: WB is not needed because str2 is *new* object */
}
else {
- FL_SET(str, ELTS_SHARED);
- RSTRING(str)->as.heap.aux.shared = str2;
+ STR_SET_SHARED(str, str2);
}
rb_enc_cr_str_exact_copy(str2, str);
OBJ_INFECT(str2, str);
@@ -720,6 +721,9 @@ rb_str_new_frozen(VALUE orig)
klass = rb_obj_class(orig);
if (STR_SHARED_P(orig) && (str = RSTRING(orig)->as.heap.aux.shared)) {
long ofs;
+ if (!OBJ_FROZEN(str)) {
+ rb_bug("rb_str_new_frozen: shared string %p is not frozen", (void *)str);
+ }
assert(OBJ_FROZEN(str));
ofs = RSTRING_LEN(str) - RSTRING_LEN(orig);
if ((ofs > 0) || (klass != RBASIC(str)->klass) ||
@@ -742,7 +746,8 @@ rb_str_new_frozen(VALUE orig)
FL_UNSET(orig, STR_ASSOC);
str = str_new4(klass, orig);
FL_SET(str, STR_ASSOC);
- RSTRING(str)->as.heap.aux.shared = assoc;
+ OBJ_WRITE(str, &RSTRING(str)->as.heap.aux.shared, assoc);
+ /* TODO: WB is not needed because str is new object */
}
else {
str = str_new4(klass, orig);
@@ -878,8 +883,9 @@ rb_str_shared_replace(VALUE str, VALUE str2)
RSTRING(str)->as.heap.ptr = RSTRING_PTR(str2);
RSTRING(str)->as.heap.len = RSTRING_LEN(str2);
if (STR_NOCAPA_P(str2)) {
+ VALUE shared = RSTRING(str2)->as.heap.aux.shared;
FL_SET(str, RBASIC(str2)->flags & STR_NOCAPA);
- RSTRING(str)->as.heap.aux.shared = RSTRING(str2)->as.heap.aux.shared;
+ OBJ_WRITE(str, &RSTRING(str)->as.heap.aux.shared, shared);
}
else {
RSTRING(str)->as.heap.aux.capa = RSTRING(str2)->as.heap.aux.capa;
@@ -925,7 +931,7 @@ str_replace(VALUE str, VALUE str2)
RSTRING(str)->as.heap.ptr = RSTRING_PTR(str2);
FL_SET(str, ELTS_SHARED);
FL_UNSET(str, STR_ASSOC);
- RSTRING(str)->as.heap.aux.shared = shared;
+ STR_SET_SHARED(str, shared);
}
else {
str_replace_shared(str, str2);
@@ -1447,7 +1453,7 @@ rb_str_associate(VALUE str, VALUE add)
}
FL_SET(str, STR_ASSOC);
RBASIC_CLEAR_CLASS(add);
- RSTRING(str)->as.heap.aux.shared = add;
+ OBJ_WRITE(str, &RSTRING(str)->as.heap.aux.shared, add);
}
}
diff --git a/variable.c b/variable.c
index 2c7a0b5..5f06a9d 100644
--- a/variable.c
+++ b/variable.c
@@ -940,9 +940,11 @@ generic_ivar_set(VALUE obj, ID id, VALUE val)
tbl = st_init_numtable();
st_add_direct(generic_iv_tbl, (st_data_t)obj, (st_data_t)tbl);
st_add_direct(tbl, (st_data_t)id, (st_data_t)val);
+ if (FL_ABLE(obj)) OBJ_WRITTEN(obj, Qundef, val);
return;
}
st_insert((st_table *)data, (st_data_t)id, (st_data_t)val);
+ if (FL_ABLE(obj)) OBJ_WRITTEN(obj, data, val);
}
static VALUE
@@ -1181,12 +1183,13 @@ rb_ivar_set(VALUE obj, ID id, VALUE val)
ROBJECT(obj)->as.heap.iv_index_tbl = iv_index_tbl;
}
}
- ROBJECT_IVPTR(obj)[index] = val;
+ OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[index], val);
break;
case T_CLASS:
case T_MODULE:
if (!RCLASS_IV_TBL(obj)) RCLASS_IV_TBL(obj) = st_init_numtable();
st_insert(RCLASS_IV_TBL(obj), (st_data_t)id, val);
+ OBJ_WRITTEN(obj, Qundef, val);
break;
default:
generic:
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 98ca16c..bd3620a 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -577,7 +577,7 @@ vm_setivar(VALUE obj, ID id, VALUE val, IC ic, rb_call_info_t *ci, int is_attr)
VALUE *ptr = ROBJECT_IVPTR(obj);
if (index < len) {
- ptr[index] = val;
+ OBJ_WRITE(obj, &ptr[index], val);
return val; /* inline cache hit */
}
}