summary refs log tree commit diff
path: root/gc.c
diff options
context:
space:
mode:
Diffstat (limited to 'gc.c')
-rw-r--r--gc.c216
1 files changed, 109 insertions, 107 deletions
diff --git a/gc.c b/gc.c
index 21c719f7ba..ad5969c691 100644
--- a/gc.c
+++ b/gc.c
@@ -20,7 +20,7 @@
#include <setjmp.h>
void re_free_registers _((struct re_registers*));
-void io_fptr_finalize _((struct OpenFile*));
+void rb_io_fptr_finalize _((struct OpenFile*));
#ifndef setjmp
#ifdef HAVE__SETJMP
@@ -54,19 +54,19 @@ xmalloc(size)
void *mem;
if (size < 0) {
- ArgError("negative allocation size (or too big)");
+ rb_raise(rb_eArgError, "negative allocation size (or too big)");
}
if (size == 0) size = 1;
malloc_memories += size;
if (malloc_memories > GC_MALLOC_LIMIT && alloc_objects > GC_NEWOBJ_LIMIT) {
- gc_gc();
+ rb_gc();
}
mem = malloc(size);
if (!mem) {
- gc_gc();
+ rb_gc();
mem = malloc(size);
if (!mem)
- Fatal("failed to allocate memory");
+ rb_fatal("failed to allocate memory");
}
return mem;
@@ -92,16 +92,16 @@ xrealloc(ptr, size)
void *mem;
if (size < 0) {
- ArgError("negative re-allocation size");
+ rb_raise(rb_eArgError, "negative re-allocation size");
}
if (!ptr) return xmalloc(size);
malloc_memories += size;
mem = realloc(ptr, size);
if (!mem) {
- gc_gc();
+ rb_gc();
mem = realloc(ptr, size);
if (!mem)
- Fatal("failed to allocate memory(realloc)");
+ rb_fatal("failed to allocate memory(realloc)");
}
return mem;
@@ -140,25 +140,25 @@ Cambridge, MA 02138
extern int rb_in_compile;
static int dont_gc;
-VALUE
-gc_s_enable()
+static VALUE
+gc_enable()
{
int old = dont_gc;
- dont_gc = FALSE;
+ dont_gc = Qfalse;
return old;
}
-VALUE
-gc_s_disable()
+static VALUE
+gc_disable()
{
int old = dont_gc;
- dont_gc = TRUE;
+ dont_gc = Qtrue;
return old;
}
-VALUE mGC;
+VALUE rb_mGC;
static struct gc_list {
VALUE *varptr;
@@ -225,11 +225,11 @@ add_heap()
heaps = (heaps_used>0)?
(RVALUE**)realloc(heaps, heaps_length*sizeof(RVALUE)):
(RVALUE**)malloc(heaps_length*sizeof(RVALUE));
- if (heaps == 0) Fatal("can't alloc memory");
+ if (heaps == 0) rb_fatal("can't alloc memory");
}
p = heaps[heaps_used++] = (RVALUE*)malloc(sizeof(RVALUE)*HEAP_SLOTS);
- if (p == 0) Fatal("can't alloc memory");
+ if (p == 0) rb_fatal("add_heap: can't alloc memory");
pend = p + HEAP_SLOTS;
if (lomem == 0 || lomem > p) lomem = p;
if (himem < pend) himem = pend;
@@ -255,14 +255,14 @@ rb_newobj()
alloc_objects++;
return obj;
}
- if (dont_gc || prohibit_interrupt) add_heap();
- else gc_gc();
+ if (dont_gc || rb_prohibit_interrupt) add_heap();
+ else rb_gc();
goto retry;
}
VALUE
-data_object_alloc(klass, datap, dmark, dfree)
+rb_data_object_alloc(klass, datap, dmark, dfree)
VALUE klass;
void *datap;
void (*dfree)();
@@ -278,7 +278,7 @@ data_object_alloc(klass, datap, dmark, dfree)
}
extern st_table *rb_class_tbl;
-VALUE *gc_stack_start;
+VALUE *rb_gc_stack_start;
static int
looks_pointerp(ptr)
@@ -288,16 +288,16 @@ looks_pointerp(ptr)
register RVALUE *heap_org;
register long i;
- if (p < lomem || p > himem) return FALSE;
+ if (p < lomem || p > himem) return Qfalse;
/* check if p looks like a pointer */
for (i=0; i < heaps_used; i++) {
heap_org = heaps[i];
if (heap_org <= p && p < heap_org + HEAP_SLOTS
&& ((((char*)p)-((char*)heap_org))%sizeof(RVALUE)) == 0)
- return TRUE;
+ return Qtrue;
}
- return FALSE;
+ return Qfalse;
}
static void
@@ -307,14 +307,14 @@ mark_locations_array(x, n)
{
while (n--) {
if (looks_pointerp(*x)) {
- gc_mark(*x);
+ rb_gc_mark(*x);
}
x++;
}
}
void
-gc_mark_locations(start, end)
+rb_gc_mark_locations(start, end)
VALUE *start, *end;
{
VALUE *tmp;
@@ -334,7 +334,7 @@ mark_entry(key, value)
ID key;
VALUE value;
{
- gc_mark(value);
+ rb_gc_mark(value);
return ST_CONTINUE;
}
@@ -351,8 +351,8 @@ mark_hashentry(key, value)
ID key;
VALUE value;
{
- gc_mark(key);
- gc_mark(value);
+ rb_gc_mark(key);
+ rb_gc_mark(value);
return ST_CONTINUE;
}
@@ -365,16 +365,16 @@ mark_hash(tbl)
}
void
-gc_mark_maybe(obj)
+rb_gc_mark_maybe(obj)
void *obj;
{
if (looks_pointerp(obj)) {
- gc_mark(obj);
+ rb_gc_mark(obj);
}
}
void
-gc_mark(ptr)
+rb_gc_mark(ptr)
void *ptr;
{
register RVALUE *obj = RANY(ptr);
@@ -390,7 +390,7 @@ gc_mark(ptr)
switch (obj->as.basic.flags & T_MASK) {
case T_NIL:
case T_FIXNUM:
- Bug("gc_mark() called for broken object");
+ rb_bug("rb_gc_mark() called for broken object");
break;
case T_NODE:
@@ -399,7 +399,7 @@ gc_mark(ptr)
case NODE_FOR:
case NODE_ITER:
case NODE_CREF:
- gc_mark(obj->as.node.u2.node);
+ rb_gc_mark(obj->as.node.u2.node);
/* fall through */
case NODE_BLOCK: /* 1,3 */
case NODE_ARRAY:
@@ -413,7 +413,7 @@ gc_mark(ptr)
#ifdef C_ALLOCA
case NODE_ALLOCA:
#endif
- gc_mark(obj->as.node.u1.node);
+ rb_gc_mark(obj->as.node.u1.node);
/* fall through */
case NODE_SUPER: /* 3 */
case NODE_FCALL:
@@ -425,7 +425,7 @@ gc_mark(ptr)
case NODE_UNTIL:
case NODE_MATCH2:
case NODE_MATCH3:
- gc_mark(obj->as.node.u1.node);
+ rb_gc_mark(obj->as.node.u1.node);
/* fall through */
case NODE_METHOD: /* 2 */
case NODE_NOT:
@@ -447,7 +447,7 @@ gc_mark(ptr)
goto Top;
case NODE_SCOPE: /* 2,3 */
- gc_mark(obj->as.node.u3.node);
+ rb_gc_mark(obj->as.node.u3.node);
obj = RANY(obj->as.node.u2.node);
goto Top;
@@ -471,10 +471,10 @@ gc_mark(ptr)
default:
if (looks_pointerp(obj->as.node.u1.node)) {
- gc_mark(obj->as.node.u1.node);
+ rb_gc_mark(obj->as.node.u1.node);
}
if (looks_pointerp(obj->as.node.u2.node)) {
- gc_mark(obj->as.node.u2.node);
+ rb_gc_mark(obj->as.node.u2.node);
}
if (looks_pointerp(obj->as.node.u3.node)) {
obj = RANY(obj->as.node.u3.node);
@@ -484,17 +484,17 @@ gc_mark(ptr)
return; /* no need to mark class. */
}
- gc_mark(obj->as.basic.klass);
+ rb_gc_mark(obj->as.basic.klass);
switch (obj->as.basic.flags & T_MASK) {
case T_ICLASS:
- gc_mark(obj->as.klass.super);
+ rb_gc_mark(obj->as.klass.super);
mark_tbl(obj->as.klass.iv_tbl);
mark_tbl(obj->as.klass.m_tbl);
break;
case T_CLASS:
case T_MODULE:
- gc_mark(obj->as.klass.super);
+ rb_gc_mark(obj->as.klass.super);
mark_tbl(obj->as.klass.m_tbl);
mark_tbl(obj->as.klass.iv_tbl);
break;
@@ -505,12 +505,13 @@ gc_mark(ptr)
VALUE *ptr = obj->as.array.ptr;
for (i=0; i < len; i++)
- gc_mark(*ptr++);
+ rb_gc_mark(*ptr++);
}
break;
case T_HASH:
mark_hash(obj->as.hash.tbl);
+ rb_gc_mark(obj->as.hash.ifnone);
break;
case T_STRING:
@@ -542,7 +543,7 @@ gc_mark(ptr)
break;
case T_VARMAP:
- gc_mark(obj->as.varmap.val);
+ rb_gc_mark(obj->as.varmap.val);
obj = RANY(obj->as.varmap.next);
goto Top;
break;
@@ -553,7 +554,7 @@ gc_mark(ptr)
VALUE *vars = &obj->as.scope.local_vars[-1];
while (n--) {
- gc_mark_maybe(*vars);
+ rb_gc_mark(*vars);
vars++;
}
}
@@ -565,14 +566,14 @@ gc_mark(ptr)
VALUE *ptr = obj->as.rstruct.ptr;
for (i=0; i < len; i++)
- gc_mark(*ptr++);
+ rb_gc_mark(*ptr++);
}
break;
default:
- Bug("gc_mark(): unknown data type 0x%x(0x%x) %s",
- obj->as.basic.flags & T_MASK, obj,
- looks_pointerp(obj)?"corrupted object":"non object");
+ rb_bug("rb_gc_mark(): unknown data type 0x%x(0x%x) %s",
+ obj->as.basic.flags & T_MASK, obj,
+ looks_pointerp(obj)?"corrupted object":"non object");
}
}
@@ -588,11 +589,12 @@ gc_sweep()
int i;
if (rb_in_compile) {
+	/* should not reclaim nodes during compilation */
for (i = 0; i < heaps_used; i++) {
p = heaps[i]; pend = p + HEAP_SLOTS;
while (p < pend) {
if (!(p->as.basic.flags&FL_MARK) && BUILTIN_TYPE(p) == T_NODE)
- gc_mark(p);
+ rb_gc_mark(p);
p++;
}
}
@@ -628,7 +630,7 @@ gc_sweep()
}
void
-gc_force_recycle(p)
+rb_gc_force_recycle(p)
VALUE p;
{
RANY(p)->as.free.flag = 0;
@@ -647,11 +649,11 @@ obj_free(obj)
case T_FIXNUM:
case T_TRUE:
case T_FALSE:
- Bug("obj_free() called for broken object");
+ rb_bug("obj_free() called for broken object");
break;
}
- if (need_call_final) {
+ if (need_call_final && FL_TEST(obj, FL_FINALIZE)) {
run_final(obj);
}
switch (RANY(obj)->as.basic.flags & T_MASK) {
@@ -705,7 +707,7 @@ obj_free(obj)
break;
case T_FILE:
if (RANY(obj)->as.file.fptr) {
- io_fptr_finalize(RANY(obj)->as.file.fptr);
+ rb_io_fptr_finalize(RANY(obj)->as.file.fptr);
free(RANY(obj)->as.file.fptr);
}
break;
@@ -742,22 +744,23 @@ obj_free(obj)
break;
default:
- Bug("gc_sweep(): unknown data type %d", RANY(obj)->as.basic.flags & T_MASK);
+ rb_bug("gc_sweep(): unknown data type %d",
+ RANY(obj)->as.basic.flags & T_MASK);
}
}
void
-gc_mark_frame(frame)
+rb_gc_mark_frame(frame)
struct FRAME *frame;
{
int n = frame->argc;
VALUE *tbl = frame->argv;
while (n--) {
- gc_mark_maybe(*tbl);
+ rb_gc_mark_maybe(*tbl);
tbl++;
}
- gc_mark(frame->cbase);
+ rb_gc_mark(frame->cbase);
}
#ifdef __GNUC__
@@ -796,7 +799,7 @@ int rb_setjmp (rb_jmp_buf);
#endif /* __GNUC__ */
void
-gc_gc()
+rb_gc()
{
struct gc_list *list;
struct FRAME *frame;
@@ -812,52 +815,52 @@ gc_gc()
#endif
/* mark frame stack */
- for (frame = the_frame; frame; frame = frame->prev) {
- gc_mark_frame(frame);
+ for (frame = ruby_frame; frame; frame = frame->prev) {
+ rb_gc_mark_frame(frame);
}
- gc_mark(the_scope);
- gc_mark(the_dyna_vars);
+ rb_gc_mark(ruby_scope);
+ rb_gc_mark(ruby_dyna_vars);
FLUSH_REGISTER_WINDOWS;
/* This assumes that all registers are saved into the jmp_buf */
setjmp(save_regs_gc_mark);
mark_locations_array((VALUE*)&save_regs_gc_mark, sizeof(save_regs_gc_mark) / sizeof(VALUE *));
- gc_mark_locations(gc_stack_start, (VALUE*)&stack_end);
+ rb_gc_mark_locations(rb_gc_stack_start, (VALUE*)&stack_end);
#if defined(THINK_C) || defined(__human68k__)
#ifndef __human68k__
mark_locations_array((VALUE*)((char*)save_regs_gc_mark+2),
sizeof(save_regs_gc_mark) / sizeof(VALUE *));
#endif
- gc_mark_locations((VALUE*)((char*)gc_stack_start + 2),
+ rb_gc_mark_locations((VALUE*)((char*)rb_gc_stack_start + 2),
(VALUE*)((char*)&stack_end + 2));
#endif
#ifdef THREAD
- gc_mark_threads();
+ rb_gc_mark_threads();
#endif
/* mark protected global variables */
for (list = Global_List; list; list = list->next) {
- gc_mark(*list->varptr);
+ rb_gc_mark(*list->varptr);
}
- gc_mark_global_tbl();
+ rb_gc_mark_global_tbl();
mark_tbl(rb_class_tbl);
- gc_mark_trap_list();
+ rb_gc_mark_trap_list();
gc_sweep();
dont_gc--;
}
static VALUE
-gc_method()
+gc_start()
{
- gc_gc();
+ rb_gc();
return Qnil;
}
void
-init_stack()
+Init_stack()
{
#ifdef __human68k__
extern void *_SEND;
@@ -865,14 +868,14 @@ init_stack()
#else
VALUE start;
- gc_stack_start = &start;
+ rb_gc_stack_start = &start;
#endif
}
void
-init_heap()
+Init_heap()
{
- init_stack();
+ Init_stack();
add_heap();
}
@@ -929,7 +932,7 @@ os_obj_of(of)
case T_CLASS:
if (FL_TEST(p, FL_SINGLETON)) continue;
default:
- if (obj_is_kind_of((VALUE)p, of)) {
+ if (rb_obj_is_kind_of((VALUE)p, of)) {
rb_yield((VALUE)p);
n++;
}
@@ -962,11 +965,11 @@ static VALUE
add_final(os, proc)
VALUE os, proc;
{
- if (!obj_is_kind_of(proc, cProc)) {
- ArgError("wrong type argument %s (Proc required)",
- rb_class2name(CLASS_OF(proc)));
+ if (!rb_obj_is_kind_of(proc, rb_cProc)) {
+ rb_raise(rb_eArgError, "wrong type argument %s (Proc required)",
+ rb_class2name(CLASS_OF(proc)));
}
- ary_push(finalizers, proc);
+ rb_ary_push(finalizers, proc);
return proc;
}
@@ -974,7 +977,7 @@ static VALUE
rm_final(os, proc)
VALUE os, proc;
{
- ary_delete(finalizers, proc);
+ rb_ary_delete(finalizers, proc);
return proc;
}
@@ -999,16 +1002,14 @@ run_final(obj)
{
int i;
- if (!FL_TEST(obj, FL_FINALIZE)) return;
-
obj = INT2NUM((long)obj); /* make obj into id */
for (i=0; i<RARRAY(finalizers)->len; i++) {
- rb_eval_cmd(RARRAY(finalizers)->ptr[i], ary_new3(1,obj));
+ rb_eval_cmd(RARRAY(finalizers)->ptr[i], rb_ary_new3(1,obj));
}
}
void
-gc_call_finalizer_at_exit()
+rb_gc_call_finalizer_at_exit()
{
RVALUE *p, *pend;
int i;
@@ -1017,7 +1018,8 @@ gc_call_finalizer_at_exit()
for (i = 0; i < heaps_used; i++) {
p = heaps[i]; pend = p + HEAP_SLOTS;
while (p < pend) {
- run_final((VALUE)p);
+ if (FL_TEST(p, FL_FINALIZE))
+ run_final((VALUE)p);
p++;
}
}
@@ -1041,15 +1043,15 @@ id2ref(obj, id)
unsigned long ptr = NUM2UINT(id);
if (FIXNUM_P(ptr)) return (VALUE)ptr;
- if (ptr == TRUE) return TRUE;
- if (ptr == FALSE) return FALSE;
+ if (ptr == Qtrue) return Qtrue;
+ if (ptr == Qfalse) return Qfalse;
if (ptr == Qnil) return Qnil;
if (!looks_pointerp(ptr)) {
- IndexError("0x%x is not the id value", ptr);
+ rb_raise(rb_eIndexError, "0x%x is not the id value", ptr);
}
if (RANY(ptr)->as.free.flag == 0) {
- IndexError("0x%x is recycled object", ptr);
+ rb_raise(rb_eIndexError, "0x%x is recycled object", ptr);
}
return (VALUE)ptr;
}
@@ -1057,23 +1059,23 @@ id2ref(obj, id)
void
Init_GC()
{
- VALUE mObSpace;
-
- mGC = rb_define_module("GC");
- rb_define_singleton_method(mGC, "start", gc_method, 0);
- rb_define_singleton_method(mGC, "enable", gc_s_enable, 0);
- rb_define_singleton_method(mGC, "disable", gc_s_disable, 0);
- rb_define_method(mGC, "garbage_collect", gc_method, 0);
-
- mObSpace = rb_define_module("ObjectSpace");
- rb_define_module_function(mObSpace, "each_object", os_each_obj, -1);
- rb_define_module_function(mObSpace, "garbage_collect", gc_method, 0);
- rb_define_module_function(mObSpace, "add_finalizer", add_final, 1);
- rb_define_module_function(mObSpace, "remove_finalizer", rm_final, 1);
- rb_define_module_function(mObSpace, "finalizers", finals, 0);
- rb_define_module_function(mObSpace, "call_finalizer", call_final, 1);
- rb_define_module_function(mObSpace, "_id2ref", id2ref, 1);
+ VALUE rb_mObSpace;
+
+ rb_mGC = rb_define_module("GC");
+ rb_define_singleton_method(rb_mGC, "start", gc_start, 0);
+ rb_define_singleton_method(rb_mGC, "enable", gc_enable, 0);
+ rb_define_singleton_method(rb_mGC, "disable", gc_disable, 0);
+ rb_define_method(rb_mGC, "garbage_collect", gc_start, 0);
+
+ rb_mObSpace = rb_define_module("ObjectSpace");
+ rb_define_module_function(rb_mObSpace, "each_object", os_each_obj, -1);
+ rb_define_module_function(rb_mObSpace, "garbage_collect", gc_start, 0);
+ rb_define_module_function(rb_mObSpace, "add_finalizer", add_final, 1);
+ rb_define_module_function(rb_mObSpace, "remove_finalizer", rm_final, 1);
+ rb_define_module_function(rb_mObSpace, "finalizers", finals, 0);
+ rb_define_module_function(rb_mObSpace, "call_finalizer", call_final, 1);
+ rb_define_module_function(rb_mObSpace, "_id2ref", id2ref, 1);
rb_global_variable(&finalizers);
- finalizers = ary_new();
+ finalizers = rb_ary_new();
}