-rw-r--r--  debug_counter.h |  2
-rw-r--r--  gc.c            |  7
-rw-r--r--  imemo.c         | 24
-rw-r--r--  iseq.c          | 16
-rw-r--r--  vm.c            |  4
-rw-r--r--  vm_callinfo.h   | 32
-rw-r--r--  vm_eval.c       |  4
-rw-r--r--  vm_method.c     |  2
8 files changed, 49 insertions(+), 42 deletions(-)
diff --git a/debug_counter.h b/debug_counter.h
index 8ffce66f0f..c8d8ed8f11 100644
--- a/debug_counter.h
+++ b/debug_counter.h
@@ -49,7 +49,7 @@ RB_DEBUG_COUNTER(cc_temp) // dummy CC (stack-allocated)
RB_DEBUG_COUNTER(cc_found_in_ccs) // count for CC lookup success in CCS
RB_DEBUG_COUNTER(cc_not_found_in_ccs) // count for CC lookup failure in CCS
-RB_DEBUG_COUNTER(cc_ent_invalidate) // count for invalidating cc (cc->klass = 0)
+RB_DEBUG_COUNTER(cc_ent_invalidate) // count for invalidating cc (cc->klass = Qundef)
RB_DEBUG_COUNTER(cc_cme_invalidate) // count for invalidating CME
RB_DEBUG_COUNTER(cc_invalidate_leaf) // count for invalidating klass if klass has no-subclasses
diff --git a/gc.c b/gc.c
index 499d0cda8d..d685a7a836 100644
--- a/gc.c
+++ b/gc.c
@@ -1209,6 +1209,7 @@ classext_free(rb_classext_t *ext, bool is_prime, VALUE namespace, void *arg)
rb_id_table_free(RCLASSEXT_M_TBL(ext));
rb_cc_tbl_free(RCLASSEXT_CC_TBL(ext), args->klass);
+
if (!RCLASSEXT_SHARED_CONST_TBL(ext) && (tbl = RCLASSEXT_CONST_TBL(ext)) != NULL) {
rb_free_const_table(tbl);
}
@@ -1743,7 +1744,7 @@ rb_objspace_free_objects(void *objspace)
int
rb_objspace_garbage_object_p(VALUE obj)
{
- return rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
+ return !SPECIAL_CONST_P(obj) && rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
}
bool
@@ -4924,11 +4925,11 @@ rb_raw_obj_info_buitin_type(char *const buff, const size_t buff_size, const VALU
case imemo_callcache:
{
const struct rb_callcache *cc = (const struct rb_callcache *)obj;
- VALUE class_path = cc->klass ? rb_class_path_cached(cc->klass) : Qnil;
+ VALUE class_path = vm_cc_valid(cc) ? rb_class_path_cached(cc->klass) : Qnil;
const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
APPEND_F("(klass:%s cme:%s%s (%p) call:%p",
- NIL_P(class_path) ? (cc->klass ? "??" : "<NULL>") : RSTRING_PTR(class_path),
+ NIL_P(class_path) ? (vm_cc_valid(cc) ? "??" : "<NULL>") : RSTRING_PTR(class_path),
cme ? rb_id2name(cme->called_id) : "<NULL>",
cme ? (METHOD_ENTRY_INVALIDATED(cme) ? " [inv]" : "") : "",
(void *)cme,
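
Note: the two gc.c hunks share one motive. An invalidated call cache now stores the special constant Qundef in cc->klass (and, as the imemo.c hunk below shows, the GC itself can clear the slot to Qundef), so values reaching the garbage check are no longer guaranteed to be heap objects. rb_objspace_garbage_object_p therefore filters special constants itself rather than relying on every caller to do it. A minimal sketch of the resulting contract (illustrative only, not part of the patch):

    VALUE slot = Qundef;                          /* e.g. an invalidated cc->klass */
    assert(SPECIAL_CONST_P(slot));                /* Qundef is an immediate, not a heap object */
    assert(!rb_objspace_garbage_object_p(slot));  /* the new guard short-circuits to false */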
diff --git a/imemo.c b/imemo.c
index 7153689030..985ab9aa5d 100644
--- a/imemo.c
+++ b/imemo.c
@@ -273,7 +273,7 @@ rb_imemo_memsize(VALUE obj)
static bool
moved_or_living_object_strictly_p(VALUE obj)
{
- return obj && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
+ return !SPECIAL_CONST_P(obj) && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
}
static void
@@ -353,25 +353,19 @@ rb_imemo_mark_and_move(VALUE obj, bool reference_updating)
*/
struct rb_callcache *cc = (struct rb_callcache *)obj;
if (reference_updating) {
- if (!cc->klass) {
- // already invalidated
+ if (moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
+ *((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
+ *((struct rb_callable_method_entry_struct **)&cc->cme_) =
+ (struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
}
- else {
- if (moved_or_living_object_strictly_p(cc->klass) &&
- moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
- *((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
- *((struct rb_callable_method_entry_struct **)&cc->cme_) =
- (struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
- }
- else {
- vm_cc_invalidate(cc);
- }
+ else if (vm_cc_valid(cc)) {
+ vm_cc_invalidate(cc);
}
}
else {
- if (cc->klass && (vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
+ rb_gc_mark_weak((VALUE *)&cc->klass);
+ if (vm_cc_super_p(cc) || vm_cc_refinement_p(cc)) {
rb_gc_mark_movable((VALUE)cc->cme_);
- rb_gc_mark_movable((VALUE)cc->klass);
}
}
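
Note: the marking branch above is the core of the change. Previously cc->klass was left unmarked and had to be zeroed out of band when the class died; now rb_gc_mark_weak() hands the GC the address of the slot, and when the class is not otherwise reachable the GC overwrites the slot with Qundef, which is precisely the state vm_cc_valid() tests for. A hedged sketch of the pattern, using an illustrative struct that is not part of this patch:

    struct weak_cache {
        VALUE key;  /* weak reference: any GC may rewrite it to Qundef */
    };

    static void
    weak_cache_mark(struct weak_cache *c)
    {
        /* Register &c->key as weak. If the object stored there survives
         * marking only through this reference, the GC stores Qundef into
         * the slot instead of keeping the object alive. */
        rb_gc_mark_weak(&c->key);
    }

    static bool
    weak_cache_valid(const struct weak_cache *c)
    {
        return !UNDEF_P(c->key);  /* mirrors vm_cc_valid() below */
    }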
diff --git a/iseq.c b/iseq.c
index c0523f61d7..4334bdd795 100644
--- a/iseq.c
+++ b/iseq.c
@@ -325,15 +325,13 @@ cc_is_active(const struct rb_callcache *cc, bool reference_updating)
cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
}
- if (vm_cc_markable(cc)) {
- if (cc->klass) { // cc is not invalidated
- const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
- if (reference_updating) {
- cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
- }
- if (!METHOD_ENTRY_INVALIDATED(cme)) {
- return true;
- }
+ if (vm_cc_markable(cc) && vm_cc_valid(cc)) {
+ const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
+ if (reference_updating) {
+ cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
+ }
+ if (!METHOD_ENTRY_INVALIDATED(cme)) {
+ return true;
}
}
}
diff --git a/vm.c b/vm.c
index c46efafb0d..9284a2ce69 100644
--- a/vm.c
+++ b/vm.c
@@ -607,7 +607,7 @@ rb_serial_t ruby_vm_global_cvar_state = 1;
static const struct rb_callcache vm_empty_cc = {
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
- .klass = Qfalse,
+ .klass = Qundef,
.cme_ = NULL,
.call_ = vm_call_general,
.aux_ = {
@@ -617,7 +617,7 @@ static const struct rb_callcache vm_empty_cc = {
static const struct rb_callcache vm_empty_cc_for_super = {
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
- .klass = Qfalse,
+ .klass = Qundef,
.cme_ = NULL,
.call_ = vm_call_super_method,
.aux_ = {
diff --git a/vm_callinfo.h b/vm_callinfo.h
index 0ce25c2c0f..a8806bd4d7 100644
--- a/vm_callinfo.h
+++ b/vm_callinfo.h
@@ -279,9 +279,7 @@ struct rb_callcache {
const VALUE flags;
/* inline cache: key */
- const VALUE klass; // should not mark it because klass can not be free'd
- // because of this marking. When klass is collected,
- // cc will be cleared (cc->klass = 0) at vm_ccs_free().
+ const VALUE klass; // Weak reference. When klass is collected, `cc->klass = Qundef`.
/* inline cache: values */
const struct rb_callable_method_entry_struct * const cme_;
@@ -324,12 +322,20 @@ vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
}
+static inline VALUE
+cc_check_class(VALUE klass)
+{
+ VM_ASSERT(klass == Qundef || RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
+ return klass;
+}
+
static inline const struct rb_callcache *
vm_cc_new(VALUE klass,
const struct rb_callable_method_entry_struct *cme,
vm_call_handler call,
enum vm_cc_type type)
{
+ cc_check_class(klass);
struct rb_callcache *cc = IMEMO_NEW(struct rb_callcache, imemo_callcache, klass);
*((struct rb_callable_method_entry_struct **)&cc->cme_) = (struct rb_callable_method_entry_struct *)cme;
*((vm_call_handler *)&cc->call_) = call;
@@ -374,7 +380,7 @@ vm_cc_refinement_p(const struct rb_callcache *cc)
(imemo_callcache << FL_USHIFT) | \
VM_CALLCACHE_UNMARKABLE | \
VM_CALLCACHE_ON_STACK, \
- .klass = clazz, \
+ .klass = cc_check_class(clazz), \
.cme_ = cme, \
.call_ = call, \
.aux_ = aux, \
@@ -384,8 +390,7 @@ static inline bool
vm_cc_class_check(const struct rb_callcache *cc, VALUE klass)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- VM_ASSERT(cc->klass == 0 ||
- RB_TYPE_P(cc->klass, T_CLASS) || RB_TYPE_P(cc->klass, T_ICLASS));
+ VM_ASSERT(cc_check_class(cc->klass));
return cc->klass == klass;
}
@@ -396,6 +401,15 @@ vm_cc_markable(const struct rb_callcache *cc)
return FL_TEST_RAW((VALUE)cc, VM_CALLCACHE_UNMARKABLE) == 0;
}
+static inline bool
+vm_cc_valid(const struct rb_callcache *cc)
+{
+ VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+ VM_ASSERT(cc_check_class(cc->klass));
+
+ return !UNDEF_P(cc->klass);
+}
+
static inline const struct rb_callable_method_entry_struct *
vm_cc_cme(const struct rb_callcache *cc)
{
@@ -447,7 +461,7 @@ vm_cc_cmethod_missing_reason(const struct rb_callcache *cc)
static inline bool
vm_cc_invalidated_p(const struct rb_callcache *cc)
{
- if (cc->klass && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
+ if (vm_cc_valid(cc) && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
return false;
}
else {
@@ -543,9 +557,9 @@ vm_cc_invalidate(const struct rb_callcache *cc)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());
- VM_ASSERT(cc->klass != 0); // should be enable
+ VM_ASSERT(cc->klass != Qundef); // must not already be invalidated
- *(VALUE *)&cc->klass = 0;
+ *(VALUE *)&cc->klass = Qundef;
RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
}
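
Note: taken together, the vm_callinfo.h hunks replace the scattered `cc->klass == 0` convention with a single Qundef sentinel and two helpers: cc_check_class() asserts that a klass slot only ever holds Qundef, a T_CLASS, or a T_ICLASS, and vm_cc_valid() becomes the one place that defines "not yet invalidated". A hedged usage sketch (the caller is hypothetical; it mirrors invalidate_cc_refinement in vm_method.c below):

    static void
    drop_entry_example(const struct rb_callcache *cc)
    {
        if (vm_cc_valid(cc)) {     /* klass is still a live class  */
            vm_cc_invalidate(cc);  /* writes Qundef into cc->klass */
        }
        /* From here on vm_cc_valid(cc) is false, so lookups such as
         * cc_is_active() in iseq.c skip this entry. */
    }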
diff --git a/vm_eval.c b/vm_eval.c
index dff0e47c7e..68b692ac9c 100644
--- a/vm_eval.c
+++ b/vm_eval.c
@@ -57,7 +57,7 @@ static inline VALUE vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, i
VALUE
rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE *argv, const rb_callable_method_entry_t *cme, int kw_splat)
{
- const struct rb_callcache cc = VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
+ const struct rb_callcache cc = VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
return vm_call0_cc(ec, recv, id, argc, argv, &cc, kw_splat);
}
@@ -104,7 +104,7 @@ vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
static VALUE
vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
{
- calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
+ calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
return vm_call0_body(ec, calling, argv);
}
diff --git a/vm_method.c b/vm_method.c
index faf327b36c..327fcbafdd 100644
--- a/vm_method.c
+++ b/vm_method.c
@@ -409,7 +409,7 @@ invalidate_cc_refinement(st_data_t key, st_data_t data)
VM_ASSERT(vm_cc_refinement_p(cc));
- if (cc->klass) {
+ if (vm_cc_valid(cc)) {
vm_cc_invalidate(cc);
}
}