summaryrefslogtreecommitdiff
path: root/vm_method.c
diff options
context:
space:
mode:
Diffstat (limited to 'vm_method.c')
-rw-r--r--vm_method.c1196
1 files changed, 885 insertions, 311 deletions
diff --git a/vm_method.c b/vm_method.c
index 802e327da2..dcf35527f7 100644
--- a/vm_method.c
+++ b/vm_method.c
@@ -4,7 +4,6 @@
#include "id_table.h"
#include "yjit.h"
-#include "mjit.h"
#define METHOD_DEBUG 0
@@ -19,12 +18,204 @@ static inline rb_method_entry_t *lookup_method_table(VALUE klass, ID id);
#define singleton_removed idSingleton_method_removed
#define undefined idMethod_undefined
#define singleton_undefined idSingleton_method_undefined
-#define attached id__attached__
#define ruby_running (GET_VM()->running)
/* int ruby_running = 0; */
static enum rb_id_table_iterator_result
+mark_cc_entry_i(VALUE ccs_ptr, void *data)
+{
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
+
+ VM_ASSERT(vm_ccs_p(ccs));
+
+ if (METHOD_ENTRY_INVALIDATED(ccs->cme)) {
+ /* Before detaching the CCs from this class, we need to invalidate the cc
+ * since we will no longer be marking the cme on their behalf.
+ */
+ for (int i = 0; i < ccs->len; i++) {
+ const struct rb_callcache *cc = ccs->entries[i].cc;
+ if (cc->klass == Qundef) continue; // already invalidated
+ VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));
+ VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
+ vm_cc_invalidate(cc);
+ }
+ ruby_xfree(ccs);
+ return ID_TABLE_DELETE;
+ }
+ else {
+ rb_gc_mark_movable((VALUE)ccs->cme);
+
+ for (int i = 0; i < ccs->len; i++) {
+ const struct rb_callcache *cc = ccs->entries[i].cc;
+ VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));
+
+ rb_gc_mark_movable((VALUE)cc);
+ }
+ return ID_TABLE_CONTINUE;
+ }
+}
+
+static void
+vm_cc_table_mark(void *data)
+{
+ struct rb_id_table *tbl = (struct rb_id_table *)data;
+ if (tbl) {
+ rb_id_table_foreach_values(tbl, mark_cc_entry_i, NULL);
+ }
+}
+
+static enum rb_id_table_iterator_result
+cc_table_free_i(VALUE ccs_ptr, void *data)
+{
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
+ VM_ASSERT(vm_ccs_p(ccs));
+
+ ruby_xfree(ccs);
+
+ return ID_TABLE_CONTINUE;
+}
+
+static void
+vm_cc_table_free(void *data)
+{
+ struct rb_id_table *tbl = (struct rb_id_table *)data;
+
+ rb_id_table_foreach_values(tbl, cc_table_free_i, NULL);
+ rb_managed_id_table_type.function.dfree(data);
+}
+
+static enum rb_id_table_iterator_result
+cc_table_memsize_i(VALUE ccs_ptr, void *data_ptr)
+{
+ size_t *total_size = data_ptr;
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
+ *total_size += sizeof(*ccs);
+ *total_size += sizeof(ccs->entries[0]) * ccs->capa;
+ return ID_TABLE_CONTINUE;
+}
+
+static size_t
+vm_cc_table_memsize(const void *data)
+{
+ size_t memsize = rb_managed_id_table_type.function.dsize(data);
+ struct rb_id_table *tbl = (struct rb_id_table *)data;
+ rb_id_table_foreach_values(tbl, cc_table_memsize_i, &memsize);
+ return memsize;
+}
+
+static enum rb_id_table_iterator_result
+compact_cc_entry_i(VALUE ccs_ptr, void *data)
+{
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
+
+ ccs->cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)ccs->cme);
+ VM_ASSERT(vm_ccs_p(ccs));
+
+ for (int i=0; i<ccs->len; i++) {
+ ccs->entries[i].cc = (const struct rb_callcache *)rb_gc_location((VALUE)ccs->entries[i].cc);
+ }
+
+ return ID_TABLE_CONTINUE;
+}
+
+static void
+vm_cc_table_compact(void *data)
+{
+ struct rb_id_table *tbl = (struct rb_id_table *)data;
+ rb_id_table_foreach_values(tbl, compact_cc_entry_i, NULL);
+}
+
+static const rb_data_type_t cc_table_type = {
+ .wrap_struct_name = "VM/cc_table",
+ .function = {
+ .dmark = vm_cc_table_mark,
+ .dfree = vm_cc_table_free,
+ .dsize = vm_cc_table_memsize,
+ .dcompact = vm_cc_table_compact,
+ },
+ .parent = &rb_managed_id_table_type,
+ .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE,
+};
+
+VALUE
+rb_vm_cc_table_create(size_t capa)
+{
+ return rb_managed_id_table_create(&cc_table_type, capa);
+}
+
+static enum rb_id_table_iterator_result
+vm_cc_table_dup_i(ID key, VALUE old_ccs_ptr, void *data)
+{
+ VALUE new_table = (VALUE)data;
+ struct rb_class_cc_entries *old_ccs = (struct rb_class_cc_entries *)old_ccs_ptr;
+
+ if (METHOD_ENTRY_INVALIDATED(old_ccs->cme)) {
+ // Invalidated CME. This entry will be removed from the old table on
+ // the next GC mark, so it's unsafe (and undesirable) to copy
+ return ID_TABLE_CONTINUE;
+ }
+
+ size_t memsize = vm_ccs_alloc_size(old_ccs->capa);
+ struct rb_class_cc_entries *new_ccs = ruby_xcalloc(1, memsize);
+ rb_managed_id_table_insert(new_table, key, (VALUE)new_ccs);
+
+ // We hold the VM lock, so invalidation should not have happened between
+ // our earlier invalidation check and now.
+ VM_ASSERT(!METHOD_ENTRY_INVALIDATED(old_ccs->cme));
+
+ memcpy(new_ccs, old_ccs, memsize);
+
+#if VM_CHECK_MODE > 0
+ new_ccs->debug_sig = ~(VALUE)new_ccs;
+#endif
+
+ RB_OBJ_WRITTEN(new_table, Qundef, (VALUE)new_ccs->cme);
+ for (int index = 0; index < new_ccs->len; index++) {
+ RB_OBJ_WRITTEN(new_table, Qundef, new_ccs->entries[index].cc);
+ }
+ return ID_TABLE_CONTINUE;
+}
+
+VALUE
+rb_vm_cc_table_dup(VALUE old_table)
+{
+ ASSERT_vm_locking();
+ VALUE new_table = rb_vm_cc_table_create(rb_managed_id_table_size(old_table));
+ rb_managed_id_table_foreach(old_table, vm_cc_table_dup_i, (void *)new_table);
+ return new_table;
+}
+
+static void
+vm_ccs_invalidate(struct rb_class_cc_entries *ccs)
+{
+ for (int i=0; i<ccs->len; i++) {
+ const struct rb_callcache *cc = ccs->entries[i].cc;
+ VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
+ vm_cc_invalidate(cc);
+ }
+}
+
+static void
+rb_vm_ccs_invalidate_and_free(struct rb_class_cc_entries *ccs)
+{
+ RB_DEBUG_COUNTER_INC(ccs_free);
+ vm_ccs_invalidate(ccs);
+ ruby_xfree(ccs);
+}
+
+void
+rb_vm_cc_table_delete(VALUE table, ID mid)
+{
+ VALUE ccs_obj;
+ if (rb_managed_id_table_lookup(table, mid, &ccs_obj)) {
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;
+ rb_managed_id_table_delete(table, mid);
+ rb_vm_ccs_invalidate_and_free(ccs);
+ }
+}
+
+static enum rb_id_table_iterator_result
vm_ccs_dump_i(ID mid, VALUE val, void *data)
{
const struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)val;
@@ -32,7 +223,6 @@ vm_ccs_dump_i(ID mid, VALUE val, void *data)
rp(ccs->cme);
for (int i=0; i<ccs->len; i++) {
- fprintf(stderr, " | [%d]\t", i); vm_ci_dump(ccs->entries[i].ci);
rp_m( " | \t", ccs->entries[i].cc);
}
@@ -42,18 +232,18 @@ vm_ccs_dump_i(ID mid, VALUE val, void *data)
static void
vm_ccs_dump(VALUE klass, ID target_mid)
{
- struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
+ VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
if (cc_tbl) {
VALUE ccs;
if (target_mid) {
- if (rb_id_table_lookup(cc_tbl, target_mid, &ccs)) {
+ if (rb_managed_id_table_lookup(cc_tbl, target_mid, &ccs)) {
fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
vm_ccs_dump_i(target_mid, ccs, NULL);
}
}
else {
fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
- rb_id_table_foreach(cc_tbl, vm_ccs_dump_i, (void *)target_mid);
+ rb_managed_id_table_foreach(cc_tbl, vm_ccs_dump_i, (void *)target_mid);
}
}
}
@@ -90,18 +280,18 @@ vm_mtbl_dump(VALUE klass, ID target_mid)
else {
fprintf(stderr, " MTBL: NULL\n");
}
- if (RCLASS_CALLABLE_M_TBL(klass)) {
+ if (RCLASS_WRITABLE_CALLABLE_M_TBL(klass)) {
if (target_mid != 0) {
- if (rb_id_table_lookup(RCLASS_CALLABLE_M_TBL(klass), target_mid, &me)) {
+ if (rb_id_table_lookup(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), target_mid, &me)) {
rp_m(" [CM**] ", me);
}
}
else {
fprintf(stderr, " ## RCLASS_CALLABLE_M_TBL\n");
- rb_id_table_foreach(RCLASS_CALLABLE_M_TBL(klass), vm_cme_dump_i, NULL);
+ rb_id_table_foreach(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), vm_cme_dump_i, NULL);
}
}
- if (RCLASS_CC_TBL(klass)) {
+ if (RCLASS_WRITABLE_CC_TBL(klass)) {
vm_ccs_dump(klass, target_mid);
}
klass = RCLASS_SUPER(klass);
@@ -119,25 +309,22 @@ rb_vm_mtbl_dump(const char *msg, VALUE klass, ID target_mid)
static inline void
vm_cme_invalidate(rb_callable_method_entry_t *cme)
{
- VM_ASSERT(IMEMO_TYPE_P(cme, imemo_ment));
+ VM_ASSERT(IMEMO_TYPE_P(cme, imemo_ment), "cme: %d", imemo_type((VALUE)cme));
VM_ASSERT(callable_method_entry_p(cme));
METHOD_ENTRY_INVALIDATED_SET(cme);
RB_DEBUG_COUNTER_INC(cc_cme_invalidate);
rb_yjit_cme_invalidate(cme);
- rb_mjit_cme_invalidate(cme);
+ rb_zjit_cme_invalidate(cme);
}
static int
-rb_clear_constant_cache_for_id_i(st_data_t ic, st_data_t idx, st_data_t arg)
+rb_clear_constant_cache_for_id_i(st_data_t ic, st_data_t arg)
{
((IC) ic)->entry = NULL;
return ST_CONTINUE;
}
-// Here for backward compat.
-void rb_clear_constant_cache(void) {}
-
void
rb_clear_constant_cache_for_id(ID id)
{
@@ -145,13 +332,13 @@ rb_clear_constant_cache_for_id(ID id)
rb_vm_t *vm = GET_VM();
if (rb_id_table_lookup(vm->constant_cache, id, &lookup_result)) {
- st_table *ics = (st_table *)lookup_result;
- st_foreach(ics, rb_clear_constant_cache_for_id_i, (st_data_t) NULL);
+ set_table *ics = (set_table *)lookup_result;
+ set_table_foreach(ics, rb_clear_constant_cache_for_id_i, (st_data_t) NULL);
ruby_vm_constant_cache_invalidations += ics->num_entries;
}
rb_yjit_constant_state_changed(id);
- rb_mjit_constant_state_changed(id);
+ rb_zjit_constant_state_changed(id);
}
static void
@@ -167,110 +354,180 @@ invalidate_negative_cache(ID mid)
}
}
-static rb_method_entry_t *rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, const rb_method_definition_t *def);
const rb_method_entry_t * rb_method_entry_clone(const rb_method_entry_t *src_me);
static const rb_callable_method_entry_t *complemented_callable_method_entry(VALUE klass, ID id);
static const rb_callable_method_entry_t *lookup_overloaded_cme(const rb_callable_method_entry_t *cme);
+static void
+invalidate_method_cache_in_cc_table(VALUE tbl, ID mid)
+{
+ VALUE ccs_data;
+ if (tbl && rb_managed_id_table_lookup(tbl, mid, &ccs_data)) {
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
+ rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
+ rb_zjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
+ if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
+ rb_vm_ccs_invalidate_and_free(ccs);
+ rb_managed_id_table_delete(tbl, mid);
+ RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_ccs);
+ }
+}
+
+static void
+invalidate_callable_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
+{
+ VALUE cme;
+ if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
+ rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ rb_id_table_delete(tbl, mid);
+ RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
+ }
+}
+
+struct invalidate_callable_method_entry_foreach_arg {
+ VALUE klass;
+ ID mid;
+ const rb_method_entry_t *cme;
+ const rb_method_entry_t *newer;
+};
+
+static void
+invalidate_callable_method_entry_in_every_m_table_i(rb_classext_t *ext, bool is_prime, VALUE box_value, void *data)
+{
+ st_data_t me;
+ struct invalidate_callable_method_entry_foreach_arg *arg = (struct invalidate_callable_method_entry_foreach_arg *)data;
+ struct rb_id_table *tbl = RCLASSEXT_M_TBL(ext);
+
+ if (rb_id_table_lookup(tbl, arg->mid, &me) && arg->cme == (const rb_method_entry_t *)me) {
+ rb_method_table_insert(arg->klass, tbl, arg->mid, arg->newer);
+ }
+}
+
+static void
+invalidate_callable_method_entry_in_every_m_table(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
+{
+ // The argument cme must be invalidated later in the caller side
+ const rb_method_entry_t *newer = rb_method_entry_clone((const rb_method_entry_t *)cme);
+ struct invalidate_callable_method_entry_foreach_arg arg = {
+ .klass = klass,
+ .mid = mid,
+ .cme = (const rb_method_entry_t *) cme,
+ .newer = newer,
+ };
+ rb_class_classext_foreach(klass, invalidate_callable_method_entry_in_every_m_table_i, (void *)&arg);
+}
+
+static void
+invalidate_complemented_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
+{
+ VALUE cme;
+ if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
+ rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ rb_id_table_delete(tbl, mid);
+ RB_DEBUG_COUNTER_INC(cc_invalidate_tree_callable);
+ }
+}
static void
clear_method_cache_by_id_in_class(VALUE klass, ID mid)
{
- VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
+ VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);
if (rb_objspace_garbage_object_p(klass)) return;
- RB_VM_LOCK_ENTER();
- if (LIKELY(RCLASS_SUBCLASSES(klass) == NULL)) {
- // no subclasses
- // check only current class
+ RB_VM_LOCKING() {
+ rb_vm_barrier();
- struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
- VALUE ccs_data;
+ if (LIKELY(RCLASS_SUBCLASSES_FIRST(klass) == NULL)) {
+ // no subclasses
+ // check only current class
- // invalidate CCs
- if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
- struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
- rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
- rb_mjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
- if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
- rb_vm_ccs_free(ccs);
- rb_id_table_delete(cc_tbl, mid);
- RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_ccs);
- }
-
- // remove from callable_m_tbl, if exists
- struct rb_id_table *cm_tbl;
- if ((cm_tbl = RCLASS_CALLABLE_M_TBL(klass)) != NULL) {
- VALUE cme;
- if (rb_yjit_enabled_p() && rb_id_table_lookup(cm_tbl, mid, &cme)) {
- rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
- rb_mjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ // invalidate CCs
+ VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
+ invalidate_method_cache_in_cc_table(cc_tbl, mid);
+ if (RCLASS_CC_TBL_NOT_PRIME_P(klass, cc_tbl)) {
+ invalidate_method_cache_in_cc_table(RCLASS_PRIME_CC_TBL(klass), mid);
}
- rb_id_table_delete(cm_tbl, mid);
- RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
+
+ // remove from callable_m_tbl, if exists
+ struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(klass);
+ invalidate_callable_method_entry_in_callable_m_table(cm_tbl, mid);
+ if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(klass, cm_tbl)) {
+ invalidate_callable_method_entry_in_callable_m_table(RCLASS_PRIME_CALLABLE_M_TBL(klass), mid);
+ }
+
+ RB_DEBUG_COUNTER_INC(cc_invalidate_leaf);
}
- RB_DEBUG_COUNTER_INC(cc_invalidate_leaf);
- }
- else {
- const rb_callable_method_entry_t *cme = complemented_callable_method_entry(klass, mid);
+ else {
+ const rb_callable_method_entry_t *cme = complemented_callable_method_entry(klass, mid);
- if (cme) {
- // invalidate cme if found to invalidate the inline method cache.
- if (METHOD_ENTRY_CACHED(cme)) {
- if (METHOD_ENTRY_COMPLEMENTED(cme)) {
- // do nothing
- }
- else {
- // invalidate cc by invalidating cc->cme
- VALUE owner = cme->owner;
- VM_ASSERT(BUILTIN_TYPE(owner) == T_CLASS);
- VALUE klass_housing_cme;
- if (cme->def->type == VM_METHOD_TYPE_REFINED && !cme->def->body.refined.orig_me) {
- klass_housing_cme = owner;
+ if (cme) {
+ // invalidate cme if found to invalidate the inline method cache.
+ if (METHOD_ENTRY_CACHED(cme)) {
+ if (METHOD_ENTRY_COMPLEMENTED(cme)) {
+ // do nothing
}
else {
- klass_housing_cme = RCLASS_ORIGIN(owner);
+ // invalidate cc by invalidating cc->cme
+ VALUE owner = cme->owner;
+ VM_ASSERT_TYPE(owner, T_CLASS);
+ VALUE klass_housing_cme;
+ if (cme->def->type == VM_METHOD_TYPE_REFINED && !cme->def->body.refined.orig_me) {
+ klass_housing_cme = owner;
+ }
+ else {
+ klass_housing_cme = RCLASS_ORIGIN(owner);
+ }
+
+ // replace the cme that will be invalid in the all classexts
+ invalidate_callable_method_entry_in_every_m_table(klass_housing_cme, mid, cme);
}
- // replace the cme that will be invalid
- VM_ASSERT(lookup_method_table(klass_housing_cme, mid) == (const rb_method_entry_t *)cme);
- const rb_method_entry_t *new_cme = rb_method_entry_clone((const rb_method_entry_t *)cme);
- rb_method_table_insert(klass_housing_cme, RCLASS_M_TBL(klass_housing_cme), mid, new_cme);
- }
- vm_cme_invalidate((rb_callable_method_entry_t *)cme);
- RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme);
+ vm_cme_invalidate((rb_callable_method_entry_t *)cme);
+ RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme);
+
+ // In case of refinement ME, also invalidate the wrapped ME that
+ // could be cached at some callsite and is unreachable from any
+ // RCLASS_WRITABLE_CC_TBL.
+ if (cme->def->type == VM_METHOD_TYPE_REFINED && cme->def->body.refined.orig_me) {
+ vm_cme_invalidate((rb_callable_method_entry_t *)cme->def->body.refined.orig_me);
+ }
- if (cme->def->iseq_overload) {
- rb_callable_method_entry_t *monly_cme = (rb_callable_method_entry_t *)lookup_overloaded_cme(cme);
- if (monly_cme) {
- vm_cme_invalidate(monly_cme);
+ if (cme->def->iseq_overload) {
+ rb_callable_method_entry_t *monly_cme = (rb_callable_method_entry_t *)lookup_overloaded_cme(cme);
+ if (monly_cme) {
+ vm_cme_invalidate(monly_cme);
+ }
}
}
- }
- // invalidate complement tbl
- if (METHOD_ENTRY_COMPLEMENTED(cme)) {
- VALUE defined_class = cme->defined_class;
- struct rb_id_table *cm_tbl = RCLASS_CALLABLE_M_TBL(defined_class);
- VM_ASSERT(cm_tbl != NULL);
- int r = rb_id_table_delete(cm_tbl, mid);
- VM_ASSERT(r == TRUE); (void)r;
- RB_DEBUG_COUNTER_INC(cc_invalidate_tree_callable);
- }
+ // invalidate complement tbl
+ if (METHOD_ENTRY_COMPLEMENTED(cme)) {
+ VALUE defined_class = cme->defined_class;
+ struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
+ invalidate_complemented_method_entry_in_callable_m_table(cm_tbl, mid);
+ if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(defined_class, cm_tbl)) {
+ struct rb_id_table *prime_cm_table = RCLASS_PRIME_CALLABLE_M_TBL(defined_class);
+ invalidate_complemented_method_entry_in_callable_m_table(prime_cm_table, mid);
+ }
+ }
- RB_DEBUG_COUNTER_INC(cc_invalidate_tree);
- }
- else {
- invalidate_negative_cache(mid);
+ RB_DEBUG_COUNTER_INC(cc_invalidate_tree);
+ }
+ else {
+ invalidate_negative_cache(mid);
+ }
}
+
+ rb_gccct_clear_table(Qnil);
}
- RB_VM_LOCK_LEAVE();
}
static void
clear_iclass_method_cache_by_id(VALUE iclass, VALUE d)
{
- VM_ASSERT(RB_TYPE_P(iclass, T_ICLASS));
+ VM_ASSERT_TYPE(iclass, T_ICLASS);
ID mid = (ID)d;
clear_method_cache_by_id_in_class(iclass, mid);
}
@@ -294,6 +551,7 @@ rb_clear_method_cache(VALUE klass_or_module, ID mid)
VALUE refined_class = rb_refinement_module_get_refined_class(module);
rb_clear_method_cache(refined_class, mid);
rb_class_foreach_subclass(refined_class, clear_iclass_method_cache_by_id_for_refinements, mid);
+ rb_clear_all_refinement_method_cache();
}
rb_class_foreach_subclass(module, clear_iclass_method_cache_by_id, mid);
}
@@ -302,36 +560,204 @@ rb_clear_method_cache(VALUE klass_or_module, ID mid)
}
}
-// gc.c
-void rb_cc_table_free(VALUE klass);
+static enum rb_id_table_iterator_result
+invalidate_method_entry_in_iclass_callable_m_tbl(VALUE cme, void *data)
+{
+ vm_cme_invalidate((rb_callable_method_entry_t *)cme);
+ return ID_TABLE_DELETE;
+}
+
+static enum rb_id_table_iterator_result
+invalidate_ccs_in_iclass_cc_tbl(VALUE value, void *data)
+{
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)value;
+ vm_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
+ xfree(ccs);
+ return ID_TABLE_DELETE;
+}
+
+void
+rb_invalidate_method_caches(struct rb_id_table *cm_tbl, VALUE cc_tbl)
+{
+ if (cm_tbl) {
+ rb_id_table_foreach_values(cm_tbl, invalidate_method_entry_in_iclass_callable_m_tbl, NULL);
+ }
+ if (cc_tbl) {
+ rb_managed_id_table_foreach_values(cc_tbl, invalidate_ccs_in_iclass_cc_tbl, NULL);
+ }
+}
static int
-invalidate_all_cc(void *vstart, void *vend, size_t stride, void *data)
-{
- VALUE v = (VALUE)vstart;
- for (; v != (VALUE)vend; v += stride) {
- void *ptr = asan_poisoned_object_p(v);
- asan_unpoison_object(v, false);
- if (RBASIC(v)->flags) { // liveness check
- if (RB_TYPE_P(v, T_CLASS) ||
- RB_TYPE_P(v, T_ICLASS)) {
- if (RCLASS_CC_TBL(v)) {
- rb_cc_table_free(v);
- }
- RCLASS_CC_TBL(v) = NULL;
+invalidate_cc_refinement(st_data_t key, st_data_t data)
+{
+ VALUE v = (VALUE)key;
+ void *ptr = rb_asan_poisoned_object_p(v);
+ rb_asan_unpoison_object(v, false);
+
+ if (rb_gc_pointer_to_heap_p(v) &&
+ !rb_objspace_garbage_object_p(v) &&
+ RBASIC(v)->flags) { // liveness check
+ const struct rb_callcache *cc = (const struct rb_callcache *)v;
+
+ VM_ASSERT(vm_cc_refinement_p(cc));
+
+ if (vm_cc_valid(cc)) {
+ vm_cc_invalidate(cc);
+ }
+ }
+
+ if (ptr) {
+ rb_asan_poison_object(v);
+ }
+
+ return ST_CONTINUE;
+}
+
+static st_index_t
+vm_ci_hash(VALUE v)
+{
+ const struct rb_callinfo *ci = (const struct rb_callinfo *)v;
+ st_index_t h;
+ h = rb_hash_start(ci->mid);
+ h = rb_hash_uint(h, ci->flag);
+ h = rb_hash_uint(h, ci->argc);
+ if (ci->kwarg) {
+ for (int i = 0; i < ci->kwarg->keyword_len; i++) {
+ h = rb_hash_uint(h, ci->kwarg->keywords[i]);
+ }
+ }
+ return h;
+}
+
+static int
+vm_ci_hash_cmp(VALUE v1, VALUE v2)
+{
+ const struct rb_callinfo *ci1 = (const struct rb_callinfo *)v1;
+ const struct rb_callinfo *ci2 = (const struct rb_callinfo *)v2;
+ if (ci1->mid != ci2->mid) return 1;
+ if (ci1->flag != ci2->flag) return 1;
+ if (ci1->argc != ci2->argc) return 1;
+ if (ci1->kwarg != NULL) {
+ VM_ASSERT(ci2->kwarg != NULL); // implied by matching flags
+
+ if (ci1->kwarg->keyword_len != ci2->kwarg->keyword_len)
+ return 1;
+
+ for (int i = 0; i < ci1->kwarg->keyword_len; i++) {
+ if (ci1->kwarg->keywords[i] != ci2->kwarg->keywords[i]) {
+ return 1;
}
}
- if (ptr) {
- asan_poison_object(v);
+ }
+ else {
+ VM_ASSERT(ci2->kwarg == NULL); // implied by matching flags
+ }
+ return 0;
+}
+
+static const struct st_hash_type vm_ci_hashtype = {
+ vm_ci_hash_cmp,
+ vm_ci_hash
+};
+
+static int
+ci_lookup_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
+{
+ const struct rb_callinfo *ci = (const struct rb_callinfo *)*key;
+ st_data_t *ret = (st_data_t *)data;
+
+ if (existing) {
+ if (rb_objspace_garbage_object_p((VALUE)ci)) {
+ *ret = (st_data_t)NULL;
+ return ST_DELETE;
+ }
+ else {
+ *ret = *key;
+ return ST_STOP;
}
}
- return 0; // continue to iteration
+ else {
+ *key = *value = *ret = (st_data_t)ci;
+ return ST_CONTINUE;
+ }
+}
+
+const struct rb_callinfo *
+rb_vm_ci_lookup(ID mid, unsigned int flag, unsigned int argc, const struct rb_callinfo_kwarg *kwarg)
+{
+ rb_vm_t *vm = GET_VM();
+ const struct rb_callinfo *ci = NULL;
+
+ if (kwarg) {
+ ((struct rb_callinfo_kwarg *)kwarg)->references++;
+ }
+
+ struct rb_callinfo *new_ci = SHAREABLE_IMEMO_NEW(struct rb_callinfo, imemo_callinfo, (VALUE)kwarg);
+ new_ci->mid = mid;
+ new_ci->flag = flag;
+ new_ci->argc = argc;
+
+ RB_VM_LOCKING() {
+ st_table *ci_table = vm->ci_table;
+ VM_ASSERT(ci_table);
+
+ do {
+ st_update(ci_table, (st_data_t)new_ci, ci_lookup_i, (st_data_t)&ci);
+ } while (ci == NULL);
+ }
+
+ VM_ASSERT(ci);
+
+ return ci;
+}
+
+void
+rb_vm_ci_free(const struct rb_callinfo *ci)
+{
+ ASSERT_vm_locking();
+
+ rb_vm_t *vm = GET_VM();
+
+ st_data_t key = (st_data_t)ci;
+ st_delete(vm->ci_table, &key, NULL);
+}
+
+void
+rb_vm_insert_cc_refinement(const struct rb_callcache *cc)
+{
+ st_data_t key = (st_data_t)cc;
+
+ rb_vm_t *vm = GET_VM();
+ RB_VM_LOCK_ENTER();
+ {
+ rb_set_insert(vm->cc_refinement_table, key);
+ }
+ RB_VM_LOCK_LEAVE();
}
void
-rb_clear_method_cache_all(void)
+rb_vm_delete_cc_refinement(const struct rb_callcache *cc)
{
- rb_objspace_each_objects(invalidate_all_cc, NULL);
+ ASSERT_vm_locking();
+
+ rb_vm_t *vm = GET_VM();
+ st_data_t key = (st_data_t)cc;
+
+ rb_set_table_delete(vm->cc_refinement_table, &key);
+}
+
+void
+rb_clear_all_refinement_method_cache(void)
+{
+ rb_vm_t *vm = GET_VM();
+
+ RB_VM_LOCK_ENTER();
+ {
+ rb_set_table_foreach(vm->cc_refinement_table, invalidate_cc_refinement, (st_data_t)NULL);
+ rb_set_table_clear(vm->cc_refinement_table);
+ rb_set_compact_table(vm->cc_refinement_table);
+ }
+ RB_VM_LOCK_LEAVE();
rb_yjit_invalidate_all_method_lookup_assumptions();
}
@@ -339,12 +765,19 @@ rb_clear_method_cache_all(void)
void
rb_method_table_insert(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me)
{
+ RB_VM_LOCKING() {
+ rb_method_table_insert0(klass, table, method_id, me, RB_TYPE_P(klass, T_ICLASS) && !RICLASS_OWNS_M_TBL_P(klass));
+ }
+}
+
+void
+rb_method_table_insert0(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me, bool iclass_shared_mtbl)
+{
VALUE table_owner = klass;
- if (RB_TYPE_P(klass, T_ICLASS) && !RICLASS_OWNS_M_TBL_P(klass)) {
+ if (iclass_shared_mtbl) {
table_owner = RBASIC(table_owner)->klass;
}
- VM_ASSERT(RB_TYPE_P(table_owner, T_CLASS) || RB_TYPE_P(table_owner, T_ICLASS) || RB_TYPE_P(table_owner, T_MODULE));
- VM_ASSERT(table == RCLASS_M_TBL(table_owner));
+ VM_ASSERT_TYPE3(table_owner, T_CLASS, T_ICLASS, T_MODULE);
rb_id_table_insert(table, method_id, (VALUE)me);
RB_OBJ_WRITTEN(table_owner, Qundef, (VALUE)me);
}
@@ -402,46 +835,56 @@ rb_add_method_optimized(VALUE klass, ID mid, enum method_optimized_type opt_type
}
static void
-rb_method_definition_release(rb_method_definition_t *def, int complemented)
+method_definition_release(rb_method_definition_t *def)
{
if (def != NULL) {
- const int alias_count = def->alias_count;
- const int complemented_count = def->complemented_count;
- VM_ASSERT(alias_count >= 0);
- VM_ASSERT(complemented_count >= 0);
-
- if (alias_count + complemented_count == 0) {
- if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d,%d (remove)\n", (void *)def,
- rb_id2name(def->original_id), alias_count, complemented_count);
- if (def->type == VM_METHOD_TYPE_BMETHOD && def->body.bmethod.hooks) {
- xfree(def->body.bmethod.hooks);
- }
+ const unsigned int reference_count_was = RUBY_ATOMIC_FETCH_SUB(def->reference_count, 1);
+
+ RUBY_ASSERT_ALWAYS(reference_count_was != 0);
+
+ if (reference_count_was == 1) {
+ if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:1->0 (remove)\n", (void *)def,
+ rb_id2name(def->original_id));
xfree(def);
}
else {
- if (complemented) {
- VM_ASSERT(def->complemented_count > 0);
- def->complemented_count--;
- }
- else if (def->alias_count > 0) {
- def->alias_count--;
- }
-
- if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d,%d->%d (dec)\n", (void *)def, rb_id2name(def->original_id),
- alias_count, def->alias_count, complemented_count, def->complemented_count);
+ if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d (dec)\n", (void *)def, rb_id2name(def->original_id),
+ reference_count_was, reference_count_was - 1);
}
}
}
+void
+rb_method_definition_release(rb_method_definition_t *def)
+{
+ method_definition_release(def);
+}
+
static void delete_overloaded_cme(const rb_callable_method_entry_t *cme);
void
-rb_free_method_entry(const rb_method_entry_t *me)
+rb_free_method_entry_vm_weak_references(const rb_method_entry_t *me)
{
if (me->def && me->def->iseq_overload) {
delete_overloaded_cme((const rb_callable_method_entry_t *)me);
}
- rb_method_definition_release(me->def, METHOD_ENTRY_COMPLEMENTED(me));
+}
+
+void
+rb_free_method_entry(const rb_method_entry_t *me)
+{
+#if USE_ZJIT
+ if (METHOD_ENTRY_CACHED(me)) {
+ rb_zjit_cme_free((const rb_callable_method_entry_t *)me);
+ }
+#endif
+
+#if USE_YJIT
+ // YJIT rb_yjit_root_mark() roots CMEs in `Invariants`,
+ // to remove from `Invariants` here.
+#endif
+
+ method_definition_release(me->def);
}
static inline rb_method_entry_t *search_method(VALUE klass, ID id, VALUE *defined_class_ptr);
@@ -508,10 +951,33 @@ setup_method_cfunc_struct(rb_method_cfunc_t *cfunc, VALUE (*func)(ANYARGS), int
cfunc->invoker = call_cfunc_invoker_func(argc);
}
-MJIT_FUNC_EXPORTED void
+
+static rb_method_definition_t *
+method_definition_addref(rb_method_definition_t *def, bool complemented)
+{
+ unsigned int reference_count_was = RUBY_ATOMIC_FETCH_ADD(def->reference_count, 1);
+ if (!complemented && reference_count_was > 0) {
+ /* TODO: A Ractor can reach this via UnboundMethod#bind */
+ def->aliased = true;
+ }
+ if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d->%d\n", (void *)def, rb_id2name(def->original_id), reference_count_was, reference_count_was+1);
+
+ return def;
+}
+
+void
+rb_method_definition_addref(rb_method_definition_t *def)
+{
+ method_definition_addref(def, false);
+}
+
+void
rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts)
{
- *(rb_method_definition_t **)&me->def = def;
+ method_definition_release(me->def);
+ *(rb_method_definition_t **)&me->def = method_definition_addref(def, METHOD_ENTRY_COMPLEMENTED(me));
+
+ if (!ruby_running) add_opt_method_entry(me);
if (opts != NULL) {
switch (def->type) {
@@ -524,6 +990,11 @@ rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *de
/* setup iseq first (before invoking GC) */
RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, iseq);
+ // Methods defined in `with_jit` should be considered METHOD_ENTRY_BASIC
+ if (rb_iseq_attr_p(iseq, BUILTIN_ATTR_C_TRACE)) {
+ METHOD_ENTRY_BASIC_SET((rb_method_entry_t *)me, TRUE);
+ }
+
if (ISEQ_BODY(iseq)->mandatory_only_iseq) def->iseq_overload = 1;
if (0) vm_cref_dump("rb_method_definition_create", cref);
@@ -557,7 +1028,9 @@ rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *de
if (cfp && (line = rb_vm_get_sourceline(cfp))) {
VALUE location = rb_ary_new3(2, rb_iseq_path(cfp->iseq), INT2FIX(line));
- RB_OBJ_WRITE(me, &def->body.attr.location, rb_ary_freeze(location));
+ rb_ary_freeze(location);
+ RB_OBJ_SET_SHAREABLE(location);
+ RB_OBJ_WRITE(me, &def->body.attr.location, location);
}
else {
VM_ASSERT(def->body.attr.location == 0);
@@ -566,7 +1039,7 @@ rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *de
}
case VM_METHOD_TYPE_BMETHOD:
RB_OBJ_WRITE(me, &def->body.bmethod.proc, (VALUE)opts);
- RB_OBJ_WRITE(me, &def->body.bmethod.defined_ractor, rb_ractor_self(GET_RACTOR()));
+ def->body.bmethod.defined_ractor_id = rb_ec_ractor_id(GET_EC());
return;
case VM_METHOD_TYPE_NOTIMPLEMENTED:
setup_method_cfunc_struct(UNALIGNED_MEMBER_PTR(def, body.cfunc), (VALUE(*)(ANYARGS))rb_f_notimplement_internal, -1);
@@ -576,9 +1049,7 @@ rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *de
return;
case VM_METHOD_TYPE_REFINED:
{
- const rb_method_refined_t *refined = (rb_method_refined_t *)opts;
- RB_OBJ_WRITE(me, &def->body.refined.orig_me, refined->orig_me);
- RB_OBJ_WRITE(me, &def->body.refined.owner, refined->owner);
+ RB_OBJ_WRITE(me, &def->body.refined.orig_me, (rb_method_entry_t *)opts);
return;
}
case VM_METHOD_TYPE_ALIAS:
@@ -608,13 +1079,9 @@ method_definition_reset(const rb_method_entry_t *me)
break;
case VM_METHOD_TYPE_BMETHOD:
RB_OBJ_WRITTEN(me, Qundef, def->body.bmethod.proc);
- RB_OBJ_WRITTEN(me, Qundef, def->body.bmethod.defined_ractor);
- /* give up to check all in a list */
- if (def->body.bmethod.hooks) rb_gc_writebarrier_remember((VALUE)me);
break;
case VM_METHOD_TYPE_REFINED:
RB_OBJ_WRITTEN(me, Qundef, def->body.refined.orig_me);
- RB_OBJ_WRITTEN(me, Qundef, def->body.refined.owner);
break;
case VM_METHOD_TYPE_ALIAS:
RB_OBJ_WRITTEN(me, Qundef, def->body.alias.original_me);
@@ -629,38 +1096,37 @@ method_definition_reset(const rb_method_entry_t *me)
}
}
-MJIT_FUNC_EXPORTED rb_method_definition_t *
+static rb_atomic_t method_serial = 1;
+
+rb_method_definition_t *
rb_method_definition_create(rb_method_type_t type, ID mid)
{
rb_method_definition_t *def;
def = ZALLOC(rb_method_definition_t);
def->type = type;
def->original_id = mid;
- static uintptr_t method_serial = 1;
- def->method_serial = method_serial++;
+ def->method_serial = (uintptr_t)RUBY_ATOMIC_FETCH_ADD(method_serial, 1);
+ def->box = rb_current_box();
return def;
}
-static rb_method_definition_t *
-method_definition_addref(rb_method_definition_t *def)
+static rb_method_entry_t *
+rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, rb_method_definition_t *def, bool complement)
{
- def->alias_count++;
- if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d\n", (void *)def, rb_id2name(def->original_id), def->alias_count);
- return def;
-}
+ if (def) method_definition_addref(def, complement);
+ if (RTEST(defined_class)) {
+ // not negative cache
+ VM_ASSERT_TYPE2(defined_class, T_CLASS, T_ICLASS);
+ }
+ rb_method_entry_t *me = SHAREABLE_IMEMO_NEW(rb_method_entry_t, imemo_ment, defined_class);
-static rb_method_definition_t *
-method_definition_addref_complement(rb_method_definition_t *def)
-{
- def->complemented_count++;
- if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d\n", (void *)def, rb_id2name(def->original_id), def->complemented_count);
- return def;
-}
+ // mark_and_move_method_entry pins itself when it is in the overloaded_cme table
+ rb_gc_register_pinning_obj((VALUE)me);
+
+ *((rb_method_definition_t **)&me->def) = def;
+ me->called_id = called_id;
+ me->owner = owner;
-static rb_method_entry_t *
-rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, const rb_method_definition_t *def)
-{
- rb_method_entry_t *me = (rb_method_entry_t *)rb_imemo_new(imemo_ment, (VALUE)def, (VALUE)called_id, owner, defined_class);
return me;
}
@@ -681,36 +1147,48 @@ filter_defined_class(VALUE klass)
}
rb_method_entry_t *
-rb_method_entry_create(ID called_id, VALUE klass, rb_method_visibility_t visi, const rb_method_definition_t *def)
+rb_method_entry_create(ID called_id, VALUE klass, rb_method_visibility_t visi, rb_method_definition_t *def)
{
- rb_method_entry_t *me = rb_method_entry_alloc(called_id, klass, filter_defined_class(klass), def);
+ rb_method_entry_t *me = rb_method_entry_alloc(called_id, klass, filter_defined_class(klass), def, false);
METHOD_ENTRY_FLAGS_SET(me, visi, ruby_running ? FALSE : TRUE);
if (def != NULL) method_definition_reset(me);
return me;
}
+// Return a cloned ME that's not invalidated (MEs are disposable for caching).
const rb_method_entry_t *
rb_method_entry_clone(const rb_method_entry_t *src_me)
{
- rb_method_entry_t *me = rb_method_entry_alloc(src_me->called_id, src_me->owner, src_me->defined_class,
- method_definition_addref(src_me->def));
- if (METHOD_ENTRY_COMPLEMENTED(src_me)) {
- method_definition_addref_complement(src_me->def);
- }
+ rb_method_entry_t *me = rb_method_entry_alloc(src_me->called_id, src_me->owner, src_me->defined_class, src_me->def, METHOD_ENTRY_COMPLEMENTED(src_me));
METHOD_ENTRY_FLAGS_COPY(me, src_me);
+
+ // Also clone inner ME in case of refinement ME
+ if (src_me->def &&
+ src_me->def->type == VM_METHOD_TYPE_REFINED &&
+ src_me->def->body.refined.orig_me) {
+ const rb_method_entry_t *orig_me = src_me->def->body.refined.orig_me;
+ VM_ASSERT(orig_me->def->type != VM_METHOD_TYPE_REFINED);
+
+ rb_method_entry_t *orig_clone = rb_method_entry_alloc(orig_me->called_id,
+ orig_me->owner, orig_me->defined_class, orig_me->def, METHOD_ENTRY_COMPLEMENTED(orig_me));
+ METHOD_ENTRY_FLAGS_COPY(orig_clone, orig_me);
+
+ // Clone definition, since writing a VALUE to a shared definition
+ // can create reference edges we can't run WBs for.
+ rb_method_definition_t *clone_def =
+ rb_method_definition_create(VM_METHOD_TYPE_REFINED, src_me->called_id);
+ rb_method_definition_set(me, clone_def, orig_clone);
+ }
return me;
}
-MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
+const rb_callable_method_entry_t *
rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID called_id, VALUE defined_class)
{
rb_method_definition_t *def = src_me->def;
rb_method_entry_t *me;
- struct {
- const struct rb_method_entry_struct *orig_me;
- VALUE owner;
- } refined = {0};
+ const rb_method_entry_t *refined_orig_me = NULL;
if (!src_me->defined_class &&
def->type == VM_METHOD_TYPE_REFINED &&
@@ -718,22 +1196,19 @@ rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID cal
const rb_method_entry_t *orig_me =
rb_method_entry_clone(def->body.refined.orig_me);
RB_OBJ_WRITE((VALUE)orig_me, &orig_me->defined_class, defined_class);
- refined.orig_me = orig_me;
- refined.owner = orig_me->owner;
+ refined_orig_me = orig_me;
def = NULL;
}
- else {
- def = method_definition_addref_complement(def);
- }
- me = rb_method_entry_alloc(called_id, src_me->owner, defined_class, def);
+
+ me = rb_method_entry_alloc(called_id, src_me->owner, defined_class, def, true);
METHOD_ENTRY_FLAGS_COPY(me, src_me);
METHOD_ENTRY_COMPLEMENTED_SET(me);
if (!def) {
def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, called_id);
- rb_method_definition_set(me, def, &refined);
+ rb_method_definition_set(me, def, (void *)refined_orig_me);
}
- VM_ASSERT(RB_TYPE_P(me->owner, T_MODULE));
+ VM_ASSERT_TYPE(me->owner, T_MODULE);
return (rb_callable_method_entry_t *)me;
}
@@ -741,7 +1216,8 @@ rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID cal
void
rb_method_entry_copy(rb_method_entry_t *dst, const rb_method_entry_t *src)
{
- *(rb_method_definition_t **)&dst->def = method_definition_addref(src->def);
+ method_definition_release(dst->def);
+ *(rb_method_definition_t **)&dst->def = method_definition_addref(src->def, METHOD_ENTRY_COMPLEMENTED(src));
method_definition_reset(dst);
dst->called_id = src->called_id;
RB_OBJ_WRITE((VALUE)dst, &dst->owner, src->owner);
@@ -756,24 +1232,20 @@ make_method_entry_refined(VALUE owner, rb_method_entry_t *me)
return;
}
else {
- struct {
- struct rb_method_entry_struct *orig_me;
- VALUE owner;
- } refined;
rb_method_definition_t *def;
rb_vm_check_redefinition_opt_method(me, me->owner);
- refined.orig_me =
- rb_method_entry_alloc(me->called_id, me->owner,
- me->defined_class ?
- me->defined_class : owner,
- method_definition_addref(me->def));
- METHOD_ENTRY_FLAGS_COPY(refined.orig_me, me);
- refined.owner = owner;
+ struct rb_method_entry_struct *orig_me =
+ rb_method_entry_alloc(me->called_id,
+ me->owner,
+ me->defined_class,
+ me->def,
+ true);
+ METHOD_ENTRY_FLAGS_COPY(orig_me, me);
def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, me->called_id);
- rb_method_definition_set(me, def, (void *)&refined);
+ rb_method_definition_set(me, def, orig_me);
METHOD_ENTRY_VISI_SET(me, METHOD_VISI_PUBLIC);
}
}
@@ -830,6 +1302,7 @@ check_override_opt_method(VALUE klass, VALUE mid)
}
}
+static inline rb_method_entry_t* search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined);
/*
* klass->method_table[mid] = method_entry(defined_class, visi, def)
*
@@ -851,7 +1324,7 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
}
orig_klass = klass;
- if (!FL_TEST(klass, FL_SINGLETON) &&
+ if (!RCLASS_SINGLETON_P(klass) &&
type != VM_METHOD_TYPE_NOTIMPLEMENTED &&
type != VM_METHOD_TYPE_ZSUPER) {
switch (mid) {
@@ -870,7 +1343,12 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
if (RB_TYPE_P(klass, T_MODULE) && FL_TEST(klass, RMODULE_IS_REFINEMENT)) {
VALUE refined_class = rb_refinement_module_get_refined_class(klass);
+ bool search_superclass = type == VM_METHOD_TYPE_ZSUPER && !lookup_method_table(refined_class, mid);
rb_add_refined_method_entry(refined_class, mid);
+ if (search_superclass) {
+ rb_method_entry_t *me = lookup_method_table(refined_class, mid);
+ me->def->body.refined.orig_me = search_method0(refined_class, mid, NULL, true);
+ }
}
if (type == VM_METHOD_TYPE_REFINED) {
rb_method_entry_t *old_me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
@@ -882,7 +1360,7 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
rb_clear_method_cache(orig_klass, mid);
}
}
- mtbl = RCLASS_M_TBL(klass);
+ mtbl = RCLASS_WRITABLE_M_TBL(klass);
/* check re-definition */
if (rb_id_table_lookup(mtbl, mid, &data)) {
@@ -896,7 +1374,7 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
if (RTEST(ruby_verbose) &&
type != VM_METHOD_TYPE_UNDEF &&
- (old_def->alias_count == 0) &&
+ (old_def->aliased == false) &&
(!old_def->no_redef_warning) &&
!make_refined &&
old_def->type != VM_METHOD_TYPE_UNDEF &&
@@ -904,7 +1382,6 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
old_def->type != VM_METHOD_TYPE_ALIAS) {
const rb_iseq_t *iseq = 0;
- rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
switch (old_def->type) {
case VM_METHOD_TYPE_ISEQ:
iseq = def_iseq_ptr(old_def);
@@ -916,17 +1393,25 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
break;
}
if (iseq) {
- rb_compile_warning(RSTRING_PTR(rb_iseq_path(iseq)),
- ISEQ_BODY(iseq)->location.first_lineno,
- "previous definition of %"PRIsVALUE" was here",
- rb_id2str(old_def->original_id));
+ rb_warning(
+ "method redefined; discarding old %"PRIsVALUE"\n%s:%d: warning: previous definition of %"PRIsVALUE" was here",
+ rb_id2str(mid),
+ RSTRING_PTR(rb_iseq_path(iseq)),
+ ISEQ_BODY(iseq)->location.first_lineno,
+ rb_id2str(old_def->original_id)
+ );
+ }
+ else {
+ rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
}
}
}
/* create method entry */
me = rb_method_entry_create(mid, defined_class, visi, NULL);
- if (def == NULL) def = rb_method_definition_create(type, original_id);
+ if (def == NULL) {
+ def = rb_method_definition_create(type, original_id);
+ }
rb_method_definition_set(me, def, opts);
rb_clear_method_cache(klass, mid);
@@ -942,9 +1427,9 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
}
}
/* check mid */
- if (mid == object_id || mid == id__send__) {
- if (type == VM_METHOD_TYPE_ISEQ && search_method(klass, mid, 0)) {
- rb_warn("redefining `%s' may cause serious problems", rb_id2name(mid));
+ if (mid == object_id || mid == id__id__ || mid == id__send__) {
+ if (type != VM_METHOD_TYPE_CFUNC && search_method(klass, mid, 0)) {
+ rb_warn("redefining '%s' may cause serious problems", rb_id2name(mid));
}
}
@@ -964,8 +1449,6 @@ rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibil
return me;
}
-static rb_method_entry_t *rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, const rb_method_definition_t *def);
-
static st_table *
overloaded_cme_table(void)
{
@@ -1022,7 +1505,7 @@ lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
}
#if VM_CHECK_MODE > 0
-MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
+const rb_callable_method_entry_t *
rb_vm_lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
{
return lookup_overloaded_cme(cme);
@@ -1048,13 +1531,14 @@ get_overloaded_cme(const rb_callable_method_entry_t *cme)
else {
// create
rb_method_definition_t *def = rb_method_definition_create(VM_METHOD_TYPE_ISEQ, cme->def->original_id);
- def->body.iseq.cref = cme->def->body.iseq.cref;
- def->body.iseq.iseqptr = ISEQ_BODY(cme->def->body.iseq.iseqptr)->mandatory_only_iseq;
-
rb_method_entry_t *me = rb_method_entry_alloc(cme->called_id,
cme->owner,
cme->defined_class,
- def);
+ def,
+ false);
+
+ RB_OBJ_WRITE(me, &def->body.iseq.cref, cme->def->body.iseq.cref);
+ RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, ISEQ_BODY(cme->def->body.iseq.iseqptr)->mandatory_only_iseq);
ASSERT_vm_locking();
st_insert(overloaded_cme_table(), (st_data_t)cme, (st_data_t)me);
@@ -1064,13 +1548,14 @@ get_overloaded_cme(const rb_callable_method_entry_t *cme)
}
}
-static const rb_callable_method_entry_t *
-check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci)
+const rb_callable_method_entry_t *
+rb_check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci)
{
if (UNLIKELY(cme->def->iseq_overload) &&
(vm_ci_flag(ci) & (VM_CALL_ARGS_SIMPLE)) &&
+ (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) &&
(int)vm_ci_argc(ci) == ISEQ_BODY(method_entry_iseqptr(cme))->param.lead_num) {
- VM_ASSERT(cme->def->type == VM_METHOD_TYPE_ISEQ); // iseq_overload is marked only on ISEQ methods
+ VM_ASSERT(cme->def->type == VM_METHOD_TYPE_ISEQ, "type: %d", cme->def->type); // iseq_overload is marked only on ISEQ methods
cme = get_overloaded_cme(cme);
@@ -1085,8 +1570,8 @@ check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_call
const VALUE arg = ID2SYM(mid); \
VALUE recv_class = (klass); \
ID hook_id = (hook); \
- if (FL_TEST((klass), FL_SINGLETON)) { \
- recv_class = rb_ivar_get((klass), attached); \
+ if (RCLASS_SINGLETON_P((klass))) { \
+ recv_class = RCLASS_ATTACHED_OBJECT((klass)); \
hook_id = singleton_##hook; \
} \
rb_funcallv(recv_class, hook_id, 1, &arg); \
@@ -1103,14 +1588,16 @@ method_added(VALUE klass, ID mid)
void
rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_visibility_t visi)
{
- rb_method_entry_make(klass, mid, klass, visi, type, NULL, mid, opts);
+ RB_VM_LOCKING() {
+ rb_method_entry_make(klass, mid, klass, visi, type, NULL, mid, opts);
+ }
if (type != VM_METHOD_TYPE_UNDEF && type != VM_METHOD_TYPE_REFINED) {
method_added(klass, mid);
}
}
-MJIT_FUNC_EXPORTED void
+void
rb_add_method_iseq(VALUE klass, ID mid, const rb_iseq_t *iseq, rb_cref_t *cref, rb_method_visibility_t visi)
{
struct { /* should be same fields with rb_method_iseq_struct */
@@ -1128,14 +1615,16 @@ static rb_method_entry_t *
method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me,
rb_method_visibility_t visi, VALUE defined_class)
{
- rb_method_entry_t *newme = rb_method_entry_make(klass, mid, defined_class, visi,
- me->def->type, me->def, 0, NULL);
- if (newme == me) {
- me->def->no_redef_warning = TRUE;
- }
- else {
- method_definition_addref(me->def);
+ rb_method_entry_t *newme;
+ RB_VM_LOCKING() {
+ newme = rb_method_entry_make(klass, mid, defined_class, visi,
+ me->def->type, me->def, 0, NULL);
+ if (newme == me) {
+ me->def->no_redef_warning = TRUE;
+ METHOD_ENTRY_FLAGS_SET(newme, visi, FALSE);
+ }
}
+
method_added(klass, mid);
return newme;
}
@@ -1152,7 +1641,10 @@ void
rb_define_alloc_func(VALUE klass, VALUE (*func)(VALUE))
{
Check_Type(klass, T_CLASS);
- RCLASS_ALLOCATOR(klass) = func;
+ if (RCLASS_SINGLETON_P(klass)) {
+ rb_raise(rb_eTypeError, "can't define an allocator for a singleton class");
+ }
+ RCLASS_SET_ALLOCATOR(klass, func);
}
void
@@ -1164,10 +1656,20 @@ rb_undef_alloc_func(VALUE klass)
rb_alloc_func_t
rb_get_alloc_func(VALUE klass)
{
- Check_Type(klass, T_CLASS);
+ RBIMPL_ASSERT_TYPE(klass, T_CLASS);
- for (; klass; klass = RCLASS_SUPER(klass)) {
- rb_alloc_func_t allocator = RCLASS_ALLOCATOR(klass);
+ rb_alloc_func_t allocator = RCLASS_ALLOCATOR(klass);
+ if (allocator == UNDEF_ALLOC_FUNC) return 0;
+ if (allocator) return allocator;
+
+ VALUE *superclasses = RCLASS_SUPERCLASSES(klass);
+ size_t depth = RCLASS_SUPERCLASS_DEPTH(klass);
+
+ for (size_t i = depth; i > 0; i--) {
+ klass = superclasses[i - 1];
+ RBIMPL_ASSERT_TYPE(klass, T_CLASS);
+
+ allocator = RCLASS_ALLOCATOR(klass);
if (allocator == UNDEF_ALLOC_FUNC) break;
if (allocator) return allocator;
}
@@ -1201,7 +1703,11 @@ search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined)
if (me == NULL) RB_DEBUG_COUNTER_INC(mc_search_notfound);
- VM_ASSERT(me == NULL || !METHOD_ENTRY_INVALIDATED(me));
+ VM_ASSERT(me == NULL || !METHOD_ENTRY_INVALIDATED(me),
+ "invalid me, mid:%s, klass:%s(%s)",
+ rb_id2name(id),
+ RTEST(rb_mod_name(klass)) ? RSTRING_PTR(rb_mod_name(klass)) : "anonymous",
+ rb_obj_info(klass));
return me;
}
@@ -1224,7 +1730,7 @@ search_method_protect(VALUE klass, ID id, VALUE *defined_class_ptr)
}
}
-MJIT_FUNC_EXPORTED const rb_method_entry_t *
+const rb_method_entry_t *
rb_method_entry(VALUE klass, ID id)
{
return search_method_protect(klass, id, NULL);
@@ -1236,24 +1742,27 @@ prepare_callable_method_entry(VALUE defined_class, ID id, const rb_method_entry_
struct rb_id_table *mtbl;
const rb_callable_method_entry_t *cme;
VALUE cme_data;
+ int cme_found = 0;
if (me) {
if (me->defined_class == 0) {
RB_DEBUG_COUNTER_INC(mc_cme_complement);
- VM_ASSERT(RB_TYPE_P(defined_class, T_ICLASS) || RB_TYPE_P(defined_class, T_MODULE));
- VM_ASSERT(me->defined_class == 0);
-
- mtbl = RCLASS_CALLABLE_M_TBL(defined_class);
+ VM_ASSERT_TYPE2(defined_class, T_ICLASS, T_MODULE);
+ mtbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
if (mtbl && rb_id_table_lookup(mtbl, id, &cme_data)) {
cme = (rb_callable_method_entry_t *)cme_data;
+ cme_found = 1;
+ }
+ if (cme_found) {
RB_DEBUG_COUNTER_INC(mc_cme_complement_hit);
VM_ASSERT(callable_method_entry_p(cme));
VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme));
}
else if (create) {
if (!mtbl) {
- mtbl = RCLASS_EXT(defined_class)->callable_m_tbl = rb_id_table_create(0);
+ mtbl = rb_id_table_create(0);
+ RCLASS_WRITE_CALLABLE_M_TBL(defined_class, mtbl);
}
cme = rb_method_entry_complement_defined_class(me, me->called_id, defined_class);
rb_id_table_insert(mtbl, id, (VALUE)cme);
@@ -1289,10 +1798,10 @@ cached_callable_method_entry(VALUE klass, ID mid)
{
ASSERT_vm_locking();
- struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
+ VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
VALUE ccs_data;
- if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
+ if (cc_tbl && rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
VM_ASSERT(vm_ccs_p(ccs));
@@ -1302,8 +1811,10 @@ cached_callable_method_entry(VALUE klass, ID mid)
return ccs->cme;
}
else {
- rb_vm_ccs_free(ccs);
- rb_id_table_delete(cc_tbl, mid);
+ rb_vm_barrier();
+
+ rb_managed_id_table_delete(cc_tbl, mid);
+ rb_vm_ccs_invalidate_and_free(ccs);
}
}
@@ -1317,21 +1828,29 @@ cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_
ASSERT_vm_locking();
VM_ASSERT(cme != NULL);
- struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
+ VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
VALUE ccs_data;
if (!cc_tbl) {
- cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
+ cc_tbl = rb_vm_cc_table_create(2);
+ RCLASS_WRITE_CC_TBL(klass, cc_tbl);
}
- if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
+ if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
#if VM_CHECK_MODE > 0
struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
VM_ASSERT(ccs->cme == cme);
#endif
}
else {
- vm_ccs_create(klass, cc_tbl, mid, cme);
+ if (rb_multi_ractor_p()) {
+ VALUE new_cc_tbl = rb_vm_cc_table_dup(cc_tbl);
+ vm_ccs_create(klass, new_cc_tbl, mid, cme);
+ RB_OBJ_ATOMIC_WRITE(klass, &RCLASSEXT_CC_TBL(RCLASS_EXT_WRITABLE(klass)), new_cc_tbl);
+ }
+ else {
+ vm_ccs_create(klass, cc_tbl, mid, cme);
+ }
}
}
@@ -1346,7 +1865,7 @@ negative_cme(ID mid)
cme = (rb_callable_method_entry_t *)cme_data;
}
else {
- cme = (rb_callable_method_entry_t *)rb_method_entry_alloc(mid, Qnil, Qnil, NULL);
+ cme = (rb_callable_method_entry_t *)rb_method_entry_alloc(mid, Qnil, Qnil, NULL, false);
rb_id_table_insert(vm->negative_cme_table, mid, (VALUE)cme);
}
@@ -1359,9 +1878,27 @@ callable_method_entry_or_negative(VALUE klass, ID mid, VALUE *defined_class_ptr)
{
const rb_callable_method_entry_t *cme;
- VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
- RB_VM_LOCK_ENTER();
- {
+ VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);
+
+ /* Fast path: lock-free read from cache */
+ VALUE cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
+ if (cc_tbl) {
+ VALUE ccs_data;
+ if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
+ struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
+ VM_ASSERT(vm_ccs_p(ccs));
+
+ if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) {
+ VM_ASSERT(ccs->cme->called_id == mid);
+ if (defined_class_ptr != NULL) *defined_class_ptr = ccs->cme->defined_class;
+ RB_DEBUG_COUNTER_INC(ccs_found);
+ return ccs->cme;
+ }
+ }
+ }
+
+ /* Slow path: need to lock and potentially populate cache */
+ RB_VM_LOCKING() {
cme = cached_callable_method_entry(klass, mid);
if (cme) {
@@ -1382,7 +1919,6 @@ callable_method_entry_or_negative(VALUE klass, ID mid, VALUE *defined_class_ptr)
cache_callable_method_entry(klass, mid, cme);
}
}
- RB_VM_LOCK_LEAVE();
return cme;
}
@@ -1403,7 +1939,7 @@ callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr)
return !UNDEFINED_METHOD_ENTRY_P(cme) ? cme : NULL;
}
-MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
+const rb_callable_method_entry_t *
rb_callable_method_entry(VALUE klass, ID mid)
{
return callable_method_entry(klass, mid, NULL);
@@ -1434,14 +1970,14 @@ method_entry_resolve_refinement(VALUE klass, ID id, int with_refinement, VALUE *
return me;
}
-MJIT_FUNC_EXPORTED const rb_method_entry_t *
+const rb_method_entry_t *
rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
return method_entry_resolve_refinement(klass, id, TRUE, defined_class_ptr);
}
static const rb_callable_method_entry_t *
-callable_method_entry_refeinements0(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements,
+callable_method_entry_refinements0(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements,
const rb_callable_method_entry_t *cme)
{
if (cme == NULL || LIKELY(cme->def->type != VM_METHOD_TYPE_REFINED)) {
@@ -1458,10 +1994,10 @@ static const rb_callable_method_entry_t *
callable_method_entry_refinements(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements)
{
const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr);
- return callable_method_entry_refeinements0(klass, id, defined_class_ptr, with_refinements, cme);
+ return callable_method_entry_refinements0(klass, id, defined_class_ptr, with_refinements, cme);
}
-MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
+const rb_callable_method_entry_t *
rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
return callable_method_entry_refinements(klass, id, defined_class_ptr, true);
@@ -1479,7 +2015,7 @@ rb_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr
return method_entry_resolve_refinement(klass, id, FALSE, defined_class_ptr);
}
-MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
+const rb_callable_method_entry_t *
rb_callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class;
@@ -1506,7 +2042,12 @@ resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *de
tmp_me = me->def->body.refined.orig_me;
if (tmp_me) {
- if (defined_class_ptr) *defined_class_ptr = tmp_me->defined_class;
+ if (!tmp_me->defined_class) {
+ VM_ASSERT_TYPE(tmp_me->owner, T_MODULE);
+ }
+ else if (defined_class_ptr) {
+ *defined_class_ptr = tmp_me->defined_class;
+ }
return tmp_me;
}
@@ -1526,7 +2067,6 @@ rb_resolve_refined_method(VALUE refinements, const rb_method_entry_t *me)
return resolve_refined_method(refinements, me, NULL);
}
-MJIT_FUNC_EXPORTED
const rb_callable_method_entry_t *
rb_resolve_refined_method_callable(VALUE refinements, const rb_callable_method_entry_t *me)
{
@@ -1550,15 +2090,15 @@ remove_method(VALUE klass, ID mid)
rb_class_modify_check(klass);
klass = RCLASS_ORIGIN(klass);
- if (mid == object_id || mid == id__send__ || mid == idInitialize) {
- rb_warn("removing `%s' may cause serious problems", rb_id2name(mid));
+ if (mid == object_id || mid == id__id__ || mid == id__send__ || mid == idInitialize) {
+ rb_warn("removing '%s' may cause serious problems", rb_id2name(mid));
}
if (!rb_id_table_lookup(RCLASS_M_TBL(klass), mid, &data) ||
!(me = (rb_method_entry_t *)data) ||
(!me->def || me->def->type == VM_METHOD_TYPE_UNDEF) ||
UNDEFINED_REFINED_METHOD_P(me->def)) {
- rb_name_err_raise("method `%1$s' not defined in %2$s",
+ rb_name_err_raise("method '%1$s' not defined in %2$s",
klass, ID2SYM(mid));
}
@@ -1566,7 +2106,7 @@ remove_method(VALUE klass, ID mid)
rb_clear_method_cache(self, mid);
}
rb_clear_method_cache(klass, mid);
- rb_id_table_delete(RCLASS_M_TBL(klass), mid);
+ rb_id_table_delete(RCLASS_WRITABLE_M_TBL(klass), mid);
rb_vm_check_redefinition_opt_method(me, klass);
@@ -1608,7 +2148,7 @@ rb_mod_remove_method(int argc, VALUE *argv, VALUE mod)
VALUE v = argv[i];
ID id = rb_check_id(&v);
if (!id) {
- rb_name_err_raise("method `%1$s' not defined in %2$s",
+ rb_name_err_raise("method '%1$s' not defined in %2$s",
mod, v);
}
remove_method(mod, id);
@@ -1664,7 +2204,7 @@ method_boundp(VALUE klass, ID id, int ex)
{
const rb_callable_method_entry_t *cme;
- VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
+ VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);
if (ex & BOUND_RESPONDS) {
cme = rb_callable_method_entry_with_refinements(klass, id, NULL);
@@ -1780,8 +2320,8 @@ rb_undef(VALUE klass, ID id)
rb_raise(rb_eTypeError, "no class to undef method");
}
rb_class_modify_check(klass);
- if (id == object_id || id == id__send__ || id == idInitialize) {
- rb_warn("undefining `%s' may cause serious problems", rb_id2name(id));
+ if (id == object_id || id == id__id__ || id == id__send__ || id == idInitialize) {
+ rb_warn("undefining '%s' may cause serious problems", rb_id2name(id));
}
me = search_method(klass, id, 0);
@@ -1841,7 +2381,7 @@ rb_undef(VALUE klass, ID id)
*
* In child
* In parent
- * prog.rb:23: undefined method `hello' for #<Child:0x401b3bb4> (NoMethodError)
+ * prog.rb:23: undefined method 'hello' for #<Child:0x401b3bb4> (NoMethodError)
*/
static VALUE
@@ -2075,7 +2615,7 @@ original_method_definition(const rb_method_definition_t *def)
return def;
}
-MJIT_FUNC_EXPORTED int
+int
rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2)
{
d1 = original_method_definition(d1);
@@ -2110,7 +2650,7 @@ rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_defini
case VM_METHOD_TYPE_ALIAS:
break;
}
- rb_bug("rb_method_definition_eq: unsupported type: %d\n", d1->type);
+ rb_bug("rb_method_definition_eq: unsupported type: %d", d1->type);
}
static st_index_t
@@ -2123,7 +2663,7 @@ rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
- return rb_hash_uint(hash, (st_index_t)def->body.iseq.iseqptr);
+ return rb_hash_uint(hash, (st_index_t)def->body.iseq.iseqptr->body);
case VM_METHOD_TYPE_CFUNC:
hash = rb_hash_uint(hash, (st_index_t)def->body.cfunc.func);
return rb_hash_uint(hash, def->body.cfunc.argc);
@@ -2145,7 +2685,7 @@ rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
case VM_METHOD_TYPE_ALIAS:
break; /* unreachable */
}
- rb_bug("rb_hash_method_definition: unsupported method type (%d)\n", def->type);
+ rb_bug("rb_hash_method_definition: unsupported method type (%d)", def->type);
}
st_index_t
@@ -2180,7 +2720,7 @@ rb_alias(VALUE klass, ID alias_name, ID original_name)
if ((!RB_TYPE_P(klass, T_MODULE)) ||
(orig_me = search_method(rb_cObject, original_name, &defined_class),
UNDEFINED_METHOD_ENTRY_P(orig_me))) {
- rb_print_undef(klass, original_name, METHOD_VISI_UNDEF);
+ rb_print_undef(target_klass, original_name, METHOD_VISI_UNDEF);
}
}
@@ -2211,7 +2751,13 @@ rb_alias(VALUE klass, ID alias_name, ID original_name)
alias_me = method_entry_set(target_klass, alias_name, orig_me, visi, orig_me->owner);
RB_OBJ_WRITE(alias_me, &alias_me->owner, target_klass);
- RB_OBJ_WRITE(alias_me, &alias_me->defined_class, orig_me->defined_class);
+
+ if (RB_TYPE_P(target_klass, T_MODULE)) {
+ // defined_class should not be set
+ }
+ else {
+ RB_OBJ_WRITE(alias_me, &alias_me->defined_class, orig_me->defined_class);
+ }
}
}
@@ -2334,21 +2880,53 @@ rb_mod_public(int argc, VALUE *argv, VALUE module)
* protected(method_name, method_name, ...) -> array
* protected(array) -> array
*
- * With no arguments, sets the default visibility for subsequently
- * defined methods to protected. With arguments, sets the named methods
- * to have protected visibility.
- * String arguments are converted to symbols.
- * An Array of Symbols and/or Strings is also accepted.
- * If a single argument is passed, it is returned.
- * If no argument is passed, nil is returned.
- * If multiple arguments are passed, the arguments are returned as an array.
+ * Sets the visibility of a section or of a list of method names as protected.
+ * Accepts no arguments, a splat of method names (symbols or strings) or an
+ * array of method names. Returns the arguments that it received.
+ *
+ * == Important difference from protected in other languages
+ *
+ * Protected methods in Ruby are different from other languages such as Java,
+ * where methods are marked as protected to give access to subclasses. In Ruby,
+ * subclasses <b>already have access to all methods defined in the parent
+ * class</b>, even private ones.
+ *
+ * Marking a method as protected allows <b>different objects of the same
+ * class</b> to call it.
+ *
+ * One use case is for comparison methods, such as <code>==</code>, if we want
+ * to expose a method for comparison between objects of the same class without
+ * making the method public to objects of other classes.
+ *
+ * == Performance considerations
+ *
+ * Protected methods are slower than others because they can't use inline
+ * cache.
*
- * If a method has protected visibility, it is callable only where
- * <code>self</code> of the context is the same as the method.
- * (method definition or instance_eval). This behavior is different from
- * Java's protected method. Usually <code>private</code> should be used.
+ * == Example
*
- * Note that a protected method is slow because it can't use inline cache.
+ * class Account
+ * # Mark balance as protected, so that we can compare between accounts
+ * # without making it public.
+ * attr_reader :balance
+ * protected :balance
+ *
+ * def initialize(balance)
+ * @balance = balance
+ * end
+ *
+ * def >(other)
+ * # The invocation to `other.balance` is allowed because `other` is a
+ * # different object of the same class (Account).
+ * balance > other.balance
+ * end
+ * end
+ *
+ * account1 = Account.new(100)
+ * account2 = Account.new(50)
+ *
+ * account1 > account2 # => true (works)
+ * account1.balance # => NoMethodError (fails because balance is not public)
*
* To show a private method on RDoc, use <code>:doc:</code> instead of this.
*/
@@ -2462,13 +3040,14 @@ rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module)
switch (me->def->type) {
case VM_METHOD_TYPE_ISEQ:
if (ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_rest &&
+ !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_post &&
!ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kw &&
!ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kwrest) {
ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.ruby2_keywords = 1;
rb_clear_method_cache(module, name);
}
else {
- rb_warn("Skipping set of ruby2_keywords flag for %s (method accepts keywords or method does not accept argument splat)", rb_id2name(name));
+ rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
}
break;
case VM_METHOD_TYPE_BMETHOD: {
@@ -2481,25 +3060,26 @@ rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module)
const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(procval);
const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
if (ISEQ_BODY(iseq)->param.flags.has_rest &&
+ !ISEQ_BODY(iseq)->param.flags.has_post &&
!ISEQ_BODY(iseq)->param.flags.has_kw &&
!ISEQ_BODY(iseq)->param.flags.has_kwrest) {
ISEQ_BODY(iseq)->param.flags.ruby2_keywords = 1;
rb_clear_method_cache(module, name);
}
else {
- rb_warn("Skipping set of ruby2_keywords flag for %s (method accepts keywords or method does not accept argument splat)", rb_id2name(name));
+ rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
}
break;
}
}
/* fallthrough */
default:
- rb_warn("Skipping set of ruby2_keywords flag for %s (method not defined in Ruby)", rb_id2name(name));
+ rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method not defined in Ruby)", QUOTE_ID(name));
break;
}
}
else {
- rb_warn("Skipping set of ruby2_keywords flag for %s (can only set in method defining module)", rb_id2name(name));
+ rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (can only set in method defining module)", QUOTE_ID(name));
}
}
return Qnil;
@@ -2570,7 +3150,7 @@ rb_mod_private_method(int argc, VALUE *argv, VALUE obj)
/* Top-level `public` visibility helper (backing the main-object method).
 * NOTE(review): this span is a diff hunk — the `-`/`+` lines below show the
 * old and new bodies. Post-patch, the call delegates to rb_mod_public on
 * rb_top_main_class("public") instead of hard-coding rb_cObject; presumably
 * rb_top_main_class validates/returns the class of the top-level main object
 * — confirm against its definition elsewhere in the tree. */
static VALUE
top_public(int argc, VALUE *argv, VALUE _)
{
-    return rb_mod_public(argc, argv, rb_cObject);
+    return rb_mod_public(argc, argv, rb_top_main_class("public"));
}
/*
@@ -2590,7 +3170,7 @@ top_public(int argc, VALUE *argv, VALUE _)
/* Top-level `private` visibility helper (backing the main-object method).
 * NOTE(review): diff hunk — `-`/`+` lines show old vs. new bodies. Post-patch
 * it delegates to rb_mod_private on rb_top_main_class("private") rather than
 * the hard-coded rb_cObject, mirroring the same change made to top_public. */
static VALUE
top_private(int argc, VALUE *argv, VALUE _)
{
-    return rb_mod_private(argc, argv, rb_cObject);
+    return rb_mod_private(argc, argv, rb_top_main_class("private"));
}
/*
@@ -2603,7 +3183,7 @@ top_private(int argc, VALUE *argv, VALUE _)
/* Top-level `ruby2_keywords` helper (backing the main-object method).
 * NOTE(review): diff hunk — `-`/`+` lines show old vs. new bodies. Post-patch
 * it forwards to rb_mod_ruby2_keywords on rb_top_main_class("ruby2_keywords")
 * instead of rb_cObject, consistent with top_public/top_private above. */
static VALUE
top_ruby2_keywords(int argc, VALUE *argv, VALUE module)
{
-    return rb_mod_ruby2_keywords(argc, argv, rb_cObject);
+    return rb_mod_ruby2_keywords(argc, argv, rb_top_main_class("ruby2_keywords"));
}
/*
@@ -2784,8 +3364,8 @@ vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int pri
rb_category_warn(RB_WARN_CATEGORY_DEPRECATED,
"%"PRIsVALUE"%c""respond_to?(:%"PRIsVALUE") uses"
" the deprecated method signature, which takes one parameter",
- (FL_TEST(klass, FL_SINGLETON) ? obj : klass),
- (FL_TEST(klass, FL_SINGLETON) ? '.' : '#'),
+ (RCLASS_SINGLETON_P(klass) ? obj : klass),
+ (RCLASS_SINGLETON_P(klass) ? '.' : '#'),
QUOTE_ID(id));
if (!NIL_P(location)) {
VALUE path = RARRAY_AREF(location, 0);
@@ -2885,12 +3465,6 @@ obj_respond_to_missing(VALUE obj, VALUE mid, VALUE priv)
}
void
-Init_Method(void)
-{
- //
-}
-
-void
Init_eval_method(void)
{
rb_define_method(rb_mKernel, "respond_to?", obj_respond_to, -1);