author     Nobuyoshi Nakada <nobu@ruby-lang.org>          2022-10-12 19:38:29 +0900
committer  Aaron Patterson <aaron.patterson@gmail.com>    2022-10-12 09:14:55 -0700
commit     b55e3b842a8cf4349914b05cebf00ab53024ae69 (patch)
tree       9c93b5338a892bdb927df4734d8a2fffb1d01c3b
parent     80da7250c5d7c862e3c1e1431683a1f1211a4d9c (diff)
Initialize shape attr index also in non-markable CC
Notes:
    Merged: https://github.com/ruby/ruby/pull/6532
-rw-r--r--  vm_callinfo.h    | 14
-rw-r--r--  vm_insnhelper.c  | 24
2 files changed, 14 insertions, 24 deletions
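
The patch moves the vm_cc_markable() check into vm_cc_attr_index_set() itself: a non-markable call cache is reset to INVALID_SHAPE_ID instead of being skipped, and vm_cc_attr_index_initialize() becomes a thin wrapper that calls the setter with index (attr_index_t)-1. The standalone sketch below illustrates the resulting cache-value packing and read-back; the constants SHAPE_FLAG_SHIFT, SHAPE_FLAG_MASK, and INVALID_SHAPE_ID are placeholder values chosen for the example, not the definitions from the Ruby headers.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t shape_id_t;
typedef uint32_t attr_index_t;

/* Placeholder layout: shape id in the upper 32 bits, (index + 1) in the
 * lower 32 bits.  The real SHAPE_FLAG_SHIFT / SHAPE_FLAG_MASK /
 * INVALID_SHAPE_ID come from the Ruby source and may differ. */
#define SHAPE_FLAG_SHIFT 32
#define SHAPE_FLAG_MASK  ((UINT64_C(1) << SHAPE_FLAG_SHIFT) - 1)
#define INVALID_SHAPE_ID ((shape_id_t)-1)

/* Same packing as the patched vm_cc_attr_index_set(): a non-markable
 * cache is forced back to INVALID_SHAPE_ID with no cached index. */
static uint64_t
pack_attr(bool markable, attr_index_t index, shape_id_t dest_shape_id)
{
    if (!markable) {
        return (uint64_t)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT;
    }
    return (index + 1) | ((uint64_t)dest_shape_id << SHAPE_FLAG_SHIFT);
}

/* Same unpacking as vm_cc_atomic_shape_and_index(). */
static void
unpack_attr(uint64_t value, shape_id_t *shape_id, attr_index_t *index)
{
    *shape_id = (shape_id_t)(value >> SHAPE_FLAG_SHIFT);
    *index = (attr_index_t)(value & SHAPE_FLAG_MASK) - 1;
}

int
main(void)
{
    shape_id_t sid;
    attr_index_t idx;

    /* vm_cc_attr_index_initialize(cc, shape_id) is now
     * vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id):
     * (index + 1) wraps to 0, which reads back as "index not set". */
    unpack_attr(pack_attr(true, (attr_index_t)-1, 42), &sid, &idx);
    printf("markable init:    shape=%u index_set=%d\n", sid, idx != (attr_index_t)-1);

    /* For a non-markable CC the setter stores INVALID_SHAPE_ID, so the
     * reader sees a cache miss, matching the guards removed from
     * vm_getivar() in vm_insnhelper.c. */
    unpack_attr(pack_attr(false, 3, 42), &sid, &idx);
    printf("non-markable set: shape=%u index_set=%d\n", sid, idx != (attr_index_t)-1);

    return 0;
}

With the markable check centralized in the setter, call sites such as fill_ivar_cache(), populate_cache(), and vm_call_method_each_type() can call it unconditionally, which is what the vm_insnhelper.c hunks below do.
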
diff --git a/vm_callinfo.h b/vm_callinfo.h
index 3fb0fa8ca7..e59f25ca59 100644
--- a/vm_callinfo.h
+++ b/vm_callinfo.h
@@ -301,12 +301,12 @@ extern const struct rb_callcache *rb_vm_empty_cc_for_super(void);
#define vm_cc_empty() rb_vm_empty_cc()
+static inline void vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id);
+
static inline void
vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
{
- VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- VM_ASSERT(cc != vm_cc_empty());
- *(uintptr_t *)&cc->aux_.attr.value = (uintptr_t)(shape_id) << SHAPE_FLAG_SHIFT;
+ vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
}
static inline const struct rb_callcache *
@@ -385,7 +385,6 @@ vm_cc_attr_index_dest_shape_id(const struct rb_callcache *cc)
static inline void
vm_cc_atomic_shape_and_index(const struct rb_callcache *cc, shape_id_t * shape_id, attr_index_t * index)
{
- VM_ASSERT(vm_cc_markable(cc));
uintptr_t cache_value = cc->aux_.attr.value; // Atomically read 64 bits
*shape_id = (shape_id_t)(cache_value >> SHAPE_FLAG_SHIFT);
*index = (attr_index_t)(cache_value & SHAPE_FLAG_MASK) - 1;
@@ -451,9 +450,14 @@ vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
static inline void
vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id)
{
+ uintptr_t *attr_value = (uintptr_t *)&cc->aux_.attr.value;
+ if (!vm_cc_markable(cc)) {
+ *attr_value = (uintptr_t)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT;
+ return;
+ }
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());
- *(uintptr_t *)&cc->aux_.attr.value = (index + 1) | ((uintptr_t)(dest_shape_id) << SHAPE_FLAG_SHIFT);
+ *attr_value = (index + 1) | ((uintptr_t)(dest_shape_id) << SHAPE_FLAG_SHIFT);
}
static inline void
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 0a62b0d86c..3f1337c36c 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1096,9 +1096,7 @@ static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
if (is_attr) {
- if (vm_cc_markable(cc)) {
- vm_cc_attr_index_set(cc, index, shape_id);
- }
+ vm_cc_attr_index_set(cc, index, shape_id);
}
else {
vm_ic_attr_index_set(iseq, ic, index, shape_id);
@@ -1161,13 +1159,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
attr_index_t index;
if (is_attr) {
- if (vm_cc_markable(cc)) {
- vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
- }
- else {
- cached_id = INVALID_SHAPE_ID;
- index = ATTR_INDEX_NOT_SET;
- }
+ vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
}
else {
vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
@@ -1214,9 +1206,7 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
}
else {
if (is_attr) {
- if (vm_cc_markable(cc)) {
- vm_cc_attr_index_initialize(cc, shape_id);
- }
+ vm_cc_attr_index_initialize(cc, shape_id);
}
else {
vm_ic_attr_index_initialize(ic, shape_id);
@@ -1248,9 +1238,7 @@ populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_ise
{
// Cache population code
if (is_attr) {
- if (vm_cc_markable(cc)) {
- vm_cc_attr_index_set(cc, index, next_shape_id);
- }
+ vm_cc_attr_index_set(cc, index, next_shape_id);
}
else {
vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
@@ -3927,9 +3915,7 @@ vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, st
CALLER_SETUP_ARG(cfp, calling, ci);
CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
rb_check_arity(calling->argc, 0, 0);
- if (vm_cc_markable(cc)) {
- vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
- }
+ vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
VM_CALL_METHOD_ATTR(v,
vm_call_ivar(ec, cfp, calling),