-rw-r--r--  common.mk                                    |   2
-rw-r--r--  template/Makefile.in                         |   1
-rw-r--r--  test/.excludes-zjit/TestRubyOptimization.rb  |   1
-rw-r--r--  test/ruby/test_zjit.rb                       |  47
-rw-r--r--  tool/ruby_vm/views/_insn_leaf_info.erb       |  18
-rw-r--r--  tool/ruby_vm/views/insns_info.inc.erb        |   1
-rw-r--r--  vm.c                                         |   2
-rw-r--r--  vm_eval.c                                    |   4
-rw-r--r--  zjit.c                                       |   6
-rw-r--r--  zjit.h                                       |   4
-rw-r--r--  zjit/bindgen/src/main.rs                     |   1
-rw-r--r--  zjit/src/codegen.rs                          |  39
-rw-r--r--  zjit/src/cruby_bindings.inc.rs               |   1
-rw-r--r--  zjit/src/hir.rs                              | 150
-rw-r--r--  zjit/src/invariants.rs                       |  66
15 files changed, 236 insertions, 107 deletions
diff --git a/common.mk b/common.mk
index dac615271e..5cc7886796 100644
--- a/common.mk
+++ b/common.mk
@@ -1133,7 +1133,7 @@ $(srcs_vpath)insns_info.inc: $(tooldir)/ruby_vm/views/insns_info.inc.erb $(inc_c
$(tooldir)/ruby_vm/views/_insn_type_chars.erb $(tooldir)/ruby_vm/views/_insn_name_info.erb \
$(tooldir)/ruby_vm/views/_insn_len_info.erb $(tooldir)/ruby_vm/views/_insn_operand_info.erb \
$(tooldir)/ruby_vm/views/_attributes.erb $(tooldir)/ruby_vm/views/_comptime_insn_stack_increase.erb \
- $(tooldir)/ruby_vm/views/_zjit_helpers.erb
+ $(tooldir)/ruby_vm/views/_zjit_helpers.erb $(tooldir)/ruby_vm/views/_insn_leaf_info.erb
$(srcs_vpath)vmtc.inc: $(tooldir)/ruby_vm/views/vmtc.inc.erb $(inc_common_headers)
$(srcs_vpath)vm.inc: $(tooldir)/ruby_vm/views/vm.inc.erb $(inc_common_headers) \
$(tooldir)/ruby_vm/views/_insn_entry.erb $(tooldir)/ruby_vm/views/_trace_instruction.erb \
diff --git a/template/Makefile.in b/template/Makefile.in
index 39f702b66d..67e0978956 100644
--- a/template/Makefile.in
+++ b/template/Makefile.in
@@ -679,6 +679,7 @@ $(INSNS): $(srcdir)/insns.def vm_opts.h \
$(tooldir)/ruby_vm/views/_comptime_insn_stack_increase.erb \
$(tooldir)/ruby_vm/views/_copyright.erb \
$(tooldir)/ruby_vm/views/_insn_entry.erb \
+ $(tooldir)/ruby_vm/views/_insn_leaf_info.erb \
$(tooldir)/ruby_vm/views/_insn_len_info.erb \
$(tooldir)/ruby_vm/views/_insn_name_info.erb \
$(tooldir)/ruby_vm/views/_insn_operand_info.erb \
diff --git a/test/.excludes-zjit/TestRubyOptimization.rb b/test/.excludes-zjit/TestRubyOptimization.rb
deleted file mode 100644
index 5361ab02c7..0000000000
--- a/test/.excludes-zjit/TestRubyOptimization.rb
+++ /dev/null
@@ -1 +0,0 @@
-exclude(:test_side_effect_in_popped_splat, 'Test fails with ZJIT due to locals invalidation')
diff --git a/test/ruby/test_zjit.rb b/test/ruby/test_zjit.rb
index f430ff8c44..2f19708466 100644
--- a/test/ruby/test_zjit.rb
+++ b/test/ruby/test_zjit.rb
@@ -196,6 +196,50 @@ class TestZJIT < Test::Unit::TestCase
}, insns: [:setglobal]
end
+ def test_getlocal_after_eval
+ assert_compiles '2', %q{
+ def test
+ a = 1
+ eval('a = 2')
+ a
+ end
+ test
+ }
+ end
+
+ def test_getlocal_after_instance_eval
+ assert_compiles '2', %q{
+ def test
+ a = 1
+ instance_eval('a = 2')
+ a
+ end
+ test
+ }
+ end
+
+ def test_getlocal_after_module_eval
+ assert_compiles '2', %q{
+ def test
+ a = 1
+ Kernel.module_eval('a = 2')
+ a
+ end
+ test
+ }
+ end
+
+ def test_getlocal_after_class_eval
+ assert_compiles '2', %q{
+ def test
+ a = 1
+ Kernel.class_eval('a = 2')
+ a
+ end
+ test
+ }
+ end
+
def test_setlocal
assert_compiles '3', %q{
def test(n)
@@ -1453,8 +1497,7 @@ class TestZJIT < Test::Unit::TestCase
end
def test_bop_invalidation
- omit 'Invalidation on BOP redefinition is not implemented yet'
- assert_compiles '', %q{
+ assert_compiles '100', %q{
def test
eval(<<~RUBY)
class Integer
diff --git a/tool/ruby_vm/views/_insn_leaf_info.erb b/tool/ruby_vm/views/_insn_leaf_info.erb
new file mode 100644
index 0000000000..79642b8f66
--- /dev/null
+++ b/tool/ruby_vm/views/_insn_leaf_info.erb
@@ -0,0 +1,18 @@
+MAYBE_UNUSED(static bool insn_leaf(int insn, const VALUE *opes));
+static bool
+insn_leaf(int insn, const VALUE *opes)
+{
+    switch (insn) {
+% RubyVM::Instructions.each do |insn|
+%   next if insn.is_a?(RubyVM::TraceInstructions) || insn.is_a?(RubyVM::ZJITInstructions)
+      case <%= insn.bin %>:
+        return attr_leaf_<%= insn.name %>(<%=
+          insn.operands.map.with_index do |ope, i|
+            "(#{ope[:type]})opes[#{i}]"
+          end.join(', ')
+        %>);
+% end
+      default:
+        return false;
+    }
+}
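
The generated insn_leaf() maps every non-trace, non-ZJIT opcode to its attr_leaf_* attribute applied to the decoded operands, so the JIT can ask at compile time whether an instruction may call back into Ruby. The sketch below is illustrative only (the helper name is invented); it shows how the Rust side can consult this table through the rb_zjit_insn_leaf binding added later in this patch, together with the existing rb_iseq_pc_at_idx/rb_iseq_opcode_at_pc bindings:

    /// Illustrative helper (not part of this patch): true if the instruction at
    /// `insn_idx` may re-enter Ruby, per the table generated by _insn_leaf_info.erb.
    /// Assumes the cruby binding types (IseqPtr, VALUE) are in scope.
    unsafe fn insn_may_reenter_ruby(iseq: IseqPtr, insn_idx: u32) -> bool {
        let pc = rb_iseq_pc_at_idx(iseq, insn_idx);
        let opcode = rb_iseq_opcode_at_pc(iseq, pc);
        // Operand words follow the opcode word, hence the offset of 1.
        !rb_zjit_insn_leaf(opcode, pc.offset(1))
    }
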
diff --git a/tool/ruby_vm/views/insns_info.inc.erb b/tool/ruby_vm/views/insns_info.inc.erb
index 0a6f71fee3..48dd0e8832 100644
--- a/tool/ruby_vm/views/insns_info.inc.erb
+++ b/tool/ruby_vm/views/insns_info.inc.erb
@@ -21,5 +21,6 @@
<%= render 'sp_inc_helpers' %>
<%= render 'zjit_helpers' %>
<%= render 'attributes' %>
+<%= render 'insn_leaf_info' %>
<%= render 'comptime_insn_stack_increase' %>
#endif
diff --git a/vm.c b/vm.c
index 7f0a477ad6..e97619cc14 100644
--- a/vm.c
+++ b/vm.c
@@ -1061,7 +1061,7 @@ vm_make_env_each(const rb_execution_context_t * const ec, rb_control_frame_t *co
// Invalidate JIT code that assumes cfp->ep == vm_base_ptr(cfp).
if (env->iseq) {
rb_yjit_invalidate_ep_is_bp(env->iseq);
- rb_zjit_invalidate_ep_is_bp(env->iseq);
+ rb_zjit_invalidate_no_ep_escape(env->iseq);
}
return (VALUE)env;
diff --git a/vm_eval.c b/vm_eval.c
index 68b692ac9c..81b6bed725 100644
--- a/vm_eval.c
+++ b/vm_eval.c
@@ -1985,6 +1985,10 @@ eval_string_with_cref(VALUE self, VALUE src, rb_cref_t *cref, VALUE file, int li
block.as.captured.code.iseq = cfp->iseq;
block.type = block_type_iseq;
+ // The EP is not escaped to the heap here, but it is captured and reused by another frame.
+ // Unlike YJIT, ZJIT's handling of locals is incompatible with that, so invalidate the ISEQ for ZJIT.
+ rb_zjit_invalidate_no_ep_escape(cfp->iseq);
+
iseq = eval_make_iseq(src, file, line, &block);
if (!iseq) {
rb_exc_raise(ec->errinfo);
diff --git a/zjit.c b/zjit.c
index de5d859ba1..313cced2aa 100644
--- a/zjit.c
+++ b/zjit.c
@@ -333,6 +333,12 @@ rb_zjit_defined_ivar(VALUE obj, ID id, VALUE pushval)
return result ? pushval : Qnil;
}
+bool
+rb_zjit_insn_leaf(int insn, const VALUE *opes)
+{
+ return insn_leaf(insn, opes);
+}
+
// Primitives used by zjit.rb. Don't put other functions below, which wouldn't use them.
VALUE rb_zjit_assert_compiles(rb_execution_context_t *ec, VALUE self);
VALUE rb_zjit_stats(rb_execution_context_t *ec, VALUE self, VALUE target_key);
diff --git a/zjit.h b/zjit.h
index 27a7b8a001..9735cab6d4 100644
--- a/zjit.h
+++ b/zjit.h
@@ -18,7 +18,7 @@ void rb_zjit_profile_insn(uint32_t insn, rb_execution_context_t *ec);
void rb_zjit_profile_enable(const rb_iseq_t *iseq);
void rb_zjit_bop_redefined(int redefined_flag, enum ruby_basic_operators bop);
void rb_zjit_cme_invalidate(const rb_callable_method_entry_t *cme);
-void rb_zjit_invalidate_ep_is_bp(const rb_iseq_t *iseq);
+void rb_zjit_invalidate_no_ep_escape(const rb_iseq_t *iseq);
void rb_zjit_constant_state_changed(ID id);
void rb_zjit_iseq_mark(void *payload);
void rb_zjit_iseq_update_references(void *payload);
@@ -31,7 +31,7 @@ static inline void rb_zjit_profile_insn(uint32_t insn, rb_execution_context_t *e
static inline void rb_zjit_profile_enable(const rb_iseq_t *iseq) {}
static inline void rb_zjit_bop_redefined(int redefined_flag, enum ruby_basic_operators bop) {}
static inline void rb_zjit_cme_invalidate(const rb_callable_method_entry_t *cme) {}
-static inline void rb_zjit_invalidate_ep_is_bp(const rb_iseq_t *iseq) {}
+static inline void rb_zjit_invalidate_no_ep_escape(const rb_iseq_t *iseq) {}
static inline void rb_zjit_constant_state_changed(ID id) {}
static inline void rb_zjit_before_ractor_spawn(void) {}
static inline void rb_zjit_tracing_invalidate_all(void) {}
diff --git a/zjit/bindgen/src/main.rs b/zjit/bindgen/src/main.rs
index 354ea6338f..e3609e237e 100644
--- a/zjit/bindgen/src/main.rs
+++ b/zjit/bindgen/src/main.rs
@@ -358,6 +358,7 @@ fn main() {
.allowlist_function("rb_zjit_print_exception")
.allowlist_function("rb_zjit_singleton_class_p")
.allowlist_function("rb_zjit_defined_ivar")
+ .allowlist_function("rb_zjit_insn_leaf")
.allowlist_type("robject_offsets")
.allowlist_type("rstring_offsets")
.allowlist_var("RB_INVALID_SHAPE_ID")
diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs
index 0bb3ea3e93..5c13d278fb 100644
--- a/zjit/src/codegen.rs
+++ b/zjit/src/codegen.rs
@@ -9,7 +9,7 @@ use std::slice;
use crate::asm::Label;
use crate::backend::current::{Reg, ALLOC_REGS};
-use crate::invariants::{track_bop_assumption, track_cme_assumption, track_single_ractor_assumption, track_stable_constant_names_assumption, track_no_trace_point_assumption};
+use crate::invariants::{track_bop_assumption, track_cme_assumption, track_no_ep_escape_assumption, track_no_trace_point_assumption, track_single_ractor_assumption, track_stable_constant_names_assumption};
use crate::gc::{append_gc_offsets, get_or_create_iseq_payload, get_or_create_iseq_payload_ptr, IseqPayload, IseqStatus};
use crate::state::ZJITState;
use crate::stats::{exit_counter_for_compile_error, incr_counter, incr_counter_by, CompileError};
@@ -581,25 +581,24 @@ fn gen_patch_point(jit: &mut JITState, asm: &mut Assembler, invariant: &Invarian
// Remember the current address as a patch point
asm.pos_marker(move |code_ptr, cb| {
+ let side_exit_ptr = cb.resolve_label(label);
match invariant {
Invariant::BOPRedefined { klass, bop } => {
- let side_exit_ptr = cb.resolve_label(label);
track_bop_assumption(klass, bop, code_ptr, side_exit_ptr, payload_ptr);
}
Invariant::MethodRedefined { klass: _, method: _, cme } => {
- let side_exit_ptr = cb.resolve_label(label);
track_cme_assumption(cme, code_ptr, side_exit_ptr, payload_ptr);
}
Invariant::StableConstantNames { idlist } => {
- let side_exit_ptr = cb.resolve_label(label);
track_stable_constant_names_assumption(idlist, code_ptr, side_exit_ptr, payload_ptr);
}
Invariant::NoTracePoint => {
- let side_exit_ptr = cb.resolve_label(label);
track_no_trace_point_assumption(code_ptr, side_exit_ptr, payload_ptr);
}
+ Invariant::NoEPEscape(iseq) => {
+ track_no_ep_escape_assumption(iseq, code_ptr, side_exit_ptr, payload_ptr);
+ }
Invariant::SingleRactorMode => {
- let side_exit_ptr = cb.resolve_label(label);
track_single_ractor_assumption(code_ptr, side_exit_ptr, payload_ptr);
}
}
@@ -871,7 +870,7 @@ fn gen_entry_param(asm: &mut Assembler, iseq: IseqPtr, local_idx: usize) -> lir:
// If the ISEQ does not escape EP, we can optimize the local variable access using the SP register.
if !iseq_entry_escapes_ep(iseq) {
// Create a reference to the local variable using the SP register. We assume EP == BP.
- // TODO: Implement the invalidation in rb_zjit_invalidate_ep_is_bp()
+ // TODO: Implement the invalidation in rb_zjit_invalidate_no_ep_escape()
let offs = -(SIZEOF_VALUE_I32 * (ep_offset + 1));
Opnd::mem(64, SP, offs)
} else {
@@ -966,14 +965,10 @@ fn gen_send(
unsafe extern "C" {
fn rb_vm_send(ec: EcPtr, cfp: CfpPtr, cd: VALUE, blockiseq: IseqPtr) -> VALUE;
}
- let ret = asm.ccall(
+ asm.ccall(
rb_vm_send as *const u8,
vec![EC, CFP, (cd as usize).into(), VALUE(blockiseq as usize).into()],
- );
- // TODO: Add a PatchPoint here that can side-exit the function if the callee messed with
- // the frame's locals
-
- ret
+ )
}
/// Compile a dynamic dispatch without block
@@ -1002,14 +997,10 @@ fn gen_send_without_block(
unsafe extern "C" {
fn rb_vm_opt_send_without_block(ec: EcPtr, cfp: CfpPtr, cd: VALUE) -> VALUE;
}
- let ret = asm.ccall(
+ asm.ccall(
rb_vm_opt_send_without_block as *const u8,
vec![EC, CFP, (cd as usize).into()],
- );
- // TODO(max): Add a PatchPoint here that can side-exit the function if the callee messed with
- // the frame's locals
-
- ret
+ )
}
/// Compile a direct jump to an ISEQ call without block
@@ -1058,8 +1049,6 @@ fn gen_send_without_block_direct(
let iseq_call = IseqCall::new(iseq);
let dummy_ptr = cb.get_write_ptr().raw_ptr(cb);
jit.iseq_calls.push(iseq_call.clone());
- // TODO(max): Add a PatchPoint here that can side-exit the function if the callee messed with
- // the frame's locals
let ret = asm.ccall_with_iseq_call(dummy_ptr, c_args, &iseq_call);
// If a callee side-exits, i.e. returns Qundef, propagate the return value to the caller.
@@ -1099,14 +1088,10 @@ fn gen_invokesuper(
unsafe extern "C" {
fn rb_vm_invokesuper(ec: EcPtr, cfp: CfpPtr, cd: VALUE, blockiseq: IseqPtr) -> VALUE;
}
- let ret = asm.ccall(
+ asm.ccall(
rb_vm_invokesuper as *const u8,
vec![EC, CFP, (cd as usize).into(), VALUE(blockiseq as usize).into()],
- );
- // TODO: Add a PatchPoint here that can side-exit the function if the callee messed with
- // the frame's locals
-
- ret
+ )
}
/// Compile a string resurrection
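
With the side-exit label resolved once up front, gen_patch_point now routes every invariant, including the new NoEPEscape(iseq), to a tracker that records where to patch and where to side-exit; the per-call-site TODOs in gen_send and friends could be dropped because that registration now covers locals invalidation. A minimal model of that dispatch shape, with invented names and stand-in pointer values (the real trackers live in invariants.rs):

    // Minimal model, not ZJIT code: each invariant registers (code_ptr, side_exit_ptr)
    // with bookkeeping that can later turn the patch point into a side exit.
    enum ToyInvariant { BopRedefined, MethodRedefined, NoEpEscape(&'static str) }

    fn register_patch_point(invariant: &ToyInvariant, code_ptr: usize, side_exit_ptr: usize) {
        match invariant {
            ToyInvariant::BopRedefined => println!("bop: patch {code_ptr:#x} -> exit {side_exit_ptr:#x}"),
            ToyInvariant::MethodRedefined => println!("cme: patch {code_ptr:#x} -> exit {side_exit_ptr:#x}"),
            // New in this patch: remember the patch point per ISEQ so that
            // rb_zjit_invalidate_no_ep_escape() can find and rewrite it.
            ToyInvariant::NoEpEscape(iseq) => {
                println!("no-EP-escape({iseq}): patch {code_ptr:#x} -> exit {side_exit_ptr:#x}");
            }
        }
    }

    fn main() {
        // As in gen_patch_point, the side exit address is resolved once and shared.
        let side_exit_ptr = 0x5000;
        register_patch_point(&ToyInvariant::NoEpEscape("test"), 0x4000, side_exit_ptr);
    }
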
diff --git a/zjit/src/cruby_bindings.inc.rs b/zjit/src/cruby_bindings.inc.rs
index 071ec28247..0ff09a3c09 100644
--- a/zjit/src/cruby_bindings.inc.rs
+++ b/zjit/src/cruby_bindings.inc.rs
@@ -939,6 +939,7 @@ unsafe extern "C" {
pub fn rb_zjit_print_exception();
pub fn rb_zjit_singleton_class_p(klass: VALUE) -> bool;
pub fn rb_zjit_defined_ivar(obj: VALUE, id: ID, pushval: VALUE) -> VALUE;
+ pub fn rb_zjit_insn_leaf(insn: ::std::os::raw::c_int, opes: *const VALUE) -> bool;
pub fn rb_iseq_encoded_size(iseq: *const rb_iseq_t) -> ::std::os::raw::c_uint;
pub fn rb_iseq_pc_at_idx(iseq: *const rb_iseq_t, insn_idx: u32) -> *mut VALUE;
pub fn rb_iseq_opcode_at_pc(iseq: *const rb_iseq_t, pc: *const VALUE) -> ::std::os::raw::c_int;
diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index 759585b8fe..98381fa090 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -136,6 +136,8 @@ pub enum Invariant {
},
/// TracePoint is not enabled. If TracePoint is enabled, this is invalidated.
NoTracePoint,
+ /// cfp->ep has not escaped to the heap for this ISEQ. If it escapes, this is invalidated.
+ NoEPEscape(IseqPtr),
/// There is one ractor running. If a non-root ractor gets spawned, this is invalidated.
SingleRactorMode,
}
@@ -250,6 +252,7 @@ impl<'a> std::fmt::Display for InvariantPrinter<'a> {
write!(f, ")")
}
Invariant::NoTracePoint => write!(f, "NoTracePoint"),
+ Invariant::NoEPEscape(iseq) => write!(f, "NoEPEscape({})", &iseq_name(iseq)),
Invariant::SingleRactorMode => write!(f, "SingleRactorMode"),
}
}
@@ -2705,6 +2708,15 @@ pub struct FrameState {
locals: Vec<InsnId>,
}
+impl FrameState {
+ /// Return itself without locals. Useful for side-exiting without spilling locals.
+ fn without_locals(&self) -> Self {
+ let mut state = self.clone();
+ state.locals.clear();
+ state
+ }
+}
+
/// Print adaptor for [`FrameState`]. See [`PtrPrintMap`].
pub struct FrameStatePrinter<'a> {
inner: &'a FrameState,
@@ -3049,13 +3061,13 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
}
fun.param_types.push(param_type);
}
- queue.push_back((entry_state, fun.entry_block, /*insn_idx=*/0_u32));
+ queue.push_back((entry_state, fun.entry_block, /*insn_idx=*/0_u32, /*local_inval=*/false));
let mut visited = HashSet::new();
let iseq_size = unsafe { get_iseq_encoded_size(iseq) };
let iseq_type = unsafe { get_iseq_body_type(iseq) };
- while let Some((incoming_state, block, mut insn_idx)) = queue.pop_front() {
+ while let Some((incoming_state, block, mut insn_idx, mut local_inval)) = queue.pop_front() {
if visited.contains(&block) { continue; }
visited.insert(block);
let (self_param, mut state) = if insn_idx == 0 {
@@ -3096,12 +3108,16 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
profiles.profile_stack(&exit_state);
}
- unsafe extern "C" {
- fn rb_iseq_event_flags(iseq: IseqPtr, pos: usize) -> rb_event_flag_t;
+ // Flag a future getlocal/setlocal to add a patch point if this instruction is not leaf.
+ if unsafe { !rb_zjit_insn_leaf(opcode as i32, pc.offset(1)) } {
+ local_inval = true;
}
// We add NoTracePoint patch points before every instruction that could be affected by TracePoint.
// This ensures that if TracePoint is enabled, we can exit the generated code as fast as possible.
+ unsafe extern "C" {
+ fn rb_iseq_event_flags(iseq: IseqPtr, pos: usize) -> rb_event_flag_t;
+ }
if unsafe { rb_iseq_event_flags(iseq, insn_idx as usize) } != 0 {
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state.clone() });
fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoTracePoint, state: exit_id });
@@ -3283,7 +3299,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
val: test_id,
target: BranchEdge { target, args: state.as_args(self_param) }
});
- queue.push_back((state.clone(), target, target_idx));
+ queue.push_back((state.clone(), target, target_idx, local_inval));
}
YARVINSN_branchif => {
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
@@ -3297,7 +3313,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
val: test_id,
target: BranchEdge { target, args: state.as_args(self_param) }
});
- queue.push_back((state.clone(), target, target_idx));
+ queue.push_back((state.clone(), target, target_idx, local_inval));
}
YARVINSN_branchnil => {
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
@@ -3311,7 +3327,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
val: test_id,
target: BranchEdge { target, args: state.as_args(self_param) }
});
- queue.push_back((state.clone(), target, target_idx));
+ queue.push_back((state.clone(), target, target_idx, local_inval));
}
YARVINSN_opt_case_dispatch => {
// TODO: Some keys are visible at compile time, so in the future we can
@@ -3328,7 +3344,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
// Skip the fast-path and go straight to the fallback code. We will let the
// optimizer take care of the converting Class#new->alloc+initialize instead.
fun.push_insn(block, Insn::Jump(BranchEdge { target, args: state.as_args(self_param) }));
- queue.push_back((state.clone(), target, target_idx));
+ queue.push_back((state.clone(), target, target_idx, local_inval));
break; // Don't enqueue the next block as a successor
}
YARVINSN_jump => {
@@ -3340,7 +3356,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let _branch_id = fun.push_insn(block, Insn::Jump(
BranchEdge { target, args: state.as_args(self_param) }
));
- queue.push_back((state.clone(), target, target_idx));
+ queue.push_back((state.clone(), target, target_idx, local_inval));
break; // Don't enqueue the next block as a successor
}
YARVINSN_getlocal_WC_0 => {
@@ -3348,27 +3364,34 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
if iseq_type == ISEQ_TYPE_EVAL || has_blockiseq {
// On eval, the locals are always on the heap, so read the local using EP.
let val = fun.push_insn(block, Insn::GetLocal { ep_offset, level: 0 });
- state.setlocal(ep_offset, val);
+ state.setlocal(ep_offset, val); // remember the result to spill on side-exits
state.stack_push(val);
} else {
- // TODO(alan): This implementation doesn't read from EP, so will miss writes
- // from nested ISeqs. This will need to be amended when we add codegen for
- // Send.
+ if local_inval {
+ // If there has been any non-leaf call since JIT entry or the last patch point,
+ // add a patch point to make sure locals have not been escaped.
+ let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state.without_locals() }); // skip spilling locals
+ fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoEPEscape(iseq), state: exit_id });
+ local_inval = false;
+ }
let val = state.getlocal(ep_offset);
state.stack_push(val);
}
}
YARVINSN_setlocal_WC_0 => {
- // TODO(alan): This implementation doesn't write to EP, where nested scopes
- // read, so they'll miss these writes. This will need to be amended when we
- // add codegen for Send.
let ep_offset = get_arg(pc, 0).as_u32();
let val = state.stack_pop()?;
- state.setlocal(ep_offset, val);
if iseq_type == ISEQ_TYPE_EVAL || has_blockiseq {
// On eval, the locals are always on the heap, so write the local using EP.
fun.push_insn(block, Insn::SetLocal { val, ep_offset, level: 0 });
+ } else if local_inval {
+ // If there has been any non-leaf call since JIT entry or the last patch point,
+ // add a patch point to make sure locals have not been escaped.
+ let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state.without_locals() }); // skip spilling locals
+ fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoEPEscape(iseq), state: exit_id });
+ local_inval = false;
}
+ state.setlocal(ep_offset, val);
}
YARVINSN_getlocal_WC_1 => {
let ep_offset = get_arg(pc, 0).as_u32();
@@ -3731,7 +3754,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
if insn_idx_to_block.contains_key(&insn_idx) {
let target = insn_idx_to_block[&insn_idx];
fun.push_insn(block, Insn::Jump(BranchEdge { target, args: state.as_args(self_param) }));
- queue.push_back((state, target, insn_idx));
+ queue.push_back((state, target, insn_idx, local_inval));
break; // End the block
}
}
@@ -4656,14 +4679,17 @@ mod tests {
v8:CBool = Test v1
IfFalse v8, bb1(v0, v1, v2)
v12:Fixnum[3] = Const Value(3)
+ PatchPoint NoEPEscape(test)
CheckInterrupts
Jump bb2(v0, v1, v12)
- bb1(v16:BasicObject, v17:BasicObject, v18:NilClass):
- v22:Fixnum[4] = Const Value(4)
- Jump bb2(v16, v17, v22)
- bb2(v24:BasicObject, v25:BasicObject, v26:Fixnum):
+ bb1(v18:BasicObject, v19:BasicObject, v20:NilClass):
+ v24:Fixnum[4] = Const Value(4)
+ PatchPoint NoEPEscape(test)
+ Jump bb2(v18, v19, v24)
+ bb2(v28:BasicObject, v29:BasicObject, v30:Fixnum):
+ PatchPoint NoEPEscape(test)
CheckInterrupts
- Return v26
+ Return v30
");
}
@@ -4851,20 +4877,23 @@ mod tests {
CheckInterrupts
Jump bb2(v0, v6, v9)
bb2(v15:BasicObject, v16:BasicObject, v17:BasicObject):
- v19:Fixnum[0] = Const Value(0)
- v23:BasicObject = SendWithoutBlock v17, :>, v19
+ PatchPoint NoEPEscape(test)
+ v21:Fixnum[0] = Const Value(0)
+ v25:BasicObject = SendWithoutBlock v17, :>, v21
CheckInterrupts
- v26:CBool = Test v23
- IfTrue v26, bb1(v15, v16, v17)
- v28:NilClass = Const Value(nil)
+ v28:CBool = Test v25
+ IfTrue v28, bb1(v15, v16, v17)
+ v30:NilClass = Const Value(nil)
+ PatchPoint NoEPEscape(test)
CheckInterrupts
Return v16
- bb1(v36:BasicObject, v37:BasicObject, v38:BasicObject):
- v42:Fixnum[1] = Const Value(1)
- v46:BasicObject = SendWithoutBlock v37, :+, v42
- v49:Fixnum[1] = Const Value(1)
- v53:BasicObject = SendWithoutBlock v38, :-, v49
- Jump bb2(v36, v46, v53)
+ bb1(v40:BasicObject, v41:BasicObject, v42:BasicObject):
+ PatchPoint NoEPEscape(test)
+ v48:Fixnum[1] = Const Value(1)
+ v52:BasicObject = SendWithoutBlock v41, :+, v48
+ v55:Fixnum[1] = Const Value(1)
+ v59:BasicObject = SendWithoutBlock v42, :-, v55
+ Jump bb2(v40, v52, v59)
");
}
@@ -5121,11 +5150,12 @@ mod tests {
bb0(v0:BasicObject, v1:BasicObject):
v5:Class[VMFrozenCore] = Const Value(VALUE(0x1000))
v7:HashExact = NewHash
- v9:BasicObject = SendWithoutBlock v5, :core#hash_merge_kwd, v7, v1
- v10:Class[VMFrozenCore] = Const Value(VALUE(0x1000))
- v11:StaticSymbol[:b] = Const Value(VALUE(0x1008))
- v12:Fixnum[1] = Const Value(1)
- v14:BasicObject = SendWithoutBlock v10, :core#hash_merge_ptr, v9, v11, v12
+ PatchPoint NoEPEscape(test)
+ v11:BasicObject = SendWithoutBlock v5, :core#hash_merge_kwd, v7, v1
+ v12:Class[VMFrozenCore] = Const Value(VALUE(0x1000))
+ v13:StaticSymbol[:b] = Const Value(VALUE(0x1008))
+ v14:Fixnum[1] = Const Value(1)
+ v16:BasicObject = SendWithoutBlock v12, :core#hash_merge_ptr, v11, v13, v14
SideExit UnhandledCallType(KwSplatMut)
");
}
@@ -5639,9 +5669,10 @@ mod tests {
v12:BasicObject = GetIvar v0, :@b
PatchPoint SingleRactorMode
v15:BasicObject = GetIvar v0, :@c
- v19:ArrayExact = NewArray v9, v12, v15
+ PatchPoint NoEPEscape(reverse_odd)
+ v21:ArrayExact = NewArray v9, v12, v15
CheckInterrupts
- Return v19
+ Return v21
");
assert_contains_opcode("reverse_even", YARVINSN_opt_reverse);
assert_snapshot!(hir_string("reverse_even"), @r"
@@ -5659,9 +5690,10 @@ mod tests {
v16:BasicObject = GetIvar v0, :@c
PatchPoint SingleRactorMode
v19:BasicObject = GetIvar v0, :@d
- v23:ArrayExact = NewArray v10, v13, v16, v19
+ PatchPoint NoEPEscape(reverse_even)
+ v25:ArrayExact = NewArray v10, v13, v16, v19
CheckInterrupts
- Return v23
+ Return v25
");
}
@@ -5724,6 +5756,7 @@ mod tests {
bb0(v0:BasicObject, v1:BasicObject, v2:BasicObject, v3:BasicObject, v4:BasicObject):
v5:NilClass = Const Value(nil)
v10:BasicObject = InvokeBuiltin dir_s_open, v0, v1, v2
+ PatchPoint NoEPEscape(open)
SideExit UnhandledYARVInsn(getblockparamproxy)
");
}
@@ -6877,9 +6910,10 @@ mod opt_tests {
v8:StringExact[VALUE(0x1008)] = Const Value(VALUE(0x1008))
v10:StringExact = StringCopy v8
v12:RangeExact = NewRange v6 NewRangeInclusive v10
- v15:Fixnum[0] = Const Value(0)
+ PatchPoint NoEPEscape(test)
+ v17:Fixnum[0] = Const Value(0)
CheckInterrupts
- Return v15
+ Return v17
");
}
@@ -6916,9 +6950,10 @@ mod opt_tests {
bb0(v0:BasicObject):
v1:NilClass = Const Value(nil)
v6:HashExact = NewHash
- v9:Fixnum[5] = Const Value(5)
+ PatchPoint NoEPEscape(test)
+ v11:Fixnum[5] = Const Value(5)
CheckInterrupts
- Return v9
+ Return v11
");
}
@@ -6937,9 +6972,10 @@ mod opt_tests {
v7:StaticSymbol[:a] = Const Value(VALUE(0x1000))
v8:StaticSymbol[:b] = Const Value(VALUE(0x1008))
v10:HashExact = NewHash v7: v1, v8: v2
- v13:Fixnum[5] = Const Value(5)
+ PatchPoint NoEPEscape(test)
+ v15:Fixnum[5] = Const Value(5)
CheckInterrupts
- Return v13
+ Return v15
");
}
@@ -7324,10 +7360,11 @@ mod opt_tests {
v1:NilClass = Const Value(nil)
v6:ArrayExact = NewArray
PatchPoint MethodRedefined(Array@0x1000, itself@0x1008, cme:0x1010)
- v18:BasicObject = CCall itself@0x1038, v6
- v11:Fixnum[1] = Const Value(1)
+ v20:BasicObject = CCall itself@0x1038, v6
+ PatchPoint NoEPEscape(test)
+ v13:Fixnum[1] = Const Value(1)
CheckInterrupts
- Return v11
+ Return v13
");
}
@@ -7347,12 +7384,13 @@ mod opt_tests {
v1:NilClass = Const Value(nil)
PatchPoint SingleRactorMode
PatchPoint StableConstantNames(0x1000, M)
- v19:ModuleExact[VALUE(0x1008)] = Const Value(VALUE(0x1008))
+ v21:ModuleExact[VALUE(0x1008)] = Const Value(VALUE(0x1008))
PatchPoint MethodRedefined(Module@0x1010, name@0x1018, cme:0x1020)
- v21:StringExact|NilClass = CCall name@0x1048, v19
- v11:Fixnum[1] = Const Value(1)
+ v23:StringExact|NilClass = CCall name@0x1048, v21
+ PatchPoint NoEPEscape(test)
+ v13:Fixnum[1] = Const Value(1)
CheckInterrupts
- Return v11
+ Return v13
");
}
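
The hir.rs changes thread a local_inval flag through the parser's worklist: it is set after any instruction that insn_leaf() reports as non-leaf, carried along branch edges together with the frame state, and cleared once a NoEPEscape patch point has been emitted before the next local access, as the updated HIR snapshots above show. A self-contained toy model of that flow (simplified and with invented names; the real parser reuses values tracked in FrameState instead of emitting marker strings):

    // Toy model of the local_inval flow, not ZJIT code.
    #[derive(Clone, Copy)]
    enum ToyInsn { Leaf, NonLeafSend, UseLocal }

    fn lower(insns: &[ToyInsn]) -> Vec<&'static str> {
        let mut out = Vec::new();
        let mut local_inval = false; // nothing can have escaped yet at JIT entry
        for insn in insns {
            match insn {
                ToyInsn::Leaf => out.push("LeafInsn"),
                ToyInsn::NonLeafSend => {
                    out.push("SendWithoutBlock");
                    local_inval = true; // the callee may escape the EP (eval, binding, ...)
                }
                ToyInsn::UseLocal => {
                    if local_inval {
                        // Guard the cached locals once, then trust them again
                        // until the next non-leaf instruction.
                        out.push("PatchPoint NoEPEscape");
                        local_inval = false;
                    }
                    out.push("use cached local");
                }
            }
        }
        out
    }

    fn main() {
        let hir = lower(&[ToyInsn::UseLocal, ToyInsn::NonLeafSend, ToyInsn::UseLocal, ToyInsn::UseLocal]);
        assert_eq!(hir, ["use cached local", "SendWithoutBlock", "PatchPoint NoEPEscape",
                         "use cached local", "use cached local"]);
    }
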
diff --git a/zjit/src/invariants.rs b/zjit/src/invariants.rs
index 0b44fd4624..80948c696e 100644
--- a/zjit/src/invariants.rs
+++ b/zjit/src/invariants.rs
@@ -2,7 +2,7 @@
use std::{collections::{HashMap, HashSet}, mem};
-use crate::{backend::lir::{asm_comment, Assembler}, cruby::{rb_callable_method_entry_t, rb_gc_location, ruby_basic_operators, src_loc, with_vm_lock, IseqPtr, RedefinitionFlag, ID, VALUE}, gc::IseqPayload, hir::Invariant, options::debug, state::{zjit_enabled_p, ZJITState}, virtualmem::CodePtr};
+use crate::{backend::lir::{asm_comment, Assembler}, cruby::{iseq_name, rb_callable_method_entry_t, rb_gc_location, ruby_basic_operators, src_loc, with_vm_lock, IseqPtr, RedefinitionFlag, ID, VALUE}, gc::IseqPayload, hir::Invariant, options::debug, state::{zjit_enabled_p, ZJITState}, virtualmem::CodePtr};
use crate::stats::with_time_stat;
use crate::stats::Counter::invalidation_time_ns;
use crate::gc::remove_gc_offsets;
@@ -42,8 +42,8 @@ pub struct Invariants {
/// Set of ISEQs that are known to escape EP
ep_escape_iseqs: HashSet<IseqPtr>,
- /// Set of ISEQs whose JIT code assumes that it doesn't escape EP
- no_ep_escape_iseqs: HashSet<IseqPtr>,
+ /// Map from ISEQ that's assumed to not escape EP to a set of patch points
+ no_ep_escape_iseq_patch_points: HashMap<IseqPtr, HashSet<PatchPoint>>,
/// Map from a class and its associated basic operator to a set of patch points
bop_patch_points: HashMap<(RedefinitionFlag, ruby_basic_operators), HashSet<PatchPoint>>,
@@ -64,15 +64,15 @@ pub struct Invariants {
impl Invariants {
/// Update object references in Invariants
pub fn update_references(&mut self) {
- Self::update_iseq_references(&mut self.ep_escape_iseqs);
- Self::update_iseq_references(&mut self.no_ep_escape_iseqs);
+ self.update_ep_escape_iseqs();
+ self.update_no_ep_escape_iseq_patch_points();
}
- /// Update ISEQ references in a given `HashSet<IseqPtr>`
- fn update_iseq_references(iseqs: &mut HashSet<IseqPtr>) {
- let mut moved: Vec<IseqPtr> = Vec::with_capacity(iseqs.len());
+ /// Update ISEQ references in Invariants::ep_escape_iseqs
+ fn update_ep_escape_iseqs(&mut self) {
+ let mut moved: Vec<IseqPtr> = Vec::with_capacity(self.ep_escape_iseqs.len());
- iseqs.retain(|&old_iseq| {
+ self.ep_escape_iseqs.retain(|&old_iseq| {
let new_iseq = unsafe { rb_gc_location(VALUE(old_iseq as usize)) }.0 as IseqPtr;
if old_iseq != new_iseq {
moved.push(new_iseq);
@@ -81,7 +81,26 @@ impl Invariants {
});
for new_iseq in moved {
- iseqs.insert(new_iseq);
+ self.ep_escape_iseqs.insert(new_iseq);
+ }
+ }
+
+ /// Update ISEQ references in Invariants::no_ep_escape_iseq_patch_points
+ fn update_no_ep_escape_iseq_patch_points(&mut self) {
+ let mut moved: Vec<(IseqPtr, HashSet<PatchPoint>)> = Vec::with_capacity(self.no_ep_escape_iseq_patch_points.len());
+ let iseqs: Vec<IseqPtr> = self.no_ep_escape_iseq_patch_points.keys().cloned().collect();
+
+ for old_iseq in iseqs {
+ let new_iseq = unsafe { rb_gc_location(VALUE(old_iseq as usize)) }.0 as IseqPtr;
+ if old_iseq != new_iseq {
+ let patch_points = self.no_ep_escape_iseq_patch_points.remove(&old_iseq).unwrap();
+ // Don't insert into no_ep_escape_iseq_patch_points yet: the new key could collide with an entry for a different ISEQ that hasn't been updated yet
+ moved.push((new_iseq, patch_points));
+ }
+ }
+
+ for (new_iseq, patch_points) in moved {
+ self.no_ep_escape_iseq_patch_points.insert(new_iseq, patch_points);
}
}
}
@@ -114,7 +133,7 @@ pub extern "C" fn rb_zjit_bop_redefined(klass: RedefinitionFlag, bop: ruby_basic
/// Invalidate blocks for a given ISEQ that assumes environment pointer is
/// equal to base pointer.
#[unsafe(no_mangle)]
-pub extern "C" fn rb_zjit_invalidate_ep_is_bp(iseq: IseqPtr) {
+pub extern "C" fn rb_zjit_invalidate_no_ep_escape(iseq: IseqPtr) {
// Skip tracking EP escapes on boot. We don't need to invalidate anything during boot.
if !ZJITState::has_instance() {
return;
@@ -125,17 +144,30 @@ pub extern "C" fn rb_zjit_invalidate_ep_is_bp(iseq: IseqPtr) {
invariants.ep_escape_iseqs.insert(iseq);
// If the ISEQ has been compiled assuming it doesn't escape EP, invalidate the JIT code.
- // Note: Nobody calls track_no_ep_escape_assumption() for now, so this is always false.
- // TODO: Add a PatchPoint that assumes EP == BP in HIR and invalidate it here.
- if invariants.no_ep_escape_iseqs.contains(&iseq) {
- unimplemented!("Invalidation on EP escape is not implemented yet");
+ if let Some(patch_points) = invariants.no_ep_escape_iseq_patch_points.get(&iseq) {
+ debug!("EP is escaped: {}", iseq_name(iseq));
+
+ // Invalidate the patch points for this ISEQ
+ let cb = ZJITState::get_code_block();
+ compile_patch_points!(cb, patch_points, "EP is escaped: {}", iseq_name(iseq));
+
+ cb.mark_all_executable();
}
}
/// Track that JIT code for a ISEQ will assume that base pointer is equal to environment pointer.
-pub fn track_no_ep_escape_assumption(iseq: IseqPtr) {
+pub fn track_no_ep_escape_assumption(
+ iseq: IseqPtr,
+ patch_point_ptr: CodePtr,
+ side_exit_ptr: CodePtr,
+ payload_ptr: *mut IseqPayload,
+) {
let invariants = ZJITState::get_invariants();
- invariants.no_ep_escape_iseqs.insert(iseq);
+ invariants.no_ep_escape_iseq_patch_points.entry(iseq).or_default().insert(PatchPoint {
+ patch_point_ptr,
+ side_exit_ptr,
+ payload_ptr,
+ });
}
/// Returns true if a given ISEQ has previously escaped environment pointer.
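
Taken together, invariants.rs now keeps one set of patch points per ISEQ that was compiled under the no-EP-escape assumption: rb_zjit_invalidate_no_ep_escape() looks the ISEQ up and rewrites those patch points into side exits, and GC compaction rehashes the map keys when an ISEQ moves. A self-contained sketch of that bookkeeping with stand-in types (the real code uses IseqPtr, CodePtr, and compile_patch_points!):

    use std::collections::{HashMap, HashSet};

    type Iseq = usize; // stand-in for IseqPtr

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct PatchPointStub { patch_point: usize, side_exit: usize } // stand-in for PatchPoint

    #[derive(Default)]
    struct NoEpEscapeTracker { patch_points: HashMap<Iseq, HashSet<PatchPointStub>> }

    impl NoEpEscapeTracker {
        // Mirrors track_no_ep_escape_assumption(): remember what to patch later.
        fn track(&mut self, iseq: Iseq, pp: PatchPointStub) {
            self.patch_points.entry(iseq).or_default().insert(pp);
        }

        // Mirrors the invalidation path: return the patch points that must be
        // rewritten into jumps to their side exits for this escaped ISEQ.
        fn invalidate(&self, iseq: Iseq) -> Vec<PatchPointStub> {
            self.patch_points.get(&iseq).into_iter().flatten().copied().collect()
        }

        // Mirrors update_no_ep_escape_iseq_patch_points(): rehash entries whose
        // ISEQ was moved by GC compaction without disturbing the other keys.
        fn update_references(&mut self, new_location: impl Fn(Iseq) -> Iseq) {
            let old_keys: Vec<Iseq> = self.patch_points.keys().copied().collect();
            let mut moved = Vec::new();
            for old in old_keys {
                let new = new_location(old);
                if new != old {
                    moved.push((new, self.patch_points.remove(&old).unwrap()));
                }
            }
            for (new, pps) in moved {
                self.patch_points.insert(new, pps);
            }
        }
    }

    fn main() {
        let mut tracker = NoEpEscapeTracker::default();
        tracker.track(0x1000, PatchPointStub { patch_point: 0x40, side_exit: 0x80 });
        tracker.update_references(|iseq| iseq + 0x10); // pretend GC moved the ISEQ
        assert_eq!(tracker.invalidate(0x1010).len(), 1);
    }
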