Diffstat (limited to 'zjit/src')
-rw-r--r--  zjit/src/codegen.rs  34
-rw-r--r--  zjit/src/hir.rs      20
2 files changed, 48 insertions(+), 6 deletions(-)
diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs
index 6c55b3fb5d..3a7194ec39 100644
--- a/zjit/src/codegen.rs
+++ b/zjit/src/codegen.rs
@@ -355,6 +355,7 @@ fn gen_insn(cb: &mut CodeBlock, jit: &mut JITState, asm: &mut Assembler, functio
Insn::Jump(branch) => no_output!(gen_jump(jit, asm, branch)),
Insn::IfTrue { val, target } => no_output!(gen_if_true(jit, asm, opnd!(val), target)),
Insn::IfFalse { val, target } => no_output!(gen_if_false(jit, asm, opnd!(val), target)),
+ &Insn::Send { cd, blockiseq, state, .. } => gen_send(jit, asm, cd, blockiseq, &function.frame_state(state)),
Insn::SendWithoutBlock { cd, state, .. } => gen_send_without_block(jit, asm, *cd, &function.frame_state(*state)),
// Give up SendWithoutBlockDirect for 6+ args since asm.ccall() doesn't support it.
Insn::SendWithoutBlockDirect { cd, state, args, .. } if args.len() + 1 > C_ARG_OPNDS.len() => // +1 for self
@@ -407,7 +408,6 @@ fn gen_insn(cb: &mut CodeBlock, jit: &mut JITState, asm: &mut Assembler, functio
&Insn::ArrayMax { state, .. }
| &Insn::FixnumDiv { state, .. }
| &Insn::FixnumMod { state, .. }
- | &Insn::Send { state, .. }
| &Insn::Throw { state, .. }
=> return Err(state),
};
@@ -883,6 +883,36 @@ fn gen_if_false(jit: &mut JITState, asm: &mut Assembler, val: lir::Opnd, branch:
asm.write_label(if_true);
}
+/// Compile a dynamic dispatch with block
+fn gen_send(
+ jit: &mut JITState,
+ asm: &mut Assembler,
+ cd: *const rb_call_data,
+ blockiseq: IseqPtr,
+ state: &FrameState,
+) -> lir::Opnd {
+ // Save PC and SP
+ gen_save_pc(asm, state);
+ gen_save_sp(asm, state.stack().len());
+
+ // Spill locals and stack
+ gen_spill_locals(jit, asm, state);
+ gen_spill_stack(jit, asm, state);
+
+ asm_comment!(asm, "call #{} with dynamic dispatch", ruby_call_method_name(cd));
+ unsafe extern "C" {
+ fn rb_vm_send(ec: EcPtr, cfp: CfpPtr, cd: VALUE, blockiseq: IseqPtr) -> VALUE;
+ }
+ let ret = asm.ccall(
+ rb_vm_send as *const u8,
+ vec![EC, CFP, (cd as usize).into(), VALUE(blockiseq as usize).into()],
+ );
+ // TODO: Add a PatchPoint here that can side-exit the function if the callee messed with
+ // the frame's locals
+
+ ret
+}
+
/// Compile a dynamic dispatch without block
fn gen_send_without_block(
jit: &mut JITState,
@@ -1383,7 +1413,7 @@ fn param_opnd(idx: usize) -> Opnd {
}
/// Inverse of ep_offset_to_local_idx(). See ep_offset_to_local_idx() for details.
-fn local_idx_to_ep_offset(iseq: IseqPtr, local_idx: usize) -> i32 {
+pub fn local_idx_to_ep_offset(iseq: IseqPtr, local_idx: usize) -> i32 {
let local_size = unsafe { get_iseq_body_local_table_size(iseq) };
local_size_and_idx_to_ep_offset(local_size as usize, local_idx)
}
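
For context on the pub change above: hir.rs needs local_idx_to_ep_offset() to compute EP-relative offsets for the locals it reloads after a Send. Below is a minimal, self-contained sketch of the mapping that local_size_and_idx_to_ep_offset() performs, assuming CRuby's usual VM_ENV_DATA_SIZE of 3 and taking the local table size directly instead of reading it from the ISEQ; the constant and the main() driver are illustrative only, not the zjit helpers.

const VM_ENV_DATA_SIZE: i32 = 3;

// Locals sit below the EP with index 0 farthest from it, so a method with a
// single local maps l0 to EP@3 -- which matches the `GetLocal l0, EP@3` lines
// in the HIR tests further down.
fn local_size_and_idx_to_ep_offset(local_size: usize, local_idx: usize) -> i32 {
    local_size as i32 - local_idx as i32 - 1 + VM_ENV_DATA_SIZE
}

fn main() {
    assert_eq!(local_size_and_idx_to_ep_offset(1, 0), 3); // one local:  l0 -> EP@3
    assert_eq!(local_size_and_idx_to_ep_offset(2, 0), 4); // two locals: l0 -> EP@4
    assert_eq!(local_size_and_idx_to_ep_offset(2, 1), 3); //             l1 -> EP@3
}
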
diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index ee90032ee8..fdefe23e12 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -4,7 +4,7 @@
#![allow(non_upper_case_globals)]
use crate::{
- cast::IntoUsize, cruby::*, gc::{get_or_create_iseq_payload, IseqPayload}, options::{get_option, DumpHIR}, state::ZJITState
+ cast::IntoUsize, codegen::local_idx_to_ep_offset, cruby::*, gc::{get_or_create_iseq_payload, IseqPayload}, options::{get_option, DumpHIR}, state::ZJITState
};
use std::{
cell::RefCell, collections::{HashMap, HashSet, VecDeque}, ffi::{c_int, c_void, CStr}, fmt::Display, mem::{align_of, size_of}, ptr, slice::Iter
@@ -3138,7 +3138,9 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let ep_offset = get_arg(pc, 0).as_u32();
if iseq_type == ISEQ_TYPE_EVAL || has_send {
// On eval, the locals are always on the heap, so read the local using EP.
- state.stack_push(fun.push_insn(block, Insn::GetLocal { ep_offset, level: 0 }));
+ let val = fun.push_insn(block, Insn::GetLocal { ep_offset, level: 0 });
+ state.setlocal(ep_offset, val);
+ state.stack_push(val);
} else {
// TODO(alan): This implementation doesn't read from EP, so will miss writes
// from nested ISeqs. This will need to be amended when we add codegen for
@@ -3324,6 +3326,15 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
let send = fun.push_insn(block, Insn::Send { self_val: recv, cd, blockiseq, args, state: exit_id });
state.stack_push(send);
+
+ // Reload locals that may have been modified by the blockiseq.
+ // TODO: Avoid reloading locals that are not referenced by the blockiseq
+ // or not used after this. Max thinks we could eventually DCE them.
+ for local_idx in 0..state.locals.len() {
+ let ep_offset = local_idx_to_ep_offset(iseq, local_idx) as u32;
+ let val = fun.push_insn(block, Insn::GetLocal { ep_offset, level: 0 });
+ state.setlocal(ep_offset, val);
+ }
}
YARVINSN_getglobal => {
let id = ID(get_arg(pc, 0).as_u64());
@@ -4698,6 +4709,7 @@ mod tests {
bb0(v0:BasicObject, v1:BasicObject):
v3:BasicObject = GetLocal l0, EP@3
v5:BasicObject = Send v3, 0x1000, :each
+ v6:BasicObject = GetLocal l0, EP@3
CheckInterrupts
Return v5
");
@@ -7246,9 +7258,9 @@ mod opt_tests {
v3:Fixnum[1] = Const Value(1)
SetLocal l0, EP@3, v3
v6:BasicObject = Send v0, 0x1000, :foo
- v7:BasicObject = GetLocal l0, EP@3
+ v8:BasicObject = GetLocal l0, EP@3
CheckInterrupts
- Return v7
+ Return v8
");
}
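
The reload loop added in iseq_to_hir() above exists because a Send that carries a blockiseq can run the block, and the block writes the caller's locals through the EP, so any value the compiler remembered for those locals before the call is stale afterwards. A toy Rust model of that hazard, illustrative only and not zjit code:

fn main() {
    // The "frame": locals live in memory reachable through the EP.
    let mut locals = [1i64]; // l0 = 1

    // The value the compiler cached for l0 before the call.
    let cached_l0 = locals[0];

    // The Send: the callee invokes the block, which closes over the frame
    // and writes l0 through it.
    let mut block = |new_val: i64| locals[0] = new_val;
    block(42);

    // The cached copy is stale; only a fresh read (a new GetLocal) observes
    // the block's write.
    assert_eq!(cached_l0, 1);
    assert_eq!(locals[0], 42);
}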