author    Takashi Kokubun <takashi.kokubun@shopify.com>    2025-09-30 08:15:06 -0700
committer GitHub <noreply@github.com>                      2025-09-30 08:15:06 -0700
commit    d016595387069677c6b992dffe9322f67dc9bc73 (patch)
tree      792812ba332a7ae2700b04d0540592910979c931
parent    d8c8623f50af8f5324e1679ff95b1a2071c0c61e (diff)
ZJIT: Unify fallback counters for send-ish insns (#14676)
-rw-r--r--  zjit.rb              10
-rw-r--r--  zjit/src/codegen.rs  62
-rw-r--r--  zjit/src/hir.rs     119
-rw-r--r--  zjit/src/state.rs    15
-rw-r--r--  zjit/src/stats.rs    80
5 files changed, 209 insertions, 77 deletions
diff --git a/zjit.rb b/zjit.rb
index a46802553c..4438a10c75 100644
--- a/zjit.rb
+++ b/zjit.rb
@@ -39,12 +39,14 @@ class << RubyVM::ZJIT
buf = +"***ZJIT: Printing ZJIT statistics on exit***\n"
stats = self.stats
- # Show non-exit counters
- print_counters_with_prefix(prefix: 'dynamic_send_type_', prompt: 'dynamic send types', buf:, stats:, limit: 20)
- print_counters_with_prefix(prefix: 'unspecialized_def_type_', prompt: 'send fallback unspecialized def_types', buf:, stats:, limit: 20)
- print_counters_with_prefix(prefix: 'send_fallback_', prompt: 'dynamic send types', buf:, stats:, limit: 20)
+ # Show counters independent from exit_* or dynamic_send_*
print_counters_with_prefix(prefix: 'not_optimized_cfuncs_', prompt: 'unoptimized sends to C functions', buf:, stats:, limit: 20)
+ # Show fallback counters, ordered by the typical amount of fallbacks for the prefix at the time
+ print_counters_with_prefix(prefix: 'unspecialized_def_type_', prompt: 'not optimized method types', buf:, stats:, limit: 20)
+ print_counters_with_prefix(prefix: 'not_optimized_yarv_insn_', prompt: 'not optimized instructions', buf:, stats:, limit: 20)
+ print_counters_with_prefix(prefix: 'send_fallback_', prompt: 'send fallback reasons', buf:, stats:, limit: 20)
+
# Show exit counters, ordered by the typical amount of exits for the prefix at the time
print_counters_with_prefix(prefix: 'unhandled_yarv_insn_', prompt: 'unhandled YARV insns', buf:, stats:, limit: 20)
print_counters_with_prefix(prefix: 'compile_error_', prompt: 'compile error reasons', buf:, stats:, limit: 20)
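For context on what the reordered calls above produce: print_counters_with_prefix groups stats keys by a common prefix and prints the largest entries first. A rough standalone sketch of that grouping, written in Rust with a plain map standing in for the stats hash (the helper name and formatting here are illustrative, not the Ruby implementation):

use std::collections::HashMap;

// Minimal sketch (not from the patch): print the top `limit` counters whose
// keys start with `prefix`, largest first, which is what the zjit.rb helper
// is used for in the block above.
fn print_counters_with_prefix(stats: &HashMap<String, u64>, prefix: &str, prompt: &str, limit: usize) {
    let mut entries: Vec<(&str, u64)> = stats
        .iter()
        .filter(|(key, _)| key.starts_with(prefix))
        .map(|(key, &count)| (key.as_str(), count))
        .collect();
    entries.sort_by(|a, b| b.1.cmp(&a.1)); // descending by count
    println!("{prompt}:");
    for (key, count) in entries.into_iter().take(limit) {
        println!("  {:<50} {}", &key[prefix.len()..], count);
    }
}

fn main() {
    let mut stats = HashMap::new();
    stats.insert("send_fallback_send_without_block_no_profiles".to_string(), 42u64);
    stats.insert("send_fallback_obj_to_string_not_string".to_string(), 7);
    print_counters_with_prefix(&stats, "send_fallback_", "send fallback reasons", 20);
}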
diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs
index 7676d7eed4..c62fef73de 100644
--- a/zjit/src/codegen.rs
+++ b/zjit/src/codegen.rs
@@ -12,12 +12,12 @@ use crate::backend::current::{Reg, ALLOC_REGS};
use crate::invariants::{track_bop_assumption, track_cme_assumption, track_no_ep_escape_assumption, track_no_trace_point_assumption, track_single_ractor_assumption, track_stable_constant_names_assumption};
use crate::gc::{append_gc_offsets, get_or_create_iseq_payload, get_or_create_iseq_payload_ptr, IseqCodePtrs, IseqPayload, IseqStatus};
use crate::state::ZJITState;
-use crate::stats::{exit_counter_for_compile_error, incr_counter, incr_counter_by, CompileError};
-use crate::stats::{counter_ptr, with_time_stat, Counter, send_fallback_counter, Counter::{compile_time_ns, exit_compile_error}};
+use crate::stats::{send_fallback_counter, exit_counter_for_compile_error, incr_counter, incr_counter_by, send_fallback_counter_for_method_type, send_fallback_counter_ptr_for_opcode, CompileError};
+use crate::stats::{counter_ptr, with_time_stat, Counter, Counter::{compile_time_ns, exit_compile_error}};
use crate::{asm::CodeBlock, cruby::*, options::debug, virtualmem::CodePtr};
use crate::backend::lir::{self, asm_comment, asm_ccall, Assembler, Opnd, Target, CFP, C_ARG_OPNDS, C_RET_OPND, EC, NATIVE_STACK_PTR, NATIVE_BASE_PTR, SCRATCH_OPND, SP};
-use crate::hir::{iseq_to_hir, BlockId, BranchEdge, Invariant, MethodType, RangeType, SideExitReason::{self, *}, SpecialBackrefSymbol, SpecialObjectType};
-use crate::hir::{Const, FrameState, Function, Insn, InsnId};
+use crate::hir::{iseq_to_hir, BlockId, BranchEdge, Invariant, RangeType, SideExitReason::{self, *}, SpecialBackrefSymbol, SpecialObjectType};
+use crate::hir::{Const, FrameState, Function, Insn, InsnId, SendFallbackReason};
use crate::hir_type::{types, Type};
use crate::options::get_option;
use crate::cast::IntoUsize;
@@ -366,15 +366,15 @@ fn gen_insn(cb: &mut CodeBlock, jit: &mut JITState, asm: &mut Assembler, functio
Insn::Jump(branch) => no_output!(gen_jump(jit, asm, branch)),
Insn::IfTrue { val, target } => no_output!(gen_if_true(jit, asm, opnd!(val), target)),
Insn::IfFalse { val, target } => no_output!(gen_if_false(jit, asm, opnd!(val), target)),
- &Insn::Send { cd, blockiseq, state, .. } => gen_send(jit, asm, cd, blockiseq, &function.frame_state(state)),
- &Insn::SendForward { cd, blockiseq, state, .. } => gen_send_forward(jit, asm, cd, blockiseq, &function.frame_state(state)),
- Insn::SendWithoutBlock { cd, state, def_type, .. } => gen_send_without_block(jit, asm, *cd, *def_type, &function.frame_state(*state)),
+ &Insn::Send { cd, blockiseq, state, reason, .. } => gen_send(jit, asm, cd, blockiseq, &function.frame_state(state), reason),
+ &Insn::SendForward { cd, blockiseq, state, reason, .. } => gen_send_forward(jit, asm, cd, blockiseq, &function.frame_state(state), reason),
+ &Insn::SendWithoutBlock { cd, state, reason, .. } => gen_send_without_block(jit, asm, cd, &function.frame_state(state), reason),
// Give up SendWithoutBlockDirect for 6+ args since asm.ccall() doesn't support it.
Insn::SendWithoutBlockDirect { cd, state, args, .. } if args.len() + 1 > C_ARG_OPNDS.len() => // +1 for self
- gen_send_without_block(jit, asm, *cd, None, &function.frame_state(*state)),
+ gen_send_without_block(jit, asm, *cd, &function.frame_state(*state), SendFallbackReason::SendWithoutBlockDirectTooManyArgs),
Insn::SendWithoutBlockDirect { cme, iseq, recv, args, state, .. } => gen_send_without_block_direct(cb, jit, asm, *cme, *iseq, opnd!(recv), opnds!(args), &function.frame_state(*state)),
- &Insn::InvokeSuper { cd, blockiseq, state, .. } => gen_invokesuper(jit, asm, cd, blockiseq, &function.frame_state(state)),
- Insn::InvokeBlock { cd, state, .. } => gen_invokeblock(jit, asm, *cd, &function.frame_state(*state)),
+ &Insn::InvokeSuper { cd, blockiseq, state, reason, .. } => gen_invokesuper(jit, asm, cd, blockiseq, &function.frame_state(state), reason),
+ &Insn::InvokeBlock { cd, state, reason, .. } => gen_invokeblock(jit, asm, cd, &function.frame_state(state), reason),
// Ensure we have enough room fit ec, self, and arguments
// TODO remove this check when we have stack args (we can use Time.new to test it)
Insn::InvokeBuiltin { bf, state, .. } if bf.argc + 2 > (C_ARG_OPNDS.len() as i32) => return Err(*state),
@@ -981,9 +981,9 @@ fn gen_send(
cd: *const rb_call_data,
blockiseq: IseqPtr,
state: &FrameState,
+ reason: SendFallbackReason,
) -> lir::Opnd {
- gen_incr_counter(asm, Counter::dynamic_send_count);
- gen_incr_counter(asm, Counter::dynamic_send_type_send);
+ gen_incr_send_fallback_counter(asm, reason);
gen_prepare_non_leaf_call(jit, asm, state);
asm_comment!(asm, "call #{} with dynamic dispatch", ruby_call_method_name(cd));
@@ -1003,9 +1003,9 @@ fn gen_send_forward(
cd: *const rb_call_data,
blockiseq: IseqPtr,
state: &FrameState,
+ reason: SendFallbackReason,
) -> lir::Opnd {
- gen_incr_counter(asm, Counter::dynamic_send_count);
- gen_incr_counter(asm, Counter::dynamic_send_type_send_forward);
+ gen_incr_send_fallback_counter(asm, reason);
gen_prepare_non_leaf_call(jit, asm, state);
@@ -1024,15 +1024,10 @@ fn gen_send_without_block(
jit: &mut JITState,
asm: &mut Assembler,
cd: *const rb_call_data,
- def_type: Option<MethodType>,
state: &FrameState,
+ reason: SendFallbackReason,
) -> lir::Opnd {
- gen_incr_counter(asm, Counter::dynamic_send_count);
- gen_incr_counter(asm, Counter::dynamic_send_type_send_without_block);
-
- if let Some(def_type) = def_type {
- gen_incr_counter(asm, send_fallback_counter(def_type));
- }
+ gen_incr_send_fallback_counter(asm, reason);
gen_prepare_non_leaf_call(jit, asm, state);
asm_comment!(asm, "call #{} with dynamic dispatch", ruby_call_method_name(cd));
@@ -1118,9 +1113,9 @@ fn gen_invokeblock(
asm: &mut Assembler,
cd: *const rb_call_data,
state: &FrameState,
+ reason: SendFallbackReason,
) -> lir::Opnd {
- gen_incr_counter(asm, Counter::dynamic_send_count);
- gen_incr_counter(asm, Counter::dynamic_send_type_invokeblock);
+ gen_incr_send_fallback_counter(asm, reason);
gen_prepare_non_leaf_call(jit, asm, state);
@@ -1141,9 +1136,9 @@ fn gen_invokesuper(
cd: *const rb_call_data,
blockiseq: IseqPtr,
state: &FrameState,
+ reason: SendFallbackReason,
) -> lir::Opnd {
- gen_incr_counter(asm, Counter::dynamic_send_count);
- gen_incr_counter(asm, Counter::dynamic_send_type_invokesuper);
+ gen_incr_send_fallback_counter(asm, reason);
gen_prepare_non_leaf_call(jit, asm, state);
asm_comment!(asm, "call super with dynamic dispatch");
@@ -1548,6 +1543,23 @@ fn gen_incr_counter(asm: &mut Assembler, counter: Counter) {
}
}
+/// Increment a counter for the given SendFallbackReason. If the variant has
+/// a payload with a more detailed counter breakdown, increment that as well.
+fn gen_incr_send_fallback_counter(asm: &mut Assembler, reason: SendFallbackReason) {
+ gen_incr_counter(asm, send_fallback_counter(reason));
+
+ use SendFallbackReason::*;
+ match reason {
+ NotOptimizedInstruction(opcode) => {
+ gen_incr_counter_ptr(asm, send_fallback_counter_ptr_for_opcode(opcode));
+ }
+ SendWithoutBlockNotOptimizedMethodType(method_type) => {
+ gen_incr_counter(asm, send_fallback_counter_for_method_type(method_type));
+ }
+ _ => {}
+ }
+}
+
/// Save the current PC on the CFP as a preparation for calling a C function
/// that may allocate objects and trigger GC. Use gen_prepare_non_leaf_call()
/// if it may raise exceptions or call arbitrary methods.
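The new gen_incr_send_fallback_counter above implements a two-level scheme: every fallback bumps one counter per reason variant, and payload-carrying variants (opcode, method type) additionally bump a finer-grained breakdown counter. A self-contained sketch of that scheme, with plain arrays standing in for ZJIT's counter storage (types and sizes here are illustrative):

// Simplified sketch of the two-level counting scheme: every fallback bumps a
// per-reason counter, and payload-carrying variants additionally bump a
// per-payload breakdown counter.
const NUM_REASONS: usize = 3;
const NUM_OPCODES: usize = 8; // stand-in for VM_INSTRUCTION_SIZE

#[derive(Clone, Copy)]
enum FallbackReason {
    Polymorphic,
    TooManyArgs,
    NotOptimizedInstruction(usize), // opcode payload
}

struct Stats {
    reason_counters: [u64; NUM_REASONS],
    opcode_counters: [u64; NUM_OPCODES],
}

impl Stats {
    fn incr_fallback(&mut self, reason: FallbackReason) {
        // Coarse counter: one slot per enum variant.
        let slot = match reason {
            FallbackReason::Polymorphic => 0,
            FallbackReason::TooManyArgs => 1,
            FallbackReason::NotOptimizedInstruction(_) => 2,
        };
        self.reason_counters[slot] += 1;

        // Finer breakdown for variants that carry a payload.
        if let FallbackReason::NotOptimizedInstruction(opcode) = reason {
            self.opcode_counters[opcode] += 1;
        }
    }
}

fn main() {
    let mut stats = Stats { reason_counters: [0; NUM_REASONS], opcode_counters: [0; NUM_OPCODES] };
    stats.incr_fallback(FallbackReason::NotOptimizedInstruction(5));
    stats.incr_fallback(FallbackReason::Polymorphic);
    println!("{:?} {:?}", stats.reason_counters, stats.opcode_counters);
}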
diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index d81231e282..8f6e92d653 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -15,6 +15,7 @@ use crate::hir_type::{Type, types};
use crate::bitset::BitSet;
use crate::profile::{TypeDistributionSummary, ProfiledType};
use crate::stats::Counter;
+use SendFallbackReason::*;
/// An index of an [`Insn`] in a [`Function`]. This is a popular
/// type since this effectively acts as a pointer to an [`Insn`].
@@ -514,6 +515,21 @@ impl std::fmt::Display for SideExitReason {
}
}
+/// Reason why a send-ish instruction falls back to dynamic dispatch instead of being optimized
+#[derive(Debug, Clone, Copy)]
+pub enum SendFallbackReason {
+ SendWithoutBlockPolymorphic,
+ SendWithoutBlockNoProfiles,
+ SendWithoutBlockCfuncNotVariadic,
+ SendWithoutBlockCfuncArrayVariadic,
+ SendWithoutBlockNotOptimizedMethodType(MethodType),
+ SendWithoutBlockDirectTooManyArgs,
+ ObjToStringNotString,
+ /// Initial fallback reason for every instruction, which should be mutated to
+ /// a more actionable reason when an attempt to specialize the instruction fails.
+ NotOptimizedInstruction(ruby_vminsn_type),
+}
+
/// An instruction in the SSA IR. The output of an instruction is referred to by the index of
/// the instruction ([`InsnId`]). SSA form enables this, and [`UnionFind`] ([`Function::find`])
/// helps with editing.
@@ -638,13 +654,39 @@ pub enum Insn {
recv: InsnId,
cd: *const rb_call_data,
args: Vec<InsnId>,
- def_type: Option<MethodType>, // Assigned in `optimize_direct_sends` if it's not optimized
state: InsnId,
+ reason: SendFallbackReason,
+ },
+ Send {
+ recv: InsnId,
+ cd: *const rb_call_data,
+ blockiseq: IseqPtr,
+ args: Vec<InsnId>,
+ state: InsnId,
+ reason: SendFallbackReason,
+ },
+ SendForward {
+ recv: InsnId,
+ cd: *const rb_call_data,
+ blockiseq: IseqPtr,
+ args: Vec<InsnId>,
+ state: InsnId,
+ reason: SendFallbackReason,
+ },
+ InvokeSuper {
+ recv: InsnId,
+ cd: *const rb_call_data,
+ blockiseq: IseqPtr,
+ args: Vec<InsnId>,
+ state: InsnId,
+ reason: SendFallbackReason,
+ },
+ InvokeBlock {
+ cd: *const rb_call_data,
+ args: Vec<InsnId>,
+ state: InsnId,
+ reason: SendFallbackReason,
},
- Send { recv: InsnId, cd: *const rb_call_data, blockiseq: IseqPtr, args: Vec<InsnId>, state: InsnId },
- SendForward { recv: InsnId, cd: *const rb_call_data, blockiseq: IseqPtr, args: Vec<InsnId>, state: InsnId },
- InvokeSuper { recv: InsnId, cd: *const rb_call_data, blockiseq: IseqPtr, args: Vec<InsnId>, state: InsnId },
- InvokeBlock { cd: *const rb_call_data, args: Vec<InsnId>, state: InsnId },
/// Optimized ISEQ call
SendWithoutBlockDirect {
@@ -1442,12 +1484,12 @@ impl Function {
str: find!(str),
state,
},
- &SendWithoutBlock { recv, cd, ref args, def_type, state } => SendWithoutBlock {
+ &SendWithoutBlock { recv, cd, ref args, state, reason } => SendWithoutBlock {
recv: find!(recv),
cd,
args: find_vec!(args),
- def_type,
state,
+ reason,
},
&SendWithoutBlockDirect { recv, cd, cme, iseq, ref args, state } => SendWithoutBlockDirect {
recv: find!(recv),
@@ -1457,31 +1499,35 @@ impl Function {
args: find_vec!(args),
state,
},
- &Send { recv, cd, blockiseq, ref args, state } => Send {
+ &Send { recv, cd, blockiseq, ref args, state, reason } => Send {
recv: find!(recv),
cd,
blockiseq,
args: find_vec!(args),
state,
+ reason,
},
- &SendForward { recv, cd, blockiseq, ref args, state } => SendForward {
+ &SendForward { recv, cd, blockiseq, ref args, state, reason } => SendForward {
recv: find!(recv),
cd,
blockiseq,
args: find_vec!(args),
state,
+ reason,
},
- &InvokeSuper { recv, cd, blockiseq, ref args, state } => InvokeSuper {
+ &InvokeSuper { recv, cd, blockiseq, ref args, state, reason } => InvokeSuper {
recv: find!(recv),
cd,
blockiseq,
args: find_vec!(args),
state,
+ reason,
},
- &InvokeBlock { cd, ref args, state } => InvokeBlock {
+ &InvokeBlock { cd, ref args, state, reason } => InvokeBlock {
cd,
args: find_vec!(args),
state,
+ reason,
},
&InvokeBuiltin { bf, ref args, state, return_type } => InvokeBuiltin { bf, args: find_vec!(args), state, return_type },
&ArrayDup { val, state } => ArrayDup { val: find!(val), state },
@@ -1515,6 +1561,22 @@ impl Function {
}
}
+ /// Update the SendFallbackReason for the instruction at insn_id
+ fn set_dynamic_send_reason(&mut self, insn_id: InsnId, dynamic_send_reason: SendFallbackReason) {
+ use Insn::*;
+ if get_option!(stats) {
+ match self.insns.get_mut(insn_id.0).unwrap() {
+ Send { reason, .. }
+ | SendForward { reason, .. }
+ | SendWithoutBlock { reason, .. }
+ | InvokeSuper { reason, .. }
+ | InvokeBlock { reason, .. }
+ => *reason = dynamic_send_reason,
+ _ => unreachable!("unexpected instruction {} at {insn_id}", self.find(insn_id))
+ }
+ }
+ }
+
/// Replace `insn` with the new instruction `replacement`, which will get appended to `insns`.
fn make_equal_to(&mut self, insn: InsnId, replacement: InsnId) {
// Don't push it to the block
@@ -1927,12 +1989,11 @@ impl Function {
let Some(recv_type) = self.profiled_type_of_at(recv, frame_state.insn_idx) else {
if get_option!(stats) {
match self.is_polymorphic_at(recv, frame_state.insn_idx) {
- Some(true) => self.push_insn(block, Insn::IncrCounter(Counter::send_fallback_polymorphic)),
+ Some(true) => self.set_dynamic_send_reason(insn_id, SendWithoutBlockPolymorphic),
// If the class isn't known statically, then it should not also be monomorphic
Some(false) => panic!("Should not have monomorphic profile at this point in this branch"),
- None => self.push_insn(block, Insn::IncrCounter(Counter::send_fallback_no_profiles)),
-
- };
+ None => self.set_dynamic_send_reason(insn_id, SendWithoutBlockNoProfiles),
+ }
}
self.push_insn_id(block, insn_id); continue;
};
@@ -1943,9 +2004,7 @@ impl Function {
// Do method lookup
let mut cme = unsafe { rb_callable_method_entry(klass, mid) };
if cme.is_null() {
- if let Insn::SendWithoutBlock { def_type: insn_def_type, .. } = &mut self.insns[insn_id.0] {
- *insn_def_type = Some(MethodType::Null);
- }
+ self.set_dynamic_send_reason(insn_id, SendWithoutBlockNotOptimizedMethodType(MethodType::Null));
self.push_insn_id(block, insn_id); continue;
}
// Load an overloaded cme if applicable. See vm_search_cc().
@@ -1958,9 +2017,7 @@ impl Function {
// TODO(max): Handle other kinds of parameter passing
let iseq = unsafe { get_def_iseq_ptr((*cme).def) };
if !can_direct_send(iseq) {
- if let Insn::SendWithoutBlock { def_type: insn_def_type, .. } = &mut self.insns[insn_id.0] {
- *insn_def_type = Some(MethodType::from(def_type));
- }
+ self.set_dynamic_send_reason(insn_id, SendWithoutBlockNotOptimizedMethodType(MethodType::Iseq));
self.push_insn_id(block, insn_id); continue;
}
self.push_insn(block, Insn::PatchPoint { invariant: Invariant::MethodRedefined { klass, method: mid, cme }, state });
@@ -1987,9 +2044,7 @@ impl Function {
let getivar = self.push_insn(block, Insn::GetIvar { self_val: recv, id, state });
self.make_equal_to(insn_id, getivar);
} else {
- if let Insn::SendWithoutBlock { def_type: insn_def_type, .. } = &mut self.insns[insn_id.0] {
- *insn_def_type = Some(MethodType::from(def_type));
- }
+ self.set_dynamic_send_reason(insn_id, SendWithoutBlockNotOptimizedMethodType(MethodType::from(def_type)));
self.push_insn_id(block, insn_id); continue;
}
}
@@ -2031,7 +2086,7 @@ impl Function {
self.make_equal_to(insn_id, guard);
} else {
self.push_insn(block, Insn::GuardTypeNot { val, guard_type: types::String, state});
- let send_to_s = self.push_insn(block, Insn::SendWithoutBlock { recv: val, cd, args: vec![], def_type: None, state});
+ let send_to_s = self.push_insn(block, Insn::SendWithoutBlock { recv: val, cd, args: vec![], state, reason: ObjToStringNotString });
self.make_equal_to(insn_id, send_to_s);
}
}
@@ -2206,6 +2261,7 @@ impl Function {
let Some(FnProperties { leaf: true, no_gc: true, return_type, elidable }) =
ZJITState::get_method_annotations().get_cfunc_properties(method)
else {
+ fun.set_dynamic_send_reason(send_insn_id, SendWithoutBlockCfuncNotVariadic);
return Err(Some(method));
};
@@ -2269,6 +2325,7 @@ impl Function {
-2 => {
// (self, args_ruby_array) parameter form
// Falling through for now
+ fun.set_dynamic_send_reason(send_insn_id, SendWithoutBlockCfuncArrayVariadic);
}
_ => unreachable!("unknown cfunc kind: argc={argc}")
}
@@ -3787,7 +3844,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let args = state.stack_pop_n(argc as usize)?;
let recv = state.stack_pop()?;
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, def_type: None, state: exit_id });
+ let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(send);
}
YARVINSN_opt_hash_freeze => {
@@ -3895,7 +3952,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let args = state.stack_pop_n(argc as usize)?;
let recv = state.stack_pop()?;
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, def_type: None, state: exit_id });
+ let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(send);
}
YARVINSN_send => {
@@ -3915,7 +3972,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let args = state.stack_pop_n(argc as usize + usize::from(block_arg))?;
let recv = state.stack_pop()?;
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let send = fun.push_insn(block, Insn::Send { recv, cd, blockiseq, args, state: exit_id });
+ let send = fun.push_insn(block, Insn::Send { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(send);
if !blockiseq.is_null() {
@@ -3947,7 +4004,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let args = state.stack_pop_n(argc as usize + usize::from(forwarding))?;
let recv = state.stack_pop()?;
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let send_forward = fun.push_insn(block, Insn::SendForward { recv, cd, blockiseq, args, state: exit_id });
+ let send_forward = fun.push_insn(block, Insn::SendForward { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(send_forward);
if !blockiseq.is_null() {
@@ -3976,7 +4033,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let recv = state.stack_pop()?;
let blockiseq: IseqPtr = get_arg(pc, 1).as_ptr();
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let result = fun.push_insn(block, Insn::InvokeSuper { recv, cd, blockiseq, args, state: exit_id });
+ let result = fun.push_insn(block, Insn::InvokeSuper { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(result);
if !blockiseq.is_null() {
@@ -4005,7 +4062,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result<Function, ParseError> {
let block_arg = (flags & VM_CALL_ARGS_BLOCKARG) != 0;
let args = state.stack_pop_n(argc as usize + usize::from(block_arg))?;
let exit_id = fun.push_insn(block, Insn::Snapshot { state: exit_state });
- let result = fun.push_insn(block, Insn::InvokeBlock { cd, args, state: exit_id });
+ let result = fun.push_insn(block, Insn::InvokeBlock { cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) });
state.stack_push(result);
}
YARVINSN_getglobal => {
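The hir.rs changes follow a "start pessimistic, refine on failure" pattern: each send-ish instruction is created with NotOptimizedInstruction(opcode) as its reason, and every failed specialization attempt overwrites it with something more specific via set_dynamic_send_reason. A minimal standalone sketch of that flow (hypothetical types, not the HIR itself):

// Minimal sketch of the "default reason, refined on failed specialization"
// flow used for send-ish HIR instructions. All names here are stand-ins.
#[derive(Debug, Clone, Copy)]
enum FallbackReason {
    NotOptimizedInstruction(u32), // default, set at parse time with the opcode
    Polymorphic,                  // refined reasons, set when specialization fails
    NoProfiles,
}

#[derive(Debug)]
struct SendInsn {
    reason: FallbackReason,
}

fn parse_send(opcode: u32) -> SendInsn {
    // Every send starts with the generic per-opcode reason.
    SendInsn { reason: FallbackReason::NotOptimizedInstruction(opcode) }
}

fn try_specialize(insn: &mut SendInsn, profiled_types: &[u32]) -> bool {
    // Pretend specialization only succeeds for a monomorphic profile.
    match profiled_types {
        [] => { insn.reason = FallbackReason::NoProfiles; false }
        [_single] => true,
        _ => { insn.reason = FallbackReason::Polymorphic; false }
    }
}

fn main() {
    let mut insn = parse_send(51);
    if !try_specialize(&mut insn, &[]) {
        println!("fallback: {:?}", insn.reason); // NoProfiles
    }
}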
diff --git a/zjit/src/state.rs b/zjit/src/state.rs
index fa5d3bc83f..81c05f4986 100644
--- a/zjit/src/state.rs
+++ b/zjit/src/state.rs
@@ -6,7 +6,7 @@ use crate::cruby_methods;
use crate::invariants::Invariants;
use crate::asm::CodeBlock;
use crate::options::get_option;
-use crate::stats::{Counters, ExitCounters};
+use crate::stats::{Counters, InsnCounters};
use crate::virtualmem::CodePtr;
use std::collections::HashMap;
@@ -28,7 +28,10 @@ pub struct ZJITState {
counters: Counters,
/// Side-exit counters
- exit_counters: ExitCounters,
+ exit_counters: InsnCounters,
+
+ /// Send fallback counters
+ send_fallback_counters: InsnCounters,
/// Assumptions that require invalidation
invariants: Invariants,
@@ -78,6 +81,7 @@ impl ZJITState {
code_block: cb,
counters: Counters::default(),
exit_counters: [0; VM_INSTRUCTION_SIZE as usize],
+ send_fallback_counters: [0; VM_INSTRUCTION_SIZE as usize],
invariants: Invariants::default(),
assert_compiles: false,
method_annotations: cruby_methods::init(),
@@ -139,10 +143,15 @@ impl ZJITState {
}
/// Get a mutable reference to side-exit counters
- pub fn get_exit_counters() -> &'static mut ExitCounters {
+ pub fn get_exit_counters() -> &'static mut InsnCounters {
&mut ZJITState::get_instance().exit_counters
}
+ /// Get a mutable reference to fallback counters
+ pub fn get_send_fallback_counters() -> &'static mut InsnCounters {
+ &mut ZJITState::get_instance().send_fallback_counters
+ }
+
/// Get a mutable reference to unoptimized cfunc counter pointers
pub fn get_unoptimized_cfunc_counter_pointers() -> &'static mut HashMap<String, Box<u64>> {
&mut ZJITState::get_instance().unoptimized_cfunc_counter_pointers
diff --git a/zjit/src/stats.rs b/zjit/src/stats.rs
index 7329b3442a..f9f9fb9e37 100644
--- a/zjit/src/stats.rs
+++ b/zjit/src/stats.rs
@@ -17,6 +17,9 @@ macro_rules! make_counters {
exit {
$($exit_counter_name:ident,)+
}
+ dynamic_send {
+ $($dynamic_send_counter_name:ident,)+
+ }
$($counter_name:ident,)+
) => {
/// Struct containing the counter values
@@ -24,6 +27,7 @@ macro_rules! make_counters {
pub struct Counters {
$(pub $default_counter_name: u64,)+
$(pub $exit_counter_name: u64,)+
+ $(pub $dynamic_send_counter_name: u64,)+
$(pub $counter_name: u64,)+
}
@@ -33,6 +37,7 @@ macro_rules! make_counters {
pub enum Counter {
$($default_counter_name,)+
$($exit_counter_name,)+
+ $($dynamic_send_counter_name,)+
$($counter_name,)+
}
@@ -41,6 +46,7 @@ macro_rules! make_counters {
match self {
$( Counter::$default_counter_name => stringify!($default_counter_name).to_string(), )+
$( Counter::$exit_counter_name => stringify!($exit_counter_name).to_string(), )+
+ $( Counter::$dynamic_send_counter_name => stringify!($dynamic_send_counter_name).to_string(), )+
$( Counter::$counter_name => stringify!($counter_name).to_string(), )+
}
}
@@ -52,6 +58,7 @@ macro_rules! make_counters {
match counter {
$( Counter::$default_counter_name => std::ptr::addr_of_mut!(counters.$default_counter_name), )+
$( Counter::$exit_counter_name => std::ptr::addr_of_mut!(counters.$exit_counter_name), )+
+ $( Counter::$dynamic_send_counter_name => std::ptr::addr_of_mut!(counters.$dynamic_send_counter_name), )+
$( Counter::$counter_name => std::ptr::addr_of_mut!(counters.$counter_name), )+
}
}
@@ -67,6 +74,11 @@ macro_rules! make_counters {
$( Counter::$exit_counter_name, )+
];
+ /// List of send fallback counters that are summed as dynamic_send_count.
+ pub const DYNAMIC_SEND_COUNTERS: &'static [Counter] = &[
+ $( Counter::$dynamic_send_counter_name, )+
+ ];
+
/// List of other counters that are available only for --zjit-stats.
pub const OTHER_COUNTERS: &'static [Counter] = &[
$( Counter::$counter_name, )+
@@ -114,6 +126,19 @@ make_counters! {
exit_block_param_proxy_not_iseq_or_ifunc,
}
+ // Send fallback counters that are summed as dynamic_send_count
+ dynamic_send {
+ // send_fallback_: Fallback reasons for send-ish instructions
+ send_fallback_send_without_block_polymorphic,
+ send_fallback_send_without_block_no_profiles,
+ send_fallback_send_without_block_cfunc_not_variadic,
+ send_fallback_send_without_block_cfunc_array_variadic,
+ send_fallback_send_without_block_not_optimized_method_type,
+ send_fallback_send_without_block_direct_too_many_args,
+ send_fallback_obj_to_string_not_string,
+ send_fallback_not_optimized_instruction,
+ }
+
// compile_error_: Compile error reasons
compile_error_iseq_stack_too_large,
compile_error_exception_handler,
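The make_counters! extension above adds a dynamic_send group next to the existing default and exit groups, generating struct fields, enum variants, and a DYNAMIC_SEND_COUNTERS list from one set of names. A much smaller macro following the same idea (illustrative only, not the real macro):

// Illustrative miniature of a make_counters!-style macro: one group of names
// becomes struct fields, enum variants, and a const list for aggregation.
macro_rules! make_counters {
    (
        dynamic_send {
            $($send_name:ident,)+
        }
        $($other_name:ident,)+
    ) => {
        #[derive(Default, Debug)]
        #[allow(dead_code)]
        pub struct Counters {
            $(pub $send_name: u64,)+
            $(pub $other_name: u64,)+
        }

        #[derive(Clone, Copy, Debug)]
        #[allow(non_camel_case_types, dead_code)]
        pub enum Counter {
            $($send_name,)+
            $($other_name,)+
        }

        impl Counter {
            /// Counters in the dynamic_send group, summed into dynamic_send_count.
            pub const DYNAMIC_SEND_COUNTERS: &'static [Counter] = &[
                $(Counter::$send_name,)+
            ];
        }
    };
}

make_counters! {
    dynamic_send {
        send_fallback_obj_to_string_not_string,
        send_fallback_not_optimized_instruction,
    }
    zjit_insn_count,
}

fn main() {
    println!("{} counters in the dynamic_send group", Counter::DYNAMIC_SEND_COUNTERS.len());
}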
@@ -134,14 +159,6 @@ make_counters! {
// The number of times YARV instructions are executed on JIT code
zjit_insn_count,
- // The number of times we do a dynamic dispatch from JIT code
- dynamic_send_count,
- dynamic_send_type_send_without_block,
- dynamic_send_type_send,
- dynamic_send_type_send_forward,
- dynamic_send_type_invokeblock,
- dynamic_send_type_invokesuper,
-
// The number of times we do a dynamic ivar lookup from JIT code
dynamic_getivar_count,
dynamic_setivar_count,
@@ -161,9 +178,6 @@ make_counters! {
unspecialized_def_type_refined,
unspecialized_def_type_null,
- send_fallback_polymorphic,
- send_fallback_no_profiles,
-
// Writes to the VM frame
vm_write_pc_count,
vm_write_sp_count,
@@ -190,7 +204,7 @@ macro_rules! incr_counter {
pub(crate) use incr_counter;
/// The number of side exits from each YARV instruction
-pub type ExitCounters = [u64; VM_INSTRUCTION_SIZE as usize];
+pub type InsnCounters = [u64; VM_INSTRUCTION_SIZE as usize];
/// Return a raw pointer to the exit counter for a given YARV opcode
pub fn exit_counter_ptr_for_opcode(opcode: u32) -> *mut u64 {
@@ -198,6 +212,12 @@ pub fn exit_counter_ptr_for_opcode(opcode: u32) -> *mut u64 {
unsafe { exit_counters.get_unchecked_mut(opcode as usize) }
}
+/// Return a raw pointer to the fallback counter for a given YARV opcode
+pub fn send_fallback_counter_ptr_for_opcode(opcode: u32) -> *mut u64 {
+ let fallback_counters = ZJITState::get_send_fallback_counters();
+ unsafe { fallback_counters.get_unchecked_mut(opcode as usize) }
+}
+
/// Reason why ZJIT failed to produce any JIT code
#[derive(Clone, Debug, PartialEq)]
pub enum CompileError {
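send_fallback_counter_ptr_for_opcode mirrors exit_counter_ptr_for_opcode: a flat [u64; VM_INSTRUCTION_SIZE] array indexed by opcode, handed out as a raw pointer so generated code can increment it in place. A safe, self-contained sketch of the same indexing idea (the constant and types are stand-ins):

// Sketch of an opcode-indexed counter table. The real code hands out *mut u64
// so JIT-generated code can bump counters directly; here a mutable reference
// is enough to show the indexing.
const VM_INSTRUCTION_SIZE: usize = 256; // stand-in value

struct InsnCounters([u64; VM_INSTRUCTION_SIZE]);

impl InsnCounters {
    fn new() -> Self {
        InsnCounters([0; VM_INSTRUCTION_SIZE])
    }

    /// Counter slot for a given YARV opcode.
    fn counter_for_opcode(&mut self, opcode: u32) -> &mut u64 {
        &mut self.0[opcode as usize]
    }
}

fn main() {
    let mut fallback_counters = InsnCounters::new();
    *fallback_counters.counter_for_opcode(51) += 1;
    println!("opcode 51 fell back {} times", *fallback_counters.counter_for_opcode(51));
}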
@@ -268,11 +288,26 @@ pub fn exit_counter_ptr(reason: crate::hir::SideExitReason) -> *mut u64 {
counter_ptr(counter)
}
-pub fn send_fallback_counter(def_type: crate::hir::MethodType) -> Counter {
+pub fn send_fallback_counter(reason: crate::hir::SendFallbackReason) -> Counter {
+ use crate::hir::SendFallbackReason::*;
+ use crate::stats::Counter::*;
+ match reason {
+ SendWithoutBlockPolymorphic => send_fallback_send_without_block_polymorphic,
+ SendWithoutBlockNoProfiles => send_fallback_send_without_block_no_profiles,
+ SendWithoutBlockCfuncNotVariadic => send_fallback_send_without_block_cfunc_not_variadic,
+ SendWithoutBlockCfuncArrayVariadic => send_fallback_send_without_block_cfunc_array_variadic,
+ SendWithoutBlockNotOptimizedMethodType(_) => send_fallback_send_without_block_not_optimized_method_type,
+ SendWithoutBlockDirectTooManyArgs => send_fallback_send_without_block_direct_too_many_args,
+ ObjToStringNotString => send_fallback_obj_to_string_not_string,
+ NotOptimizedInstruction(_) => send_fallback_not_optimized_instruction,
+ }
+}
+
+pub fn send_fallback_counter_for_method_type(method_type: crate::hir::MethodType) -> Counter {
use crate::hir::MethodType::*;
use crate::stats::Counter::*;
- match def_type {
+ match method_type {
Iseq => unspecialized_def_type_iseq,
Cfunc => unspecialized_def_type_cfunc,
Attrset => unspecialized_def_type_attrset,
@@ -376,6 +411,23 @@ pub extern "C" fn rb_zjit_stats(_ec: EcPtr, _self: VALUE, target_key: VALUE) ->
set_stat_usize!(hash, &key_string, *count);
}
+ // Set send fallback counters for each SendFallbackReason
+ let mut dynamic_send_count = 0;
+ for &counter in DYNAMIC_SEND_COUNTERS {
+ let count = unsafe { *counter_ptr(counter) };
+ dynamic_send_count += count;
+ set_stat_usize!(hash, &counter.name(), count);
+ }
+ set_stat_usize!(hash, "dynamic_send_count", dynamic_send_count);
+
+ // Set send fallback counters for NotOptimizedInstruction
+ let send_fallback_counters = ZJITState::get_send_fallback_counters();
+ for (op_idx, count) in send_fallback_counters.iter().enumerate().take(VM_INSTRUCTION_SIZE as usize) {
+ let op_name = insn_name(op_idx);
+ let key_string = "not_optimized_yarv_insn_".to_owned() + &op_name;
+ set_stat_usize!(hash, &key_string, *count);
+ }
+
// Only ZJIT_STATS builds support rb_vm_insn_count
if unsafe { rb_vm_insn_count } > 0 {
let vm_insn_count = unsafe { rb_vm_insn_count };
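The rb_zjit_stats change above derives dynamic_send_count by summing the DYNAMIC_SEND_COUNTERS group and then exports the per-opcode breakdown under not_optimized_yarv_insn_* keys. A sketch of that aggregation step with a plain map standing in for the Ruby hash (names and values are illustrative):

use std::collections::HashMap;

// Sketch of the aggregation step: sum a group of counters into a derived
// total, then emit per-opcode breakdown keys with a common prefix.
fn export_send_fallback_stats(
    group: &[(&str, u64)],              // (counter name, value) pairs in the dynamic_send group
    per_opcode: &[u64],                 // opcode-indexed fallback counts
    insn_name: impl Fn(usize) -> String,
) -> HashMap<String, u64> {
    let mut hash = HashMap::new();

    let mut dynamic_send_count = 0;
    for &(name, count) in group {
        dynamic_send_count += count;
        hash.insert(name.to_string(), count);
    }
    hash.insert("dynamic_send_count".to_string(), dynamic_send_count);

    for (op_idx, &count) in per_opcode.iter().enumerate() {
        hash.insert(format!("not_optimized_yarv_insn_{}", insn_name(op_idx)), count);
    }
    hash
}

fn main() {
    let group = [("send_fallback_obj_to_string_not_string", 3u64), ("send_fallback_not_optimized_instruction", 5)];
    let per_opcode = [0u64, 5];
    let stats = export_send_fallback_stats(&group, &per_opcode, |i| format!("opcode_{i}"));
    println!("{}", stats["dynamic_send_count"]); // 8
}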