diff options
| author | Max Bernstein <rubybugs@bernsteinbear.com> | 2025-10-31 15:48:59 -0400 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2025-10-31 15:48:59 -0400 |
| commit | 7688b05098af501642b1930ac1091dbb6241285e (patch) | |
| tree | ce59f9cc85c3800937aaa98c3472746fa6a0185b | |
| parent | ab01fcc5123205cdff6e566c2b686e7ab3ed383f (diff) | |
ZJIT: Optimize VM_METHOD_TYPE_ALIAS (#15018)
Just loop until you find a non-alias.
| -rw-r--r-- | zjit/src/hir.rs | 20 |
| -rw-r--r-- | zjit/src/hir/opt_tests.rs | 87 |
2 files changed, 103 insertions, 4 deletions
diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs index 5a609670f4..955d33a906 100644 --- a/zjit/src/hir.rs +++ b/zjit/src/hir.rs @@ -2276,7 +2276,11 @@ impl Function { // Load an overloaded cme if applicable. See vm_search_cc(). // It allows you to use a faster ISEQ if possible. cme = unsafe { rb_check_overloaded_cme(cme, ci) }; - let def_type = unsafe { get_cme_def_type(cme) }; + let mut def_type = unsafe { get_cme_def_type(cme) }; + while def_type == VM_METHOD_TYPE_ALIAS { + cme = unsafe { rb_aliased_callable_method_entry(cme) }; + def_type = unsafe { get_cme_def_type(cme) }; + } if def_type == VM_METHOD_TYPE_ISEQ { // TODO(max): Allow non-iseq; cache cme // Only specialize positional-positional calls @@ -2453,7 +2457,11 @@ impl Function { // Load an overloaded cme if applicable. See vm_search_cc(). // It allows you to use a faster ISEQ if possible. cme = unsafe { rb_check_overloaded_cme(cme, ci) }; - let def_type = unsafe { get_cme_def_type(cme) }; + let mut def_type = unsafe { get_cme_def_type(cme) }; + while def_type == VM_METHOD_TYPE_ALIAS { + cme = unsafe { rb_aliased_callable_method_entry(cme) }; + def_type = unsafe { get_cme_def_type(cme) }; + } self.set_dynamic_send_reason(insn_id, SendNotOptimizedMethodType(MethodType::from(def_type))); self.push_insn_id(block, insn_id); continue; } @@ -2810,13 +2818,17 @@ impl Function { }; // Do method lookup - let method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) }; + let mut method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) }; if method.is_null() { return Err(()); } // Filter for C methods - let def_type = unsafe { get_cme_def_type(method) }; + let mut def_type = unsafe { get_cme_def_type(method) }; + while def_type == VM_METHOD_TYPE_ALIAS { + method = unsafe { rb_aliased_callable_method_entry(method) }; + def_type = unsafe { get_cme_def_type(method) }; + } if def_type != VM_METHOD_TYPE_CFUNC { return 
Err(()); } diff --git a/zjit/src/hir/opt_tests.rs b/zjit/src/hir/opt_tests.rs index 5247122023..8a9acb59bb 100644 --- a/zjit/src/hir/opt_tests.rs +++ b/zjit/src/hir/opt_tests.rs @@ -639,6 +639,93 @@ mod hir_opt_tests { } #[test] + fn test_optimize_send_without_block_to_aliased_iseq() { + eval(" + def foo = 1 + alias bar foo + alias baz bar + def test = baz + test; test + "); + assert_snapshot!(hir_string("test"), @r" + fn test@<compiled>:5: + bb0(): + EntryPoint interpreter + v1:BasicObject = LoadSelf + Jump bb2(v1) + bb1(v4:BasicObject): + EntryPoint JIT(0) + Jump bb2(v4) + bb2(v6:BasicObject): + PatchPoint MethodRedefined(Object@0x1000, baz@0x1008, cme:0x1010) + PatchPoint NoSingletonClass(Object@0x1000) + v19:HeapObject[class_exact*:Object@VALUE(0x1000)] = GuardType v6, HeapObject[class_exact*:Object@VALUE(0x1000)] + IncrCounter inline_iseq_optimized_send_count + v22:Fixnum[1] = Const Value(1) + CheckInterrupts + Return v22 + "); + } + + #[test] + fn test_optimize_send_without_block_to_aliased_cfunc() { + eval(" + alias bar itself + alias baz bar + def test = baz + test; test + "); + assert_snapshot!(hir_string("test"), @r" + fn test@<compiled>:4: + bb0(): + EntryPoint interpreter + v1:BasicObject = LoadSelf + Jump bb2(v1) + bb1(v4:BasicObject): + EntryPoint JIT(0) + Jump bb2(v4) + bb2(v6:BasicObject): + PatchPoint MethodRedefined(Object@0x1000, baz@0x1008, cme:0x1010) + PatchPoint NoSingletonClass(Object@0x1000) + v20:HeapObject[class_exact*:Object@VALUE(0x1000)] = GuardType v6, HeapObject[class_exact*:Object@VALUE(0x1000)] + IncrCounter inline_cfunc_optimized_send_count + CheckInterrupts + Return v20 + "); + } + + #[test] + fn test_optimize_send_to_aliased_cfunc() { + eval(" + class C < Array + alias fun_new_map map + end + def test(o) = o.fun_new_map {|e| e } + test C.new; test C.new + "); + assert_snapshot!(hir_string("test"), @r" + fn test@<compiled>:5: + bb0(): + EntryPoint interpreter + v1:BasicObject = LoadSelf + v2:BasicObject = GetLocal l0, SP@4 + Jump 
bb2(v1, v2) + bb1(v5:BasicObject, v6:BasicObject): + EntryPoint JIT(0) + Jump bb2(v5, v6) + bb2(v8:BasicObject, v9:BasicObject): + v13:BasicObject = GetLocal l0, EP@3 + PatchPoint MethodRedefined(C@0x1000, fun_new_map@0x1008, cme:0x1010) + PatchPoint NoSingletonClass(C@0x1000) + v25:ArraySubclass[class_exact:C] = GuardType v13, ArraySubclass[class_exact:C] + v26:BasicObject = CCallWithFrame fun_new_map@0x1038, v25, block=0x1040 + v16:BasicObject = GetLocal l0, EP@3 + CheckInterrupts + Return v26 + "); + } + + #[test] fn test_optimize_nonexistent_top_level_call() { eval(" def foo |
