author    Jemma Issroff <jemmaissroff@gmail.com>  2022-09-23 13:54:42 -0400
committer Aaron Patterson <aaron.patterson@gmail.com>  2022-09-26 09:21:30 -0700
commit    9ddfd2ca004d1952be79cf1b84c52c79a55978f4 (patch)
tree      fe5fa943d9a2dc7438db920a09173ab06f869993
parent    2e88bca24ff4cafeb6afe5b062ff7181bc4b3a9b (diff)
This commit implements the Object Shapes technique in CRuby.
Object Shapes is used for accessing instance variables and representing the
"frozenness" of objects. Object instances have a "shape" and the shape
represents some attributes of the object (currently which instance variables
are set and the "frozenness"). Shapes form a tree data structure, and when a
new instance variable is set on an object, that object "transitions" to a new
shape in the shape tree. Each shape has an ID that is used for caching. The
shape structure is independent of class, so objects of different types can
have the same shape. For example:

```ruby
class Foo
  def initialize
    # Starts with shape id 0
    @a = 1 # transitions to shape id 1
    @b = 1 # transitions to shape id 2
  end
end

class Bar
  def initialize
    # Starts with shape id 0
    @a = 1 # transitions to shape id 1
    @b = 1 # transitions to shape id 2
  end
end

foo = Foo.new # `foo` has shape id 2
bar = Bar.new # `bar` has shape id 2
```

Both `foo` and `bar` instances have the same shape because they both set
instance variables of the same name in the same order.

This technique can help to improve inline cache hits as well as generate more
efficient machine code in JIT compilers.

This commit also adds some methods for debugging shapes on objects. See
`RubyVM::Shape` for more details.

For more context on Object Shapes, see [Feature #18776]

Co-Authored-By: Aaron Patterson <tenderlove@ruby-lang.org>
Co-Authored-By: Eileen M. Uchitelle <eileencodes@gmail.com>
Co-Authored-By: John Hawthorn <john@hawthorn.email>
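As an additional, editorial illustration (not part of the original commit message): since "frozenness" is also recorded in the shape, freezing an object is itself modeled as a shape transition. The class and variable names below (`Point`, `p1`, `p2`) are hypothetical, and the comments follow from the description above rather than from specific shape IDs.

```ruby
class Point
  def initialize(x, y)
    # Starts at the root shape
    @x = x # transitions to a shape where @x is set
    @y = y # transitions to a shape where @x and @y are set
  end
end

p1 = Point.new(1, 2)
p2 = Point.new(3, 4)
# p1 and p2 share a shape: the same ivars were set in the same order.

p2.freeze
# Freezing is represented in the shape as well, so p2 transitions to a
# "frozen" shape and no longer shares p1's shape, even though both
# objects have identical instance variables.
```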
Notes:
    Merged: https://github.com/ruby/ruby/pull/6386
-rw-r--r--  bootstraptest/test_attr.rb | 16
-rw-r--r--  common.mk | 322
-rw-r--r--  compile.c | 26
-rw-r--r--  debug_counter.h | 10
-rw-r--r--  ext/coverage/depend | 4
-rw-r--r--  ext/objspace/depend | 5
-rw-r--r--  ext/objspace/objspace.c | 1
-rw-r--r--  gc.c | 219
-rw-r--r--  include/ruby/internal/core/robject.h | 3
-rw-r--r--  include/ruby/internal/fl_type.h | 19
-rw-r--r--  inits.c | 1
-rw-r--r--  internal.h | 3
-rw-r--r--  internal/class.h | 11
-rw-r--r--  internal/imemo.h | 1
-rw-r--r--  internal/object.h | 22
-rw-r--r--  internal/variable.h | 5
-rw-r--r--  iseq.c | 16
-rw-r--r--  lib/mjit/compiler.rb | 117
-rw-r--r--  marshal.c | 10
-rwxr-xr-x  misc/lldb_cruby.py | 6
-rw-r--r--  mjit_c.rb | 37
-rw-r--r--  mjit_compiler.h | 2
-rw-r--r--  object.c | 46
-rw-r--r--  ractor_core.h | 6
-rw-r--r--  shape.c | 571
-rw-r--r--  shape.h | 153
-rw-r--r--  spec/ruby/library/objectspace/reachable_objects_from_spec.rb | 2
-rw-r--r--  spec/ruby/optional/capi/shared/rbasic.rb | 11
-rw-r--r--  test/-ext-/marshal/test_internal_ivar.rb | 1
-rw-r--r--  test/objspace/test_objspace.rb | 14
-rw-r--r--  test/ruby/test_mjit.rb | 4
-rw-r--r--  test/ruby/test_shapes.rb | 171
-rwxr-xr-x  tool/mjit/bindgen.rb | 10
-rw-r--r--  variable.c | 784
-rw-r--r--  variable.h | 10
-rw-r--r--  vm.c | 45
-rw-r--r--  vm_callinfo.h | 108
-rw-r--r--  vm_core.h | 11
-rw-r--r--  vm_eval.c | 4
-rw-r--r--  vm_insnhelper.c | 490
-rw-r--r--  yjit/bindgen/src/main.rs | 7
-rw-r--r--  yjit/src/asm/x86_64/mod.rs | 2
-rw-r--r--  yjit/src/codegen.rs | 135
-rw-r--r--  yjit/src/cruby.rs | 12
-rw-r--r--  yjit/src/cruby_bindings.inc.rs | 41
45 files changed, 2548 insertions, 946 deletions
diff --git a/bootstraptest/test_attr.rb b/bootstraptest/test_attr.rb
index 721a847145..3cb9d3eb39 100644
--- a/bootstraptest/test_attr.rb
+++ b/bootstraptest/test_attr.rb
@@ -34,3 +34,19 @@ assert_equal %{ok}, %{
print "ok"
end
}, '[ruby-core:15120]'
+
+assert_equal %{ok}, %{
+ class Big
+ attr_reader :foo
+ def initialize
+ @foo = "ok"
+ end
+ end
+
+ obj = Big.new
+ 100.times do |i|
+ obj.instance_variable_set(:"@ivar_\#{i}", i)
+ end
+
+ Big.new.foo
+}
diff --git a/common.mk b/common.mk
index 150add3026..cfcd6bd5a0 100644
--- a/common.mk
+++ b/common.mk
@@ -136,6 +136,7 @@ COMMONOBJS = array.$(OBJEXT) \
regsyntax.$(OBJEXT) \
ruby.$(OBJEXT) \
scheduler.$(OBJEXT) \
+ shape.$(OBJEXT) \
signal.$(OBJEXT) \
sprintf.$(OBJEXT) \
st.$(OBJEXT) \
@@ -1832,6 +1833,7 @@ array.$(OBJEXT): $(top_srcdir)/internal/proc.h
array.$(OBJEXT): $(top_srcdir)/internal/rational.h
array.$(OBJEXT): $(top_srcdir)/internal/serial.h
array.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+array.$(OBJEXT): $(top_srcdir)/internal/variable.h
array.$(OBJEXT): $(top_srcdir)/internal/vm.h
array.$(OBJEXT): $(top_srcdir)/internal/warnings.h
array.$(OBJEXT): {$(VPATH)}array.c
@@ -1848,6 +1850,7 @@ array.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
array.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
array.$(OBJEXT): {$(VPATH)}builtin.h
array.$(OBJEXT): {$(VPATH)}config.h
+array.$(OBJEXT): {$(VPATH)}constant.h
array.$(OBJEXT): {$(VPATH)}debug_counter.h
array.$(OBJEXT): {$(VPATH)}defines.h
array.$(OBJEXT): {$(VPATH)}encoding.h
@@ -2010,6 +2013,7 @@ array.$(OBJEXT): {$(VPATH)}oniguruma.h
array.$(OBJEXT): {$(VPATH)}probes.dmyh
array.$(OBJEXT): {$(VPATH)}probes.h
array.$(OBJEXT): {$(VPATH)}ruby_assert.h
+array.$(OBJEXT): {$(VPATH)}shape.h
array.$(OBJEXT): {$(VPATH)}st.h
array.$(OBJEXT): {$(VPATH)}subst.h
array.$(OBJEXT): {$(VPATH)}transient_heap.h
@@ -2028,6 +2032,7 @@ ast.$(OBJEXT): $(top_srcdir)/internal/parse.h
ast.$(OBJEXT): $(top_srcdir)/internal/serial.h
ast.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
ast.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+ast.$(OBJEXT): $(top_srcdir)/internal/variable.h
ast.$(OBJEXT): $(top_srcdir)/internal/vm.h
ast.$(OBJEXT): $(top_srcdir)/internal/warnings.h
ast.$(OBJEXT): {$(VPATH)}assert.h
@@ -2045,9 +2050,11 @@ ast.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
ast.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
ast.$(OBJEXT): {$(VPATH)}builtin.h
ast.$(OBJEXT): {$(VPATH)}config.h
+ast.$(OBJEXT): {$(VPATH)}constant.h
ast.$(OBJEXT): {$(VPATH)}defines.h
ast.$(OBJEXT): {$(VPATH)}encoding.h
ast.$(OBJEXT): {$(VPATH)}id.h
+ast.$(OBJEXT): {$(VPATH)}id_table.h
ast.$(OBJEXT): {$(VPATH)}intern.h
ast.$(OBJEXT): {$(VPATH)}internal.h
ast.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -2207,6 +2214,7 @@ ast.$(OBJEXT): {$(VPATH)}onigmo.h
ast.$(OBJEXT): {$(VPATH)}oniguruma.h
ast.$(OBJEXT): {$(VPATH)}ruby_assert.h
ast.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+ast.$(OBJEXT): {$(VPATH)}shape.h
ast.$(OBJEXT): {$(VPATH)}st.h
ast.$(OBJEXT): {$(VPATH)}subst.h
ast.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -2390,6 +2398,7 @@ bignum.$(OBJEXT): {$(VPATH)}internal/warning_push.h
bignum.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
bignum.$(OBJEXT): {$(VPATH)}missing.h
bignum.$(OBJEXT): {$(VPATH)}ruby_assert.h
+bignum.$(OBJEXT): {$(VPATH)}shape.h
bignum.$(OBJEXT): {$(VPATH)}st.h
bignum.$(OBJEXT): {$(VPATH)}subst.h
bignum.$(OBJEXT): {$(VPATH)}thread.h
@@ -2405,6 +2414,7 @@ builtin.$(OBJEXT): $(top_srcdir)/internal/gc.h
builtin.$(OBJEXT): $(top_srcdir)/internal/imemo.h
builtin.$(OBJEXT): $(top_srcdir)/internal/serial.h
builtin.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+builtin.$(OBJEXT): $(top_srcdir)/internal/variable.h
builtin.$(OBJEXT): $(top_srcdir)/internal/vm.h
builtin.$(OBJEXT): $(top_srcdir)/internal/warnings.h
builtin.$(OBJEXT): {$(VPATH)}assert.h
@@ -2422,8 +2432,10 @@ builtin.$(OBJEXT): {$(VPATH)}builtin.c
builtin.$(OBJEXT): {$(VPATH)}builtin.h
builtin.$(OBJEXT): {$(VPATH)}builtin_binary.inc
builtin.$(OBJEXT): {$(VPATH)}config.h
+builtin.$(OBJEXT): {$(VPATH)}constant.h
builtin.$(OBJEXT): {$(VPATH)}defines.h
builtin.$(OBJEXT): {$(VPATH)}id.h
+builtin.$(OBJEXT): {$(VPATH)}id_table.h
builtin.$(OBJEXT): {$(VPATH)}intern.h
builtin.$(OBJEXT): {$(VPATH)}internal.h
builtin.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -2572,6 +2584,7 @@ builtin.$(OBJEXT): {$(VPATH)}missing.h
builtin.$(OBJEXT): {$(VPATH)}node.h
builtin.$(OBJEXT): {$(VPATH)}ruby_assert.h
builtin.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+builtin.$(OBJEXT): {$(VPATH)}shape.h
builtin.$(OBJEXT): {$(VPATH)}st.h
builtin.$(OBJEXT): {$(VPATH)}subst.h
builtin.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -2774,6 +2787,7 @@ class.$(OBJEXT): {$(VPATH)}onigmo.h
class.$(OBJEXT): {$(VPATH)}oniguruma.h
class.$(OBJEXT): {$(VPATH)}ruby_assert.h
class.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+class.$(OBJEXT): {$(VPATH)}shape.h
class.$(OBJEXT): {$(VPATH)}st.h
class.$(OBJEXT): {$(VPATH)}subst.h
class.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -3177,6 +3191,7 @@ compile.$(OBJEXT): {$(VPATH)}re.h
compile.$(OBJEXT): {$(VPATH)}regex.h
compile.$(OBJEXT): {$(VPATH)}ruby_assert.h
compile.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+compile.$(OBJEXT): {$(VPATH)}shape.h
compile.$(OBJEXT): {$(VPATH)}st.h
compile.$(OBJEXT): {$(VPATH)}subst.h
compile.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -3201,6 +3216,7 @@ complex.$(OBJEXT): $(top_srcdir)/internal/object.h
complex.$(OBJEXT): $(top_srcdir)/internal/rational.h
complex.$(OBJEXT): $(top_srcdir)/internal/serial.h
complex.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+complex.$(OBJEXT): $(top_srcdir)/internal/variable.h
complex.$(OBJEXT): $(top_srcdir)/internal/vm.h
complex.$(OBJEXT): $(top_srcdir)/internal/warnings.h
complex.$(OBJEXT): {$(VPATH)}assert.h
@@ -3215,6 +3231,7 @@ complex.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
complex.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
complex.$(OBJEXT): {$(VPATH)}complex.c
complex.$(OBJEXT): {$(VPATH)}config.h
+complex.$(OBJEXT): {$(VPATH)}constant.h
complex.$(OBJEXT): {$(VPATH)}defines.h
complex.$(OBJEXT): {$(VPATH)}id.h
complex.$(OBJEXT): {$(VPATH)}id_table.h
@@ -3362,6 +3379,7 @@ complex.$(OBJEXT): {$(VPATH)}internal/warning_push.h
complex.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
complex.$(OBJEXT): {$(VPATH)}missing.h
complex.$(OBJEXT): {$(VPATH)}ruby_assert.h
+complex.$(OBJEXT): {$(VPATH)}shape.h
complex.$(OBJEXT): {$(VPATH)}st.h
complex.$(OBJEXT): {$(VPATH)}subst.h
cont.$(OBJEXT): $(CCAN_DIR)/check_type/check_type.h
@@ -3379,6 +3397,7 @@ cont.$(OBJEXT): $(top_srcdir)/internal/proc.h
cont.$(OBJEXT): $(top_srcdir)/internal/sanitizers.h
cont.$(OBJEXT): $(top_srcdir)/internal/serial.h
cont.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+cont.$(OBJEXT): $(top_srcdir)/internal/variable.h
cont.$(OBJEXT): $(top_srcdir)/internal/vm.h
cont.$(OBJEXT): $(top_srcdir)/internal/warnings.h
cont.$(OBJEXT): {$(VPATH)}$(COROUTINE_H)
@@ -3394,6 +3413,7 @@ cont.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
cont.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
cont.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
cont.$(OBJEXT): {$(VPATH)}config.h
+cont.$(OBJEXT): {$(VPATH)}constant.h
cont.$(OBJEXT): {$(VPATH)}cont.c
cont.$(OBJEXT): {$(VPATH)}debug_counter.h
cont.$(OBJEXT): {$(VPATH)}defines.h
@@ -3552,6 +3572,7 @@ cont.$(OBJEXT): {$(VPATH)}ractor.h
cont.$(OBJEXT): {$(VPATH)}ractor_core.h
cont.$(OBJEXT): {$(VPATH)}ruby_assert.h
cont.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+cont.$(OBJEXT): {$(VPATH)}shape.h
cont.$(OBJEXT): {$(VPATH)}st.h
cont.$(OBJEXT): {$(VPATH)}subst.h
cont.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -3572,6 +3593,7 @@ debug.$(OBJEXT): $(top_srcdir)/internal/imemo.h
debug.$(OBJEXT): $(top_srcdir)/internal/serial.h
debug.$(OBJEXT): $(top_srcdir)/internal/signal.h
debug.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+debug.$(OBJEXT): $(top_srcdir)/internal/variable.h
debug.$(OBJEXT): $(top_srcdir)/internal/vm.h
debug.$(OBJEXT): $(top_srcdir)/internal/warnings.h
debug.$(OBJEXT): {$(VPATH)}assert.h
@@ -3586,6 +3608,7 @@ debug.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
debug.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
debug.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
debug.$(OBJEXT): {$(VPATH)}config.h
+debug.$(OBJEXT): {$(VPATH)}constant.h
debug.$(OBJEXT): {$(VPATH)}debug.c
debug.$(OBJEXT): {$(VPATH)}debug_counter.h
debug.$(OBJEXT): {$(VPATH)}defines.h
@@ -3756,6 +3779,7 @@ debug.$(OBJEXT): {$(VPATH)}ractor.h
debug.$(OBJEXT): {$(VPATH)}ractor_core.h
debug.$(OBJEXT): {$(VPATH)}ruby_assert.h
debug.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+debug.$(OBJEXT): {$(VPATH)}shape.h
debug.$(OBJEXT): {$(VPATH)}st.h
debug.$(OBJEXT): {$(VPATH)}subst.h
debug.$(OBJEXT): {$(VPATH)}symbol.h
@@ -3940,6 +3964,7 @@ dir.$(OBJEXT): $(top_srcdir)/internal/object.h
dir.$(OBJEXT): $(top_srcdir)/internal/serial.h
dir.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
dir.$(OBJEXT): $(top_srcdir)/internal/string.h
+dir.$(OBJEXT): $(top_srcdir)/internal/variable.h
dir.$(OBJEXT): $(top_srcdir)/internal/vm.h
dir.$(OBJEXT): $(top_srcdir)/internal/warnings.h
dir.$(OBJEXT): {$(VPATH)}assert.h
@@ -3954,6 +3979,7 @@ dir.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
dir.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
dir.$(OBJEXT): {$(VPATH)}builtin.h
dir.$(OBJEXT): {$(VPATH)}config.h
+dir.$(OBJEXT): {$(VPATH)}constant.h
dir.$(OBJEXT): {$(VPATH)}defines.h
dir.$(OBJEXT): {$(VPATH)}dir.c
dir.$(OBJEXT): {$(VPATH)}dir.rbinc
@@ -4116,6 +4142,7 @@ dir.$(OBJEXT): {$(VPATH)}io.h
dir.$(OBJEXT): {$(VPATH)}missing.h
dir.$(OBJEXT): {$(VPATH)}onigmo.h
dir.$(OBJEXT): {$(VPATH)}oniguruma.h
+dir.$(OBJEXT): {$(VPATH)}shape.h
dir.$(OBJEXT): {$(VPATH)}st.h
dir.$(OBJEXT): {$(VPATH)}subst.h
dir.$(OBJEXT): {$(VPATH)}thread.h
@@ -5441,6 +5468,7 @@ encoding.$(OBJEXT): $(top_srcdir)/internal/object.h
encoding.$(OBJEXT): $(top_srcdir)/internal/serial.h
encoding.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
encoding.$(OBJEXT): $(top_srcdir)/internal/string.h
+encoding.$(OBJEXT): $(top_srcdir)/internal/variable.h
encoding.$(OBJEXT): $(top_srcdir)/internal/vm.h
encoding.$(OBJEXT): $(top_srcdir)/internal/warnings.h
encoding.$(OBJEXT): {$(VPATH)}assert.h
@@ -5454,6 +5482,7 @@ encoding.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
encoding.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
encoding.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
encoding.$(OBJEXT): {$(VPATH)}config.h
+encoding.$(OBJEXT): {$(VPATH)}constant.h
encoding.$(OBJEXT): {$(VPATH)}debug_counter.h
encoding.$(OBJEXT): {$(VPATH)}defines.h
encoding.$(OBJEXT): {$(VPATH)}encindex.h
@@ -5616,6 +5645,7 @@ encoding.$(OBJEXT): {$(VPATH)}onigmo.h
encoding.$(OBJEXT): {$(VPATH)}oniguruma.h
encoding.$(OBJEXT): {$(VPATH)}regenc.h
encoding.$(OBJEXT): {$(VPATH)}ruby_assert.h
+encoding.$(OBJEXT): {$(VPATH)}shape.h
encoding.$(OBJEXT): {$(VPATH)}st.h
encoding.$(OBJEXT): {$(VPATH)}subst.h
encoding.$(OBJEXT): {$(VPATH)}util.h
@@ -5640,6 +5670,7 @@ enum.$(OBJEXT): $(top_srcdir)/internal/rational.h
enum.$(OBJEXT): $(top_srcdir)/internal/re.h
enum.$(OBJEXT): $(top_srcdir)/internal/serial.h
enum.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+enum.$(OBJEXT): $(top_srcdir)/internal/variable.h
enum.$(OBJEXT): $(top_srcdir)/internal/vm.h
enum.$(OBJEXT): $(top_srcdir)/internal/warnings.h
enum.$(OBJEXT): {$(VPATH)}assert.h
@@ -5653,6 +5684,7 @@ enum.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
enum.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
enum.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
enum.$(OBJEXT): {$(VPATH)}config.h
+enum.$(OBJEXT): {$(VPATH)}constant.h
enum.$(OBJEXT): {$(VPATH)}defines.h
enum.$(OBJEXT): {$(VPATH)}encoding.h
enum.$(OBJEXT): {$(VPATH)}enum.c
@@ -5813,6 +5845,7 @@ enum.$(OBJEXT): {$(VPATH)}missing.h
enum.$(OBJEXT): {$(VPATH)}onigmo.h
enum.$(OBJEXT): {$(VPATH)}oniguruma.h
enum.$(OBJEXT): {$(VPATH)}ruby_assert.h
+enum.$(OBJEXT): {$(VPATH)}shape.h
enum.$(OBJEXT): {$(VPATH)}st.h
enum.$(OBJEXT): {$(VPATH)}subst.h
enum.$(OBJEXT): {$(VPATH)}symbol.h
@@ -6212,6 +6245,7 @@ error.$(OBJEXT): {$(VPATH)}onigmo.h
error.$(OBJEXT): {$(VPATH)}oniguruma.h
error.$(OBJEXT): {$(VPATH)}ruby_assert.h
error.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+error.$(OBJEXT): {$(VPATH)}shape.h
error.$(OBJEXT): {$(VPATH)}st.h
error.$(OBJEXT): {$(VPATH)}subst.h
error.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -6433,6 +6467,7 @@ eval.$(OBJEXT): {$(VPATH)}ractor.h
eval.$(OBJEXT): {$(VPATH)}ractor_core.h
eval.$(OBJEXT): {$(VPATH)}ruby_assert.h
eval.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+eval.$(OBJEXT): {$(VPATH)}shape.h
eval.$(OBJEXT): {$(VPATH)}st.h
eval.$(OBJEXT): {$(VPATH)}subst.h
eval.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -6473,6 +6508,7 @@ file.$(OBJEXT): $(top_srcdir)/internal/serial.h
file.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
file.$(OBJEXT): $(top_srcdir)/internal/string.h
file.$(OBJEXT): $(top_srcdir)/internal/thread.h
+file.$(OBJEXT): $(top_srcdir)/internal/variable.h
file.$(OBJEXT): $(top_srcdir)/internal/vm.h
file.$(OBJEXT): $(top_srcdir)/internal/warnings.h
file.$(OBJEXT): {$(VPATH)}assert.h
@@ -6486,6 +6522,7 @@ file.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
file.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
file.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
file.$(OBJEXT): {$(VPATH)}config.h
+file.$(OBJEXT): {$(VPATH)}constant.h
file.$(OBJEXT): {$(VPATH)}defines.h
file.$(OBJEXT): {$(VPATH)}dln.h
file.$(OBJEXT): {$(VPATH)}encindex.h
@@ -6648,6 +6685,7 @@ file.$(OBJEXT): {$(VPATH)}io.h
file.$(OBJEXT): {$(VPATH)}missing.h
file.$(OBJEXT): {$(VPATH)}onigmo.h
file.$(OBJEXT): {$(VPATH)}oniguruma.h
+file.$(OBJEXT): {$(VPATH)}shape.h
file.$(OBJEXT): {$(VPATH)}st.h
file.$(OBJEXT): {$(VPATH)}subst.h
file.$(OBJEXT): {$(VPATH)}thread.h
@@ -6880,6 +6918,7 @@ gc.$(OBJEXT): {$(VPATH)}regex.h
gc.$(OBJEXT): {$(VPATH)}regint.h
gc.$(OBJEXT): {$(VPATH)}ruby_assert.h
gc.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+gc.$(OBJEXT): {$(VPATH)}shape.h
gc.$(OBJEXT): {$(VPATH)}st.h
gc.$(OBJEXT): {$(VPATH)}subst.h
gc.$(OBJEXT): {$(VPATH)}symbol.h
@@ -6905,6 +6944,7 @@ goruby.$(OBJEXT): $(top_srcdir)/internal/gc.h
goruby.$(OBJEXT): $(top_srcdir)/internal/imemo.h
goruby.$(OBJEXT): $(top_srcdir)/internal/serial.h
goruby.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+goruby.$(OBJEXT): $(top_srcdir)/internal/variable.h
goruby.$(OBJEXT): $(top_srcdir)/internal/vm.h
goruby.$(OBJEXT): $(top_srcdir)/internal/warnings.h
goruby.$(OBJEXT): {$(VPATH)}assert.h
@@ -6920,11 +6960,13 @@ goruby.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
goruby.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
goruby.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
goruby.$(OBJEXT): {$(VPATH)}config.h
+goruby.$(OBJEXT): {$(VPATH)}constant.h
goruby.$(OBJEXT): {$(VPATH)}defines.h
goruby.$(OBJEXT): {$(VPATH)}golf_prelude.c
goruby.$(OBJEXT): {$(VPATH)}golf_prelude.rb
goruby.$(OBJEXT): {$(VPATH)}goruby.c
goruby.$(OBJEXT): {$(VPATH)}id.h
+goruby.$(OBJEXT): {$(VPATH)}id_table.h
goruby.$(OBJEXT): {$(VPATH)}intern.h
goruby.$(OBJEXT): {$(VPATH)}internal.h
goruby.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -7074,6 +7116,7 @@ goruby.$(OBJEXT): {$(VPATH)}missing.h
goruby.$(OBJEXT): {$(VPATH)}node.h
goruby.$(OBJEXT): {$(VPATH)}ruby_assert.h
goruby.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+goruby.$(OBJEXT): {$(VPATH)}shape.h
goruby.$(OBJEXT): {$(VPATH)}st.h
goruby.$(OBJEXT): {$(VPATH)}subst.h
goruby.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -7099,6 +7142,7 @@ hash.$(OBJEXT): $(top_srcdir)/internal/string.h
hash.$(OBJEXT): $(top_srcdir)/internal/symbol.h
hash.$(OBJEXT): $(top_srcdir)/internal/thread.h
hash.$(OBJEXT): $(top_srcdir)/internal/time.h
+hash.$(OBJEXT): $(top_srcdir)/internal/variable.h
hash.$(OBJEXT): $(top_srcdir)/internal/vm.h
hash.$(OBJEXT): $(top_srcdir)/internal/warnings.h
hash.$(OBJEXT): {$(VPATH)}assert.h
@@ -7112,6 +7156,7 @@ hash.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
hash.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
hash.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
hash.$(OBJEXT): {$(VPATH)}config.h
+hash.$(OBJEXT): {$(VPATH)}constant.h
hash.$(OBJEXT): {$(VPATH)}debug_counter.h
hash.$(OBJEXT): {$(VPATH)}defines.h
hash.$(OBJEXT): {$(VPATH)}encoding.h
@@ -7276,6 +7321,7 @@ hash.$(OBJEXT): {$(VPATH)}probes.dmyh
hash.$(OBJEXT): {$(VPATH)}probes.h
hash.$(OBJEXT): {$(VPATH)}ractor.h
hash.$(OBJEXT): {$(VPATH)}ruby_assert.h
+hash.$(OBJEXT): {$(VPATH)}shape.h
hash.$(OBJEXT): {$(VPATH)}st.h
hash.$(OBJEXT): {$(VPATH)}subst.h
hash.$(OBJEXT): {$(VPATH)}symbol.h
@@ -7660,6 +7706,7 @@ io.$(OBJEXT): {$(VPATH)}oniguruma.h
io.$(OBJEXT): {$(VPATH)}ractor.h
io.$(OBJEXT): {$(VPATH)}ruby_assert.h
io.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+io.$(OBJEXT): {$(VPATH)}shape.h
io.$(OBJEXT): {$(VPATH)}st.h
io.$(OBJEXT): {$(VPATH)}subst.h
io.$(OBJEXT): {$(VPATH)}thread.h
@@ -8062,6 +8109,7 @@ iseq.$(OBJEXT): {$(VPATH)}oniguruma.h
iseq.$(OBJEXT): {$(VPATH)}ractor.h
iseq.$(OBJEXT): {$(VPATH)}ruby_assert.h
iseq.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+iseq.$(OBJEXT): {$(VPATH)}shape.h
iseq.$(OBJEXT): {$(VPATH)}st.h
iseq.$(OBJEXT): {$(VPATH)}subst.h
iseq.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -8274,6 +8322,7 @@ load.$(OBJEXT): {$(VPATH)}probes.dmyh
load.$(OBJEXT): {$(VPATH)}probes.h
load.$(OBJEXT): {$(VPATH)}ruby_assert.h
load.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+load.$(OBJEXT): {$(VPATH)}shape.h
load.$(OBJEXT): {$(VPATH)}st.h
load.$(OBJEXT): {$(VPATH)}subst.h
load.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -8792,6 +8841,7 @@ marshal.$(OBJEXT): $(top_srcdir)/internal/string.h
marshal.$(OBJEXT): $(top_srcdir)/internal/struct.h
marshal.$(OBJEXT): $(top_srcdir)/internal/symbol.h
marshal.$(OBJEXT): $(top_srcdir)/internal/util.h
+marshal.$(OBJEXT): $(top_srcdir)/internal/variable.h
marshal.$(OBJEXT): $(top_srcdir)/internal/vm.h
marshal.$(OBJEXT): $(top_srcdir)/internal/warnings.h
marshal.$(OBJEXT): {$(VPATH)}assert.h
@@ -8806,6 +8856,7 @@ marshal.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
marshal.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
marshal.$(OBJEXT): {$(VPATH)}builtin.h
marshal.$(OBJEXT): {$(VPATH)}config.h
+marshal.$(OBJEXT): {$(VPATH)}constant.h
marshal.$(OBJEXT): {$(VPATH)}defines.h
marshal.$(OBJEXT): {$(VPATH)}encindex.h
marshal.$(OBJEXT): {$(VPATH)}encoding.h
@@ -8967,6 +9018,7 @@ marshal.$(OBJEXT): {$(VPATH)}marshal.rbinc
marshal.$(OBJEXT): {$(VPATH)}missing.h
marshal.$(OBJEXT): {$(VPATH)}onigmo.h
marshal.$(OBJEXT): {$(VPATH)}oniguruma.h
+marshal.$(OBJEXT): {$(VPATH)}shape.h
marshal.$(OBJEXT): {$(VPATH)}st.h
marshal.$(OBJEXT): {$(VPATH)}subst.h
marshal.$(OBJEXT): {$(VPATH)}util.h
@@ -8980,6 +9032,7 @@ math.$(OBJEXT): $(top_srcdir)/internal/math.h
math.$(OBJEXT): $(top_srcdir)/internal/object.h
math.$(OBJEXT): $(top_srcdir)/internal/serial.h
math.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+math.$(OBJEXT): $(top_srcdir)/internal/variable.h
math.$(OBJEXT): $(top_srcdir)/internal/vm.h
math.$(OBJEXT): $(top_srcdir)/internal/warnings.h
math.$(OBJEXT): {$(VPATH)}assert.h
@@ -8993,6 +9046,7 @@ math.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
math.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
math.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
math.$(OBJEXT): {$(VPATH)}config.h
+math.$(OBJEXT): {$(VPATH)}constant.h
math.$(OBJEXT): {$(VPATH)}defines.h
math.$(OBJEXT): {$(VPATH)}id_table.h
math.$(OBJEXT): {$(VPATH)}intern.h
@@ -9139,6 +9193,7 @@ math.$(OBJEXT): {$(VPATH)}internal/warning_push.h
math.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
math.$(OBJEXT): {$(VPATH)}math.c
math.$(OBJEXT): {$(VPATH)}missing.h
+math.$(OBJEXT): {$(VPATH)}shape.h
math.$(OBJEXT): {$(VPATH)}st.h
math.$(OBJEXT): {$(VPATH)}subst.h
memory_view.$(OBJEXT): $(hdrdir)/ruby/ruby.h
@@ -9320,6 +9375,7 @@ miniinit.$(OBJEXT): $(top_srcdir)/internal/gc.h
miniinit.$(OBJEXT): $(top_srcdir)/internal/imemo.h
miniinit.$(OBJEXT): $(top_srcdir)/internal/serial.h
miniinit.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+miniinit.$(OBJEXT): $(top_srcdir)/internal/variable.h
miniinit.$(OBJEXT): $(top_srcdir)/internal/vm.h
miniinit.$(OBJEXT): $(top_srcdir)/internal/warnings.h
miniinit.$(OBJEXT): {$(VPATH)}array.rb
@@ -9337,12 +9393,14 @@ miniinit.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
miniinit.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
miniinit.$(OBJEXT): {$(VPATH)}builtin.h
miniinit.$(OBJEXT): {$(VPATH)}config.h
+miniinit.$(OBJEXT): {$(VPATH)}constant.h
miniinit.$(OBJEXT): {$(VPATH)}defines.h
miniinit.$(OBJEXT): {$(VPATH)}dir.rb
miniinit.$(OBJEXT): {$(VPATH)}encoding.h
miniinit.$(OBJEXT): {$(VPATH)}gc.rb
miniinit.$(OBJEXT): {$(VPATH)}gem_prelude.rb
miniinit.$(OBJEXT): {$(VPATH)}id.h
+miniinit.$(OBJEXT): {$(VPATH)}id_table.h
miniinit.$(OBJEXT): {$(VPATH)}intern.h
miniinit.$(OBJEXT): {$(VPATH)}internal.h
miniinit.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -9516,6 +9574,7 @@ miniinit.$(OBJEXT): {$(VPATH)}prelude.rb
miniinit.$(OBJEXT): {$(VPATH)}ractor.rb
miniinit.$(OBJEXT): {$(VPATH)}ruby_assert.h
miniinit.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+miniinit.$(OBJEXT): {$(VPATH)}shape.h
miniinit.$(OBJEXT): {$(VPATH)}st.h
miniinit.$(OBJEXT): {$(VPATH)}subst.h
miniinit.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -9547,6 +9606,7 @@ mjit.$(OBJEXT): $(top_srcdir)/internal/imemo.h
mjit.$(OBJEXT): $(top_srcdir)/internal/process.h
mjit.$(OBJEXT): $(top_srcdir)/internal/serial.h
mjit.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+mjit.$(OBJEXT): $(top_srcdir)/internal/variable.h
mjit.$(OBJEXT): $(top_srcdir)/internal/vm.h
mjit.$(OBJEXT): $(top_srcdir)/internal/warnings.h
mjit.$(OBJEXT): {$(VPATH)}assert.h
@@ -9741,6 +9801,7 @@ mjit.$(OBJEXT): {$(VPATH)}ractor.h
mjit.$(OBJEXT): {$(VPATH)}ractor_core.h
mjit.$(OBJEXT): {$(VPATH)}ruby_assert.h
mjit.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+mjit.$(OBJEXT): {$(VPATH)}shape.h
mjit.$(OBJEXT): {$(VPATH)}st.h
mjit.$(OBJEXT): {$(VPATH)}subst.h
mjit.$(OBJEXT): {$(VPATH)}thread.h
@@ -9951,6 +10012,7 @@ mjit_compiler.$(OBJEXT): {$(VPATH)}mjit_unit.h
mjit_compiler.$(OBJEXT): {$(VPATH)}node.h
mjit_compiler.$(OBJEXT): {$(VPATH)}ruby_assert.h
mjit_compiler.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+mjit_compiler.$(OBJEXT): {$(VPATH)}shape.h
mjit_compiler.$(OBJEXT): {$(VPATH)}st.h
mjit_compiler.$(OBJEXT): {$(VPATH)}subst.h
mjit_compiler.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -10140,6 +10202,7 @@ node.$(OBJEXT): {$(VPATH)}node.c
node.$(OBJEXT): {$(VPATH)}node.h
node.$(OBJEXT): {$(VPATH)}ruby_assert.h
node.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+node.$(OBJEXT): {$(VPATH)}shape.h
node.$(OBJEXT): {$(VPATH)}st.h
node.$(OBJEXT): {$(VPATH)}subst.h
node.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -10342,6 +10405,7 @@ numeric.$(OBJEXT): {$(VPATH)}numeric.rbinc
numeric.$(OBJEXT): {$(VPATH)}onigmo.h
numeric.$(OBJEXT): {$(VPATH)}oniguruma.h
numeric.$(OBJEXT): {$(VPATH)}ruby_assert.h
+numeric.$(OBJEXT): {$(VPATH)}shape.h
numeric.$(OBJEXT): {$(VPATH)}st.h
numeric.$(OBJEXT): {$(VPATH)}subst.h
numeric.$(OBJEXT): {$(VPATH)}util.h
@@ -10543,6 +10607,7 @@ object.$(OBJEXT): {$(VPATH)}onigmo.h
object.$(OBJEXT): {$(VPATH)}oniguruma.h
object.$(OBJEXT): {$(VPATH)}probes.dmyh
object.$(OBJEXT): {$(VPATH)}probes.h
+object.$(OBJEXT): {$(VPATH)}shape.h
object.$(OBJEXT): {$(VPATH)}st.h
object.$(OBJEXT): {$(VPATH)}subst.h
object.$(OBJEXT): {$(VPATH)}util.h
@@ -10961,6 +11026,7 @@ proc.$(OBJEXT): $(top_srcdir)/internal/serial.h
proc.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
proc.$(OBJEXT): $(top_srcdir)/internal/string.h
proc.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+proc.$(OBJEXT): $(top_srcdir)/internal/variable.h
proc.$(OBJEXT): $(top_srcdir)/internal/vm.h
proc.$(OBJEXT): $(top_srcdir)/internal/warnings.h
proc.$(OBJEXT): {$(VPATH)}assert.h
@@ -10975,6 +11041,7 @@ proc.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
proc.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
proc.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
proc.$(OBJEXT): {$(VPATH)}config.h
+proc.$(OBJEXT): {$(VPATH)}constant.h
proc.$(OBJEXT): {$(VPATH)}defines.h
proc.$(OBJEXT): {$(VPATH)}encoding.h
proc.$(OBJEXT): {$(VPATH)}eval_intern.h
@@ -11141,6 +11208,7 @@ proc.$(OBJEXT): {$(VPATH)}oniguruma.h
proc.$(OBJEXT): {$(VPATH)}proc.c
proc.$(OBJEXT): {$(VPATH)}ruby_assert.h
proc.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+proc.$(OBJEXT): {$(VPATH)}shape.h
proc.$(OBJEXT): {$(VPATH)}st.h
proc.$(OBJEXT): {$(VPATH)}subst.h
proc.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -11360,6 +11428,7 @@ process.$(OBJEXT): {$(VPATH)}process.c
process.$(OBJEXT): {$(VPATH)}ractor.h
process.$(OBJEXT): {$(VPATH)}ruby_assert.h
process.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+process.$(OBJEXT): {$(VPATH)}shape.h
process.$(OBJEXT): {$(VPATH)}st.h
process.$(OBJEXT): {$(VPATH)}subst.h
process.$(OBJEXT): {$(VPATH)}thread.h
@@ -11390,6 +11459,7 @@ ractor.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
ractor.$(OBJEXT): $(top_srcdir)/internal/string.h
ractor.$(OBJEXT): $(top_srcdir)/internal/struct.h
ractor.$(OBJEXT): $(top_srcdir)/internal/thread.h
+ractor.$(OBJEXT): $(top_srcdir)/internal/variable.h
ractor.$(OBJEXT): $(top_srcdir)/internal/vm.h
ractor.$(OBJEXT): $(top_srcdir)/internal/warnings.h
ractor.$(OBJEXT): {$(VPATH)}assert.h
@@ -11405,6 +11475,7 @@ ractor.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
ractor.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
ractor.$(OBJEXT): {$(VPATH)}builtin.h
ractor.$(OBJEXT): {$(VPATH)}config.h
+ractor.$(OBJEXT): {$(VPATH)}constant.h
ractor.$(OBJEXT): {$(VPATH)}debug_counter.h
ractor.$(OBJEXT): {$(VPATH)}defines.h
ractor.$(OBJEXT): {$(VPATH)}encoding.h
@@ -11574,6 +11645,7 @@ ractor.$(OBJEXT): {$(VPATH)}ractor.rbinc
ractor.$(OBJEXT): {$(VPATH)}ractor_core.h
ractor.$(OBJEXT): {$(VPATH)}ruby_assert.h
ractor.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+ractor.$(OBJEXT): {$(VPATH)}shape.h
ractor.$(OBJEXT): {$(VPATH)}st.h
ractor.$(OBJEXT): {$(VPATH)}subst.h
ractor.$(OBJEXT): {$(VPATH)}thread.h
@@ -11971,6 +12043,7 @@ rational.$(OBJEXT): $(top_srcdir)/internal/object.h
rational.$(OBJEXT): $(top_srcdir)/internal/rational.h
rational.$(OBJEXT): $(top_srcdir)/internal/serial.h
rational.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+rational.$(OBJEXT): $(top_srcdir)/internal/variable.h
rational.$(OBJEXT): $(top_srcdir)/internal/vm.h
rational.$(OBJEXT): $(top_srcdir)/internal/warnings.h
rational.$(OBJEXT): {$(VPATH)}assert.h
@@ -11984,6 +12057,7 @@ rational.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
rational.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
rational.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
rational.$(OBJEXT): {$(VPATH)}config.h
+rational.$(OBJEXT): {$(VPATH)}constant.h
rational.$(OBJEXT): {$(VPATH)}defines.h
rational.$(OBJEXT): {$(VPATH)}id.h
rational.$(OBJEXT): {$(VPATH)}id_table.h
@@ -12132,6 +12206,7 @@ rational.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
rational.$(OBJEXT): {$(VPATH)}missing.h
rational.$(OBJEXT): {$(VPATH)}rational.c
rational.$(OBJEXT): {$(VPATH)}ruby_assert.h
+rational.$(OBJEXT): {$(VPATH)}shape.h
rational.$(OBJEXT): {$(VPATH)}st.h
rational.$(OBJEXT): {$(VPATH)}subst.h
re.$(OBJEXT): $(hdrdir)/ruby.h
@@ -12329,6 +12404,7 @@ re.$(OBJEXT): {$(VPATH)}re.h
re.$(OBJEXT): {$(VPATH)}regenc.h
re.$(OBJEXT): {$(VPATH)}regex.h
re.$(OBJEXT): {$(VPATH)}regint.h
+re.$(OBJEXT): {$(VPATH)}shape.h
re.$(OBJEXT): {$(VPATH)}st.h
re.$(OBJEXT): {$(VPATH)}subst.h
re.$(OBJEXT): {$(VPATH)}util.h
@@ -13526,6 +13602,7 @@ ruby.$(OBJEXT): {$(VPATH)}oniguruma.h
ruby.$(OBJEXT): {$(VPATH)}ruby.c
ruby.$(OBJEXT): {$(VPATH)}ruby_assert.h
ruby.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+ruby.$(OBJEXT): {$(VPATH)}shape.h
ruby.$(OBJEXT): {$(VPATH)}st.h
ruby.$(OBJEXT): {$(VPATH)}subst.h
ruby.$(OBJEXT): {$(VPATH)}thread.h
@@ -13547,6 +13624,7 @@ scheduler.$(OBJEXT): $(top_srcdir)/internal/imemo.h
scheduler.$(OBJEXT): $(top_srcdir)/internal/serial.h
scheduler.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
scheduler.$(OBJEXT): $(top_srcdir)/internal/thread.h
+scheduler.$(OBJEXT): $(top_srcdir)/internal/variable.h
scheduler.$(OBJEXT): $(top_srcdir)/internal/vm.h
scheduler.$(OBJEXT): $(top_srcdir)/internal/warnings.h
scheduler.$(OBJEXT): {$(VPATH)}assert.h
@@ -13561,10 +13639,12 @@ scheduler.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
scheduler.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
scheduler.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
scheduler.$(OBJEXT): {$(VPATH)}config.h
+scheduler.$(OBJEXT): {$(VPATH)}constant.h
scheduler.$(OBJEXT): {$(VPATH)}defines.h
scheduler.$(OBJEXT): {$(VPATH)}encoding.h
scheduler.$(OBJEXT): {$(VPATH)}fiber/scheduler.h
scheduler.$(OBJEXT): {$(VPATH)}id.h
+scheduler.$(OBJEXT): {$(VPATH)}id_table.h
scheduler.$(OBJEXT): {$(VPATH)}intern.h
scheduler.$(OBJEXT): {$(VPATH)}internal.h
scheduler.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -13726,6 +13806,7 @@ scheduler.$(OBJEXT): {$(VPATH)}oniguruma.h
scheduler.$(OBJEXT): {$(VPATH)}ruby_assert.h
scheduler.$(OBJEXT): {$(VPATH)}ruby_atomic.h
scheduler.$(OBJEXT): {$(VPATH)}scheduler.c
+scheduler.$(OBJEXT): {$(VPATH)}shape.h
scheduler.$(OBJEXT): {$(VPATH)}st.h
scheduler.$(OBJEXT): {$(VPATH)}subst.h
scheduler.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -13891,6 +13972,208 @@ setproctitle.$(OBJEXT): {$(VPATH)}setproctitle.c
setproctitle.$(OBJEXT): {$(VPATH)}st.h
setproctitle.$(OBJEXT): {$(VPATH)}subst.h
setproctitle.$(OBJEXT): {$(VPATH)}util.h
+shape.$(OBJEXT): $(CCAN_DIR)/check_type/check_type.h
+shape.$(OBJEXT): $(CCAN_DIR)/container_of/container_of.h
+shape.$(OBJEXT): $(CCAN_DIR)/list/list.h
+shape.$(OBJEXT): $(CCAN_DIR)/str/str.h
+shape.$(OBJEXT): $(hdrdir)/ruby/ruby.h
+shape.$(OBJEXT): $(top_srcdir)/internal/array.h
+shape.$(OBJEXT): $(top_srcdir)/internal/class.h
+shape.$(OBJEXT): $(top_srcdir)/internal/compilers.h
+shape.$(OBJEXT): $(top_srcdir)/internal/gc.h
+shape.$(OBJEXT): $(top_srcdir)/internal/imemo.h
+shape.$(OBJEXT): $(top_srcdir)/internal/serial.h
+shape.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+shape.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+shape.$(OBJEXT): $(top_srcdir)/internal/variable.h
+shape.$(OBJEXT): $(top_srcdir)/internal/vm.h
+shape.$(OBJEXT): $(top_srcdir)/internal/warnings.h
+shape.$(OBJEXT): {$(VPATH)}assert.h
+shape.$(OBJEXT): {$(VPATH)}atomic.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/assume.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/attributes.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/bool.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/gcc_version_since.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/inttypes.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/limits.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
+shape.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
+shape.$(OBJEXT): {$(VPATH)}config.h
+shape.$(OBJEXT): {$(VPATH)}constant.h
+shape.$(OBJEXT): {$(VPATH)}debug_counter.h
+shape.$(OBJEXT): {$(VPATH)}defines.h
+shape.$(OBJEXT): {$(VPATH)}encoding.h
+shape.$(OBJEXT): {$(VPATH)}id.h
+shape.$(OBJEXT): {$(VPATH)}id_table.h
+shape.$(OBJEXT): {$(VPATH)}intern.h
+shape.$(OBJEXT): {$(VPATH)}internal.h
+shape.$(OBJEXT): {$(VPATH)}internal/abi.h
+shape.$(OBJEXT): {$(VPATH)}internal/anyargs.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/char.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/double.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/fixnum.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/gid_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/int.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/intptr_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/long.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/long_long.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/mode_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/off_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/pid_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/short.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/size_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/st_data_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/arithmetic/uid_t.h
+shape.$(OBJEXT): {$(VPATH)}internal/assume.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/alloc_size.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/artificial.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/cold.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/const.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/constexpr.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/deprecated.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/diagnose_if.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/enum_extensibility.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/error.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/flag_enum.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/forceinline.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/format.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/maybe_unused.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/noalias.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/nodiscard.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/noexcept.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/noinline.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/nonnull.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/noreturn.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/pure.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/restrict.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/returns_nonnull.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/warning.h
+shape.$(OBJEXT): {$(VPATH)}internal/attr/weakref.h
+shape.$(OBJEXT): {$(VPATH)}internal/cast.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/apple.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/clang.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/gcc.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/intel.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/msvc.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_is/sunpro.h
+shape.$(OBJEXT): {$(VPATH)}internal/compiler_since.h
+shape.$(OBJEXT): {$(VPATH)}internal/config.h
+shape.$(OBJEXT): {$(VPATH)}internal/constant_p.h
+shape.$(OBJEXT): {$(VPATH)}internal/core.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rarray.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rbasic.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rbignum.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rclass.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rdata.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rfile.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rhash.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/robject.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rregexp.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rstring.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rstruct.h
+shape.$(OBJEXT): {$(VPATH)}internal/core/rtypeddata.h
+shape.$(OBJEXT): {$(VPATH)}internal/ctype.h
+shape.$(OBJEXT): {$(VPATH)}internal/dllexport.h
+shape.$(OBJEXT): {$(VPATH)}internal/dosish.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/coderange.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/ctype.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/encoding.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/pathname.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/re.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/sprintf.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/string.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/symbol.h
+shape.$(OBJEXT): {$(VPATH)}internal/encoding/transcode.h
+shape.$(OBJEXT): {$(VPATH)}internal/error.h
+shape.$(OBJEXT): {$(VPATH)}internal/eval.h
+shape.$(OBJEXT): {$(VPATH)}internal/event.h
+shape.$(OBJEXT): {$(VPATH)}internal/fl_type.h
+shape.$(OBJEXT): {$(VPATH)}internal/gc.h
+shape.$(OBJEXT): {$(VPATH)}internal/glob.h
+shape.$(OBJEXT): {$(VPATH)}internal/globals.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/attribute.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/builtin.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/c_attribute.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/cpp_attribute.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/declspec_attribute.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/extension.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/feature.h
+shape.$(OBJEXT): {$(VPATH)}internal/has/warning.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/array.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/bignum.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/class.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/compar.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/complex.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/cont.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/dir.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/enum.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/enumerator.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/error.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/eval.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/file.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/gc.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/hash.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/io.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/load.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/marshal.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/numeric.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/object.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/parse.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/proc.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/process.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/random.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/range.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/rational.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/re.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/ruby.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/select.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/select/largesize.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/signal.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/sprintf.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/string.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/struct.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/thread.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/time.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/variable.h
+shape.$(OBJEXT): {$(VPATH)}internal/intern/vm.h
+shape.$(OBJEXT): {$(VPATH)}internal/interpreter.h
+shape.$(OBJEXT): {$(VPATH)}internal/iterator.h
+shape.$(OBJEXT): {$(VPATH)}internal/memory.h
+shape.$(OBJEXT): {$(VPATH)}internal/method.h
+shape.$(OBJEXT): {$(VPATH)}internal/module.h
+shape.$(OBJEXT): {$(VPATH)}internal/newobj.h
+shape.$(OBJEXT): {$(VPATH)}internal/rgengc.h
+shape.$(OBJEXT): {$(VPATH)}internal/scan_args.h
+shape.$(OBJEXT): {$(VPATH)}internal/special_consts.h
+shape.$(OBJEXT): {$(VPATH)}internal/static_assert.h
+shape.$(OBJEXT): {$(VPATH)}internal/stdalign.h
+shape.$(OBJEXT): {$(VPATH)}internal/stdbool.h
+shape.$(OBJEXT): {$(VPATH)}internal/symbol.h
+shape.$(OBJEXT): {$(VPATH)}internal/value.h
+shape.$(OBJEXT): {$(VPATH)}internal/value_type.h
+shape.$(OBJEXT): {$(VPATH)}internal/variable.h
+shape.$(OBJEXT): {$(VPATH)}internal/warning_push.h
+shape.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
+shape.$(OBJEXT): {$(VPATH)}method.h
+shape.$(OBJEXT): {$(VPATH)}missing.h
+shape.$(OBJEXT): {$(VPATH)}node.h
+shape.$(OBJEXT): {$(VPATH)}onigmo.h
+shape.$(OBJEXT): {$(VPATH)}oniguruma.h
+shape.$(OBJEXT): {$(VPATH)}ruby_assert.h
+shape.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+shape.$(OBJEXT): {$(VPATH)}shape.c
+shape.$(OBJEXT): {$(VPATH)}shape.h
+shape.$(OBJEXT): {$(VPATH)}st.h
+shape.$(OBJEXT): {$(VPATH)}subst.h
+shape.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
+shape.$(OBJEXT): {$(VPATH)}thread_native.h
+shape.$(OBJEXT): {$(VPATH)}vm_core.h
+shape.$(OBJEXT): {$(VPATH)}vm_debug.h
+shape.$(OBJEXT): {$(VPATH)}vm_opts.h
+shape.$(OBJEXT): {$(VPATH)}vm_sync.h
signal.$(OBJEXT): $(CCAN_DIR)/check_type/check_type.h
signal.$(OBJEXT): $(CCAN_DIR)/container_of/container_of.h
signal.$(OBJEXT): $(CCAN_DIR)/list/list.h
@@ -13907,6 +14190,7 @@ signal.$(OBJEXT): $(top_srcdir)/internal/signal.h
signal.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
signal.$(OBJEXT): $(top_srcdir)/internal/string.h
signal.$(OBJEXT): $(top_srcdir)/internal/thread.h
+signal.$(OBJEXT): $(top_srcdir)/internal/variable.h
signal.$(OBJEXT): $(top_srcdir)/internal/vm.h
signal.$(OBJEXT): $(top_srcdir)/internal/warnings.h
signal.$(OBJEXT): {$(VPATH)}assert.h
@@ -13921,6 +14205,7 @@ signal.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
signal.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
signal.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
signal.$(OBJEXT): {$(VPATH)}config.h
+signal.$(OBJEXT): {$(VPATH)}constant.h
signal.$(OBJEXT): {$(VPATH)}debug_counter.h
signal.$(OBJEXT): {$(VPATH)}defines.h
signal.$(OBJEXT): {$(VPATH)}encoding.h
@@ -14087,6 +14372,7 @@ signal.$(OBJEXT): {$(VPATH)}ractor.h
signal.$(OBJEXT): {$(VPATH)}ractor_core.h
signal.$(OBJEXT): {$(VPATH)}ruby_assert.h
signal.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+signal.$(OBJEXT): {$(VPATH)}shape.h
signal.$(OBJEXT): {$(VPATH)}signal.c
signal.$(OBJEXT): {$(VPATH)}st.h
signal.$(OBJEXT): {$(VPATH)}subst.h
@@ -14111,6 +14397,7 @@ sprintf.$(OBJEXT): $(top_srcdir)/internal/serial.h
sprintf.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
sprintf.$(OBJEXT): $(top_srcdir)/internal/string.h
sprintf.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+sprintf.$(OBJEXT): $(top_srcdir)/internal/variable.h
sprintf.$(OBJEXT): $(top_srcdir)/internal/vm.h
sprintf.$(OBJEXT): $(top_srcdir)/internal/warnings.h
sprintf.$(OBJEXT): {$(VPATH)}assert.h
@@ -14124,6 +14411,7 @@ sprintf.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
sprintf.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
sprintf.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
sprintf.$(OBJEXT): {$(VPATH)}config.h
+sprintf.$(OBJEXT): {$(VPATH)}constant.h
sprintf.$(OBJEXT): {$(VPATH)}defines.h
sprintf.$(OBJEXT): {$(VPATH)}encoding.h
sprintf.$(OBJEXT): {$(VPATH)}id.h
@@ -14285,6 +14573,7 @@ sprintf.$(OBJEXT): {$(VPATH)}onigmo.h
sprintf.$(OBJEXT): {$(VPATH)}oniguruma.h
sprintf.$(OBJEXT): {$(VPATH)}re.h
sprintf.$(OBJEXT): {$(VPATH)}regex.h
+sprintf.$(OBJEXT): {$(VPATH)}shape.h
sprintf.$(OBJEXT): {$(VPATH)}sprintf.c
sprintf.$(OBJEXT): {$(VPATH)}st.h
sprintf.$(OBJEXT): {$(VPATH)}subst.h
@@ -14653,6 +14942,7 @@ string.$(OBJEXT): $(top_srcdir)/internal/serial.h
string.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
string.$(OBJEXT): $(top_srcdir)/internal/string.h
string.$(OBJEXT): $(top_srcdir)/internal/transcode.h
+string.$(OBJEXT): $(top_srcdir)/internal/variable.h
string.$(OBJEXT): $(top_srcdir)/internal/vm.h
string.$(OBJEXT): $(top_srcdir)/internal/warnings.h
string.$(OBJEXT): {$(VPATH)}assert.h
@@ -14667,6 +14957,7 @@ string.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
string.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
string.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
string.$(OBJEXT): {$(VPATH)}config.h
+string.$(OBJEXT): {$(VPATH)}constant.h
string.$(OBJEXT): {$(VPATH)}debug_counter.h
string.$(OBJEXT): {$(VPATH)}defines.h
string.$(OBJEXT): {$(VPATH)}encindex.h
@@ -14834,6 +15125,7 @@ string.$(OBJEXT): {$(VPATH)}probes.h
string.$(OBJEXT): {$(VPATH)}re.h
string.$(OBJEXT): {$(VPATH)}regex.h
string.$(OBJEXT): {$(VPATH)}ruby_assert.h
+string.$(OBJEXT): {$(VPATH)}shape.h
string.$(OBJEXT): {$(VPATH)}st.h
string.$(OBJEXT): {$(VPATH)}string.c
string.$(OBJEXT): {$(VPATH)}subst.h
@@ -14890,6 +15182,7 @@ struct.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
struct.$(OBJEXT): $(top_srcdir)/internal/string.h
struct.$(OBJEXT): $(top_srcdir)/internal/struct.h
struct.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+struct.$(OBJEXT): $(top_srcdir)/internal/variable.h
struct.$(OBJEXT): $(top_srcdir)/internal/vm.h
struct.$(OBJEXT): $(top_srcdir)/internal/warnings.h
struct.$(OBJEXT): {$(VPATH)}assert.h
@@ -14905,6 +15198,7 @@ struct.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
struct.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
struct.$(OBJEXT): {$(VPATH)}builtin.h
struct.$(OBJEXT): {$(VPATH)}config.h
+struct.$(OBJEXT): {$(VPATH)}constant.h
struct.$(OBJEXT): {$(VPATH)}defines.h
struct.$(OBJEXT): {$(VPATH)}encoding.h
struct.$(OBJEXT): {$(VPATH)}id.h
@@ -15067,6 +15361,7 @@ struct.$(OBJEXT): {$(VPATH)}onigmo.h
struct.$(OBJEXT): {$(VPATH)}oniguruma.h
struct.$(OBJEXT): {$(VPATH)}ruby_assert.h
struct.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+struct.$(OBJEXT): {$(VPATH)}shape.h
struct.$(OBJEXT): {$(VPATH)}st.h
struct.$(OBJEXT): {$(VPATH)}struct.c
struct.$(OBJEXT): {$(VPATH)}subst.h
@@ -15086,6 +15381,7 @@ symbol.$(OBJEXT): $(top_srcdir)/internal/serial.h
symbol.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
symbol.$(OBJEXT): $(top_srcdir)/internal/string.h
symbol.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+symbol.$(OBJEXT): $(top_srcdir)/internal/variable.h
symbol.$(OBJEXT): $(top_srcdir)/internal/vm.h
symbol.$(OBJEXT): $(top_srcdir)/internal/warnings.h
symbol.$(OBJEXT): {$(VPATH)}assert.h
@@ -15099,6 +15395,7 @@ symbol.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
symbol.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
symbol.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
symbol.$(OBJEXT): {$(VPATH)}config.h
+symbol.$(OBJEXT): {$(VPATH)}constant.h
symbol.$(OBJEXT): {$(VPATH)}debug_counter.h
symbol.$(OBJEXT): {$(VPATH)}defines.h
symbol.$(OBJEXT): {$(VPATH)}encoding.h
@@ -15264,6 +15561,7 @@ symbol.$(OBJEXT): {$(VPATH)}oniguruma.h
symbol.$(OBJEXT): {$(VPATH)}probes.dmyh
symbol.$(OBJEXT): {$(VPATH)}probes.h
symbol.$(OBJEXT): {$(VPATH)}ruby_assert.h
+symbol.$(OBJEXT): {$(VPATH)}shape.h
symbol.$(OBJEXT): {$(VPATH)}st.h
symbol.$(OBJEXT): {$(VPATH)}subst.h
symbol.$(OBJEXT): {$(VPATH)}symbol.c
@@ -15294,6 +15592,7 @@ thread.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
thread.$(OBJEXT): $(top_srcdir)/internal/string.h
thread.$(OBJEXT): $(top_srcdir)/internal/thread.h
thread.$(OBJEXT): $(top_srcdir)/internal/time.h
+thread.$(OBJEXT): $(top_srcdir)/internal/variable.h
thread.$(OBJEXT): $(top_srcdir)/internal/vm.h
thread.$(OBJEXT): $(top_srcdir)/internal/warnings.h
thread.$(OBJEXT): {$(VPATH)}assert.h
@@ -15309,6 +15608,7 @@ thread.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
thread.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
thread.$(OBJEXT): {$(VPATH)}builtin.h
thread.$(OBJEXT): {$(VPATH)}config.h
+thread.$(OBJEXT): {$(VPATH)}constant.h
thread.$(OBJEXT): {$(VPATH)}debug.h
thread.$(OBJEXT): {$(VPATH)}debug_counter.h
thread.$(OBJEXT): {$(VPATH)}defines.h
@@ -15482,6 +15782,7 @@ thread.$(OBJEXT): {$(VPATH)}ractor.h
thread.$(OBJEXT): {$(VPATH)}ractor_core.h
thread.$(OBJEXT): {$(VPATH)}ruby_assert.h
thread.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+thread.$(OBJEXT): {$(VPATH)}shape.h
thread.$(OBJEXT): {$(VPATH)}st.h
thread.$(OBJEXT): {$(VPATH)}subst.h
thread.$(OBJEXT): {$(VPATH)}thread.c
@@ -15702,6 +16003,7 @@ transcode.$(OBJEXT): $(top_srcdir)/internal/serial.h
transcode.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
transcode.$(OBJEXT): $(top_srcdir)/internal/string.h
transcode.$(OBJEXT): $(top_srcdir)/internal/transcode.h
+transcode.$(OBJEXT): $(top_srcdir)/internal/variable.h
transcode.$(OBJEXT): $(top_srcdir)/internal/warnings.h
transcode.$(OBJEXT): {$(VPATH)}assert.h
transcode.$(OBJEXT): {$(VPATH)}backward/2/assume.h
@@ -15714,6 +16016,7 @@ transcode.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
transcode.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
transcode.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
transcode.$(OBJEXT): {$(VPATH)}config.h
+transcode.$(OBJEXT): {$(VPATH)}constant.h
transcode.$(OBJEXT): {$(VPATH)}defines.h
transcode.$(OBJEXT): {$(VPATH)}encoding.h
transcode.$(OBJEXT): {$(VPATH)}id.h
@@ -15872,6 +16175,7 @@ transcode.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
transcode.$(OBJEXT): {$(VPATH)}missing.h
transcode.$(OBJEXT): {$(VPATH)}onigmo.h
transcode.$(OBJEXT): {$(VPATH)}oniguruma.h
+transcode.$(OBJEXT): {$(VPATH)}shape.h
transcode.$(OBJEXT): {$(VPATH)}st.h
transcode.$(OBJEXT): {$(VPATH)}subst.h
transcode.$(OBJEXT): {$(VPATH)}transcode.c
@@ -16421,6 +16725,7 @@ variable.$(OBJEXT): {$(VPATH)}ractor.h
variable.$(OBJEXT): {$(VPATH)}ractor_core.h
variable.$(OBJEXT): {$(VPATH)}ruby_assert.h
variable.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+variable.$(OBJEXT): {$(VPATH)}shape.h
variable.$(OBJEXT): {$(VPATH)}st.h
variable.$(OBJEXT): {$(VPATH)}subst.h
variable.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -16446,6 +16751,7 @@ version.$(OBJEXT): $(top_srcdir)/internal/gc.h
version.$(OBJEXT): $(top_srcdir)/internal/imemo.h
version.$(OBJEXT): $(top_srcdir)/internal/serial.h
version.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+version.$(OBJEXT): $(top_srcdir)/internal/variable.h
version.$(OBJEXT): $(top_srcdir)/internal/vm.h
version.$(OBJEXT): $(top_srcdir)/internal/warnings.h
version.$(OBJEXT): $(top_srcdir)/revision.h
@@ -16462,9 +16768,11 @@ version.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
version.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
version.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
version.$(OBJEXT): {$(VPATH)}config.h
+version.$(OBJEXT): {$(VPATH)}constant.h
version.$(OBJEXT): {$(VPATH)}debug_counter.h
version.$(OBJEXT): {$(VPATH)}defines.h
version.$(OBJEXT): {$(VPATH)}id.h
+version.$(OBJEXT): {$(VPATH)}id_table.h
version.$(OBJEXT): {$(VPATH)}intern.h
version.$(OBJEXT): {$(VPATH)}internal.h
version.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -16613,6 +16921,7 @@ version.$(OBJEXT): {$(VPATH)}mjit.h
version.$(OBJEXT): {$(VPATH)}node.h
version.$(OBJEXT): {$(VPATH)}ruby_assert.h
version.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+version.$(OBJEXT): {$(VPATH)}shape.h
version.$(OBJEXT): {$(VPATH)}st.h
version.$(OBJEXT): {$(VPATH)}subst.h
version.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -16848,6 +17157,7 @@ vm.$(OBJEXT): {$(VPATH)}ractor.h
vm.$(OBJEXT): {$(VPATH)}ractor_core.h
vm.$(OBJEXT): {$(VPATH)}ruby_assert.h
vm.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+vm.$(OBJEXT): {$(VPATH)}shape.h
vm.$(OBJEXT): {$(VPATH)}st.h
vm.$(OBJEXT): {$(VPATH)}subst.h
vm.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -16884,6 +17194,7 @@ vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/imemo.h
vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/serial.h
vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/string.h
+vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/variable.h
vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/vm.h
vm_backtrace.$(OBJEXT): $(top_srcdir)/internal/warnings.h
vm_backtrace.$(OBJEXT): {$(VPATH)}assert.h
@@ -16898,11 +17209,13 @@ vm_backtrace.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
vm_backtrace.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
vm_backtrace.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
vm_backtrace.$(OBJEXT): {$(VPATH)}config.h
+vm_backtrace.$(OBJEXT): {$(VPATH)}constant.h
vm_backtrace.$(OBJEXT): {$(VPATH)}debug.h
vm_backtrace.$(OBJEXT): {$(VPATH)}defines.h
vm_backtrace.$(OBJEXT): {$(VPATH)}encoding.h
vm_backtrace.$(OBJEXT): {$(VPATH)}eval_intern.h
vm_backtrace.$(OBJEXT): {$(VPATH)}id.h
+vm_backtrace.$(OBJEXT): {$(VPATH)}id_table.h
vm_backtrace.$(OBJEXT): {$(VPATH)}intern.h
vm_backtrace.$(OBJEXT): {$(VPATH)}internal.h
vm_backtrace.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -17062,6 +17375,7 @@ vm_backtrace.$(OBJEXT): {$(VPATH)}onigmo.h
vm_backtrace.$(OBJEXT): {$(VPATH)}oniguruma.h
vm_backtrace.$(OBJEXT): {$(VPATH)}ruby_assert.h
vm_backtrace.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+vm_backtrace.$(OBJEXT): {$(VPATH)}shape.h
vm_backtrace.$(OBJEXT): {$(VPATH)}st.h
vm_backtrace.$(OBJEXT): {$(VPATH)}subst.h
vm_backtrace.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -17252,6 +17566,7 @@ vm_dump.$(OBJEXT): {$(VPATH)}ractor.h
vm_dump.$(OBJEXT): {$(VPATH)}ractor_core.h
vm_dump.$(OBJEXT): {$(VPATH)}ruby_assert.h
vm_dump.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+vm_dump.$(OBJEXT): {$(VPATH)}shape.h
vm_dump.$(OBJEXT): {$(VPATH)}st.h
vm_dump.$(OBJEXT): {$(VPATH)}subst.h
vm_dump.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -17271,6 +17586,7 @@ vm_sync.$(OBJEXT): $(top_srcdir)/internal/gc.h
vm_sync.$(OBJEXT): $(top_srcdir)/internal/imemo.h
vm_sync.$(OBJEXT): $(top_srcdir)/internal/serial.h
vm_sync.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+vm_sync.$(OBJEXT): $(top_srcdir)/internal/variable.h
vm_sync.$(OBJEXT): $(top_srcdir)/internal/vm.h
vm_sync.$(OBJEXT): $(top_srcdir)/internal/warnings.h
vm_sync.$(OBJEXT): {$(VPATH)}assert.h
@@ -17285,6 +17601,7 @@ vm_sync.$(OBJEXT): {$(VPATH)}backward/2/long_long.h
vm_sync.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
vm_sync.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
vm_sync.$(OBJEXT): {$(VPATH)}config.h
+vm_sync.$(OBJEXT): {$(VPATH)}constant.h
vm_sync.$(OBJEXT): {$(VPATH)}debug_counter.h
vm_sync.$(OBJEXT): {$(VPATH)}defines.h
vm_sync.$(OBJEXT): {$(VPATH)}gc.h
@@ -17439,6 +17756,7 @@ vm_sync.$(OBJEXT): {$(VPATH)}ractor.h
vm_sync.$(OBJEXT): {$(VPATH)}ractor_core.h
vm_sync.$(OBJEXT): {$(VPATH)}ruby_assert.h
vm_sync.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+vm_sync.$(OBJEXT): {$(VPATH)}shape.h
vm_sync.$(OBJEXT): {$(VPATH)}st.h
vm_sync.$(OBJEXT): {$(VPATH)}subst.h
vm_sync.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
@@ -17462,6 +17780,7 @@ vm_trace.$(OBJEXT): $(top_srcdir)/internal/imemo.h
vm_trace.$(OBJEXT): $(top_srcdir)/internal/serial.h
vm_trace.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
vm_trace.$(OBJEXT): $(top_srcdir)/internal/symbol.h
+vm_trace.$(OBJEXT): $(top_srcdir)/internal/variable.h
vm_trace.$(OBJEXT): $(top_srcdir)/internal/vm.h
vm_trace.$(OBJEXT): $(top_srcdir)/internal/warnings.h
vm_trace.$(OBJEXT): {$(VPATH)}assert.h
@@ -17477,12 +17796,14 @@ vm_trace.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
vm_trace.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
vm_trace.$(OBJEXT): {$(VPATH)}builtin.h
vm_trace.$(OBJEXT): {$(VPATH)}config.h
+vm_trace.$(OBJEXT): {$(VPATH)}constant.h
vm_trace.$(OBJEXT): {$(VPATH)}debug.h
vm_trace.$(OBJEXT): {$(VPATH)}debug_counter.h
vm_trace.$(OBJEXT): {$(VPATH)}defines.h
vm_trace.$(OBJEXT): {$(VPATH)}encoding.h
vm_trace.$(OBJEXT): {$(VPATH)}eval_intern.h
vm_trace.$(OBJEXT): {$(VPATH)}id.h
+vm_trace.$(OBJEXT): {$(VPATH)}id_table.h
vm_trace.$(OBJEXT): {$(VPATH)}intern.h
vm_trace.$(OBJEXT): {$(VPATH)}internal.h
vm_trace.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -17644,6 +17965,7 @@ vm_trace.$(OBJEXT): {$(VPATH)}oniguruma.h
vm_trace.$(OBJEXT): {$(VPATH)}ractor.h
vm_trace.$(OBJEXT): {$(VPATH)}ruby_assert.h
vm_trace.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+vm_trace.$(OBJEXT): {$(VPATH)}shape.h
vm_trace.$(OBJEXT): {$(VPATH)}st.h
vm_trace.$(OBJEXT): {$(VPATH)}subst.h
vm_trace.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
diff --git a/compile.c b/compile.c
index a5da919c0a..01f6abe6bc 100644
--- a/compile.c
+++ b/compile.c
@@ -2058,20 +2058,7 @@ cdhash_set_label_i(VALUE key, VALUE val, VALUE ptr)
static inline VALUE
get_ivar_ic_value(rb_iseq_t *iseq,ID id)
{
- VALUE val;
- struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;
- if (tbl) {
- if (rb_id_table_lookup(tbl,id,&val)) {
- return val;
- }
- }
- else {
- tbl = rb_id_table_create(1);
- ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;
- }
- val = INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
- rb_id_table_insert(tbl,id,val);
- return val;
+ return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
}
static inline VALUE
@@ -2472,9 +2459,13 @@ iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
generated_iseq[code_index + 1 + j] = (VALUE)ic;
}
break;
+ case TS_IVC: /* inline ivar cache */
+ {
+ unsigned int ic_index = FIX2UINT(operands[j]);
+ vm_ic_attr_index_initialize(((IVC)&body->is_entries[ic_index]), INVALID_SHAPE_ID);
+ }
case TS_ISE: /* inline storage entry: `once` insn */
case TS_ICVARC: /* inline cvar cache */
- case TS_IVC: /* inline ivar cache */
{
unsigned int ic_index = FIX2UINT(operands[j]);
IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
@@ -11514,6 +11505,11 @@ ibf_load_code(const struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t bytecod
ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
code[code_index] = (VALUE)ic;
+
+ if (operand_type == TS_IVC) {
+ vm_ic_attr_index_initialize(((IVC)code[code_index]), INVALID_SHAPE_ID);
+ }
+
}
break;
case TS_CALLDATA:
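
Note on the compile.c hunks above: the per-ISeq `ivar_cache_table` deduplication is gone, so every `getinstancevariable`/`setinstancevariable` gets its own IVC slot, and `TS_IVC` operands are initialized with `INVALID_SHAPE_ID` so the first execution is guaranteed to miss and then fill the cache. Below is a rough conceptual sketch of that reset in Ruby; the real helper is `vm_ic_attr_index_initialize` in vm_callinfo.h (outside this excerpt) and may differ in detail. The struct and constant here are illustrative stand-ins, with field names taken from the `iseq_inline_iv_cache_entry` binding later in this patch.

```ruby
# Illustrative stand-in for struct iseq_inline_iv_cache_entry.
IVCacheSketch = Struct.new(:source_shape_id, :dest_shape_id, :attr_index)

INVALID_SHAPE_ID_SKETCH = 0xFFFF # placeholder; the real value is SHAPE_MASK

# Reset the cache so the first get/setinstancevariable always misses.
def ivc_attr_index_initialize_sketch(ivc, shape_id)
  ivc.source_shape_id = shape_id
  ivc.dest_shape_id   = shape_id
  ivc.attr_index      = 0 # 0 means "no index cached"; real indexes are stored as index + 1
end

ivc = IVCacheSketch.new
ivc_attr_index_initialize_sketch(ivc, INVALID_SHAPE_ID_SKETCH)
ivc.attr_index # => 0
```
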
diff --git a/debug_counter.h b/debug_counter.h
index c6f4176e97..256633727f 100644
--- a/debug_counter.h
+++ b/debug_counter.h
@@ -130,7 +130,6 @@ RB_DEBUG_COUNTER(frame_C2R)
/* instance variable counts
*
* * ivar_get_ic_hit/miss: ivar_get inline cache (ic) hit/miss counts (VM insn)
- * * ivar_get_ic_miss_serial: ivar_get ic miss reason by serial (VM insn)
* * ivar_get_ic_miss_unset: ... by unset (VM insn)
* * ivar_get_ic_miss_noobject: ... by "not T_OBJECT" (VM insn)
* * ivar_set_...: same counts with ivar_set (VM insn)
@@ -140,17 +139,17 @@ RB_DEBUG_COUNTER(frame_C2R)
*/
RB_DEBUG_COUNTER(ivar_get_ic_hit)
RB_DEBUG_COUNTER(ivar_get_ic_miss)
-RB_DEBUG_COUNTER(ivar_get_ic_miss_serial)
-RB_DEBUG_COUNTER(ivar_get_ic_miss_unset)
RB_DEBUG_COUNTER(ivar_get_ic_miss_noobject)
RB_DEBUG_COUNTER(ivar_set_ic_hit)
RB_DEBUG_COUNTER(ivar_set_ic_miss)
-RB_DEBUG_COUNTER(ivar_set_ic_miss_serial)
-RB_DEBUG_COUNTER(ivar_set_ic_miss_unset)
RB_DEBUG_COUNTER(ivar_set_ic_miss_iv_hit)
RB_DEBUG_COUNTER(ivar_set_ic_miss_noobject)
RB_DEBUG_COUNTER(ivar_get_base)
RB_DEBUG_COUNTER(ivar_set_base)
+RB_DEBUG_COUNTER(ivar_get_ic_miss_set)
+RB_DEBUG_COUNTER(ivar_get_cc_miss_set)
+RB_DEBUG_COUNTER(ivar_get_ic_miss_unset)
+RB_DEBUG_COUNTER(ivar_get_cc_miss_unset)
/* local variable counts
*
@@ -321,6 +320,7 @@ RB_DEBUG_COUNTER(obj_imemo_parser_strterm)
RB_DEBUG_COUNTER(obj_imemo_callinfo)
RB_DEBUG_COUNTER(obj_imemo_callcache)
RB_DEBUG_COUNTER(obj_imemo_constcache)
+RB_DEBUG_COUNTER(obj_imemo_shape)
/* ar_table */
RB_DEBUG_COUNTER(artable_hint_hit)
diff --git a/ext/coverage/depend b/ext/coverage/depend
index 57d368d3f5..719c6c6e79 100644
--- a/ext/coverage/depend
+++ b/ext/coverage/depend
@@ -165,7 +165,9 @@ coverage.o: $(top_srcdir)/ccan/check_type/check_type.h
coverage.o: $(top_srcdir)/ccan/container_of/container_of.h
coverage.o: $(top_srcdir)/ccan/list/list.h
coverage.o: $(top_srcdir)/ccan/str/str.h
+coverage.o: $(top_srcdir)/constant.h
coverage.o: $(top_srcdir)/gc.h
+coverage.o: $(top_srcdir)/id_table.h
coverage.o: $(top_srcdir)/internal.h
coverage.o: $(top_srcdir)/internal/array.h
coverage.o: $(top_srcdir)/internal/compilers.h
@@ -176,12 +178,14 @@ coverage.o: $(top_srcdir)/internal/sanitizers.h
coverage.o: $(top_srcdir)/internal/serial.h
coverage.o: $(top_srcdir)/internal/static_assert.h
coverage.o: $(top_srcdir)/internal/thread.h
+coverage.o: $(top_srcdir)/internal/variable.h
coverage.o: $(top_srcdir)/internal/vm.h
coverage.o: $(top_srcdir)/internal/warnings.h
coverage.o: $(top_srcdir)/method.h
coverage.o: $(top_srcdir)/node.h
coverage.o: $(top_srcdir)/ruby_assert.h
coverage.o: $(top_srcdir)/ruby_atomic.h
+coverage.o: $(top_srcdir)/shape.h
coverage.o: $(top_srcdir)/thread_pthread.h
coverage.o: $(top_srcdir)/vm_core.h
coverage.o: $(top_srcdir)/vm_opts.h
diff --git a/ext/objspace/depend b/ext/objspace/depend
index c4da8031cc..88c66a232b 100644
--- a/ext/objspace/depend
+++ b/ext/objspace/depend
@@ -350,6 +350,7 @@ objspace.o: $(top_srcdir)/internal/serial.h
objspace.o: $(top_srcdir)/internal/static_assert.h
objspace.o: $(top_srcdir)/internal/warnings.h
objspace.o: $(top_srcdir)/node.h
+objspace.o: $(top_srcdir)/shape.h
objspace.o: $(top_srcdir)/symbol.h
objspace.o: objspace.c
objspace.o: {$(VPATH)}id.h
@@ -533,7 +534,9 @@ objspace_dump.o: $(top_srcdir)/ccan/check_type/check_type.h
objspace_dump.o: $(top_srcdir)/ccan/container_of/container_of.h
objspace_dump.o: $(top_srcdir)/ccan/list/list.h
objspace_dump.o: $(top_srcdir)/ccan/str/str.h
+objspace_dump.o: $(top_srcdir)/constant.h
objspace_dump.o: $(top_srcdir)/gc.h
+objspace_dump.o: $(top_srcdir)/id_table.h
objspace_dump.o: $(top_srcdir)/internal.h
objspace_dump.o: $(top_srcdir)/internal/array.h
objspace_dump.o: $(top_srcdir)/internal/compilers.h
@@ -544,12 +547,14 @@ objspace_dump.o: $(top_srcdir)/internal/sanitizers.h
objspace_dump.o: $(top_srcdir)/internal/serial.h
objspace_dump.o: $(top_srcdir)/internal/static_assert.h
objspace_dump.o: $(top_srcdir)/internal/string.h
+objspace_dump.o: $(top_srcdir)/internal/variable.h
objspace_dump.o: $(top_srcdir)/internal/vm.h
objspace_dump.o: $(top_srcdir)/internal/warnings.h
objspace_dump.o: $(top_srcdir)/method.h
objspace_dump.o: $(top_srcdir)/node.h
objspace_dump.o: $(top_srcdir)/ruby_assert.h
objspace_dump.o: $(top_srcdir)/ruby_atomic.h
+objspace_dump.o: $(top_srcdir)/shape.h
objspace_dump.o: $(top_srcdir)/thread_pthread.h
objspace_dump.o: $(top_srcdir)/vm_core.h
objspace_dump.o: $(top_srcdir)/vm_opts.h
diff --git a/ext/objspace/objspace.c b/ext/objspace/objspace.c
index 0b1b094325..364d36e696 100644
--- a/ext/objspace/objspace.c
+++ b/ext/objspace/objspace.c
@@ -644,6 +644,7 @@ count_imemo_objects(int argc, VALUE *argv, VALUE self)
INIT_IMEMO_TYPE_ID(imemo_callinfo);
INIT_IMEMO_TYPE_ID(imemo_callcache);
INIT_IMEMO_TYPE_ID(imemo_constcache);
+ INIT_IMEMO_TYPE_ID(imemo_shape);
#undef INIT_IMEMO_TYPE_ID
}
diff --git a/gc.c b/gc.c
index d026139d7b..03f936f0d8 100644
--- a/gc.c
+++ b/gc.c
@@ -2895,8 +2895,7 @@ rb_class_instance_allocate_internal(VALUE klass, VALUE flags, bool wb_protected)
GC_ASSERT((flags & RUBY_T_MASK) == T_OBJECT);
GC_ASSERT(flags & ROBJECT_EMBED);
- st_table *index_tbl = RCLASS_IV_INDEX_TBL(klass);
- uint32_t index_tbl_num_entries = index_tbl == NULL ? 0 : (uint32_t)index_tbl->num_entries;
+ uint32_t index_tbl_num_entries = RCLASS_EXT(klass)->max_iv_count;
size_t size;
bool embed = true;
@@ -2931,7 +2930,7 @@ rb_class_instance_allocate_internal(VALUE klass, VALUE flags, bool wb_protected)
#endif
}
else {
- rb_init_iv_list(obj);
+ rb_ensure_iv_list_size(obj, 0, index_tbl_num_entries);
}
return obj;
@@ -2972,6 +2971,7 @@ rb_imemo_name(enum imemo_type type)
IMEMO_NAME(callinfo);
IMEMO_NAME(callcache);
IMEMO_NAME(constcache);
+ IMEMO_NAME(shape);
#undef IMEMO_NAME
}
return "unknown";
@@ -3018,6 +3018,14 @@ imemo_memsize(VALUE obj)
case imemo_iseq:
size += rb_iseq_memsize((rb_iseq_t *)obj);
break;
+ case imemo_shape:
+ {
+ struct rb_id_table* edges = ((rb_shape_t *) obj)->edges;
+ if (edges) {
+ size += rb_id_table_memsize(edges);
+ }
+ break;
+ }
case imemo_env:
size += RANY(obj)->as.imemo.env.env_size * sizeof(VALUE);
break;
@@ -3206,20 +3214,6 @@ rb_free_const_table(struct rb_id_table *tbl)
rb_id_table_free(tbl);
}
-static int
-free_iv_index_tbl_free_i(st_data_t key, st_data_t value, st_data_t data)
-{
- xfree((void *)value);
- return ST_CONTINUE;
-}
-
-static void
-iv_index_tbl_free(struct st_table *tbl)
-{
- st_foreach(tbl, free_iv_index_tbl_free_i, 0);
- st_free_table(tbl);
-}
-
// alive: if false, target pointers can be freed already.
// To check it, we need objspace parameter.
static void
@@ -3387,6 +3381,22 @@ obj_free_object_id(rb_objspace_t *objspace, VALUE obj)
}
}
+static enum rb_id_table_iterator_result
+remove_child_shapes_parent(VALUE value, void *ref)
+{
+ rb_shape_t * shape = (rb_shape_t *) value;
+ GC_ASSERT(IMEMO_TYPE_P(shape, imemo_shape));
+
+ // If the child shape lives on the same page as the parent we're currently
+ // sweeping, then the child must not be marked either
+ if (GET_HEAP_PAGE(shape) == GET_HEAP_PAGE(shape->parent)) {
+ GC_ASSERT(!MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(shape), shape));
+ }
+
+ shape->parent = NULL;
+ return ID_TABLE_CONTINUE;
+}
+
static int
obj_free(rb_objspace_t *objspace, VALUE obj)
{
@@ -3435,6 +3445,19 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
RB_DEBUG_COUNTER_INC(obj_obj_transient);
}
else {
+ // A shape can be collected before the objects that use it (when both are
+ // garbage in the same GC cycle), so when we look up the shape here, _do not_
+ // assert that it is an IMEMO: the lookup may return NULL
+ rb_shape_t *shape = rb_shape_get_shape_by_id_without_assertion(ROBJECT_SHAPE_ID(obj));
+ if (shape) {
+ VALUE klass = RBASIC_CLASS(obj);
+
+ // Keep max_iv_count as a running maximum; it is used to size allocations from the size pools
+ uint32_t num_of_ivs = shape->iv_count;
+ if (RCLASS_EXT(klass)->max_iv_count < num_of_ivs) {
+ RCLASS_EXT(klass)->max_iv_count = num_of_ivs;
+ }
+ }
xfree(RANY(obj)->as.object.as.heap.ivptr);
RB_DEBUG_COUNTER_INC(obj_obj_ptr);
}
@@ -3449,9 +3472,6 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
if (RCLASS_CONST_TBL(obj)) {
rb_free_const_table(RCLASS_CONST_TBL(obj));
}
- if (RCLASS_IV_INDEX_TBL(obj)) {
- iv_index_tbl_free(RCLASS_IV_INDEX_TBL(obj));
- }
if (RCLASS_CVC_TBL(obj)) {
rb_id_table_foreach_values(RCLASS_CVC_TBL(obj), cvar_table_free_i, NULL);
rb_id_table_free(RCLASS_CVC_TBL(obj));
@@ -3728,8 +3748,39 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
case imemo_constcache:
RB_DEBUG_COUNTER_INC(obj_imemo_constcache);
break;
- }
- return TRUE;
+ case imemo_shape:
+ {
+ rb_shape_t *shape = (rb_shape_t *)obj;
+ rb_shape_t *parent = shape->parent;
+
+ if (parent) {
+ RUBY_ASSERT(IMEMO_TYPE_P(parent, imemo_shape));
+ RUBY_ASSERT(parent->edges);
+ VALUE res; // Only used to temporarily store lookup value
+ if (rb_id_table_lookup(parent->edges, shape->edge_name, &res)) {
+ if ((rb_shape_t *)res == shape) {
+ rb_id_table_delete(parent->edges, shape->edge_name);
+ }
+ }
+ else {
+ rb_bug("Edge %s should exist", rb_id2name(shape->edge_name));
+ }
+ }
+ if (shape->edges) {
+ rb_id_table_foreach_values(shape->edges, remove_child_shapes_parent, NULL);
+ rb_id_table_free(shape->edges);
+ shape->edges = NULL;
+ }
+
+ shape->parent = NULL;
+
+ rb_shape_set_shape_by_id(SHAPE_ID(shape), NULL);
+
+ RB_DEBUG_COUNTER_INC(obj_imemo_shape);
+ break;
+ }
+ }
+ return TRUE;
default:
rb_bug("gc_sweep(): unknown data type 0x%x(%p) 0x%"PRIxVALUE,
@@ -4873,10 +4924,6 @@ obj_memsize_of(VALUE obj, int use_all_types)
if (RCLASS_CVC_TBL(obj)) {
size += rb_id_table_memsize(RCLASS_CVC_TBL(obj));
}
- if (RCLASS_IV_INDEX_TBL(obj)) {
- // TODO: more correct value
- size += st_memsize(RCLASS_IV_INDEX_TBL(obj));
- }
if (RCLASS_EXT(obj)->iv_tbl) {
size += st_memsize(RCLASS_EXT(obj)->iv_tbl);
}
@@ -7154,6 +7201,21 @@ gc_mark_imemo(rb_objspace_t *objspace, VALUE obj)
const struct rb_callcache *cc = (const struct rb_callcache *)obj;
// should not mark klass here
gc_mark(objspace, (VALUE)vm_cc_cme(cc));
+
+ // Check it's an attr_(reader|writer)
+ if (cc->cme_ && (cc->cme_->def->type == VM_METHOD_TYPE_ATTRSET ||
+ cc->cme_->def->type == VM_METHOD_TYPE_IVAR)) {
+ shape_id_t source_shape_id = vm_cc_attr_index_source_shape_id(cc);
+ shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
+ if (source_shape_id != INVALID_SHAPE_ID) {
+ rb_shape_t *shape = rb_shape_get_shape_by_id(source_shape_id);
+ rb_gc_mark((VALUE)shape);
+ }
+ if (dest_shape_id != INVALID_SHAPE_ID) {
+ rb_shape_t *shape = rb_shape_get_shape_by_id(dest_shape_id);
+ rb_gc_mark((VALUE)shape);
+ }
+ }
}
return;
case imemo_constcache:
@@ -7162,6 +7224,14 @@ gc_mark_imemo(rb_objspace_t *objspace, VALUE obj)
gc_mark(objspace, ice->value);
}
return;
+ case imemo_shape:
+ {
+ rb_shape_t *shape = (rb_shape_t *)obj;
+ if (shape->edges) {
+ mark_m_tbl(objspace, shape->edges);
+ }
+ }
+ return;
#if VM_CHECK_MODE > 0
default:
VM_UNREACHABLE(gc_mark_imemo);
@@ -9765,6 +9835,10 @@ gc_is_moveable_obj(rb_objspace_t *objspace, VALUE obj)
GC_ASSERT(!SPECIAL_CONST_P(obj));
switch (BUILTIN_TYPE(obj)) {
+ case T_IMEMO:
+ if (IMEMO_TYPE_P(obj, imemo_shape)) {
+ return FALSE;
+ }
case T_NONE:
case T_NIL:
case T_MOVED:
@@ -9778,7 +9852,6 @@ gc_is_moveable_obj(rb_objspace_t *objspace, VALUE obj)
case T_STRING:
case T_OBJECT:
case T_FLOAT:
- case T_IMEMO:
case T_ARRAY:
case T_BIGNUM:
case T_ICLASS:
@@ -10178,6 +10251,38 @@ gc_update_values(rb_objspace_t *objspace, long n, VALUE *values)
}
}
+static enum rb_id_table_iterator_result
+check_id_table_move(VALUE value, void *data)
+{
+ rb_objspace_t *objspace = (rb_objspace_t *)data;
+
+ if (gc_object_moved_p(objspace, (VALUE)value)) {
+ return ID_TABLE_REPLACE;
+ }
+
+ return ID_TABLE_CONTINUE;
+}
+
+static enum rb_id_table_iterator_result
+update_id_table(VALUE *value, void *data, int existing)
+{
+ rb_objspace_t *objspace = (rb_objspace_t *)data;
+
+ if (gc_object_moved_p(objspace, (VALUE)*value)) {
+ *value = rb_gc_location((VALUE)*value);
+ }
+
+ return ID_TABLE_CONTINUE;
+}
+
+static void
+update_m_tbl(rb_objspace_t *objspace, struct rb_id_table *tbl)
+{
+ if (tbl) {
+ rb_id_table_foreach_values_with_replace(tbl, check_id_table_move, update_id_table, objspace);
+ }
+}
+
static void
gc_ref_update_imemo(rb_objspace_t *objspace, VALUE obj)
{
@@ -10250,24 +10355,23 @@ gc_ref_update_imemo(rb_objspace_t *objspace, VALUE obj)
case imemo_tmpbuf:
case imemo_callinfo:
break;
+ case imemo_shape:
+ {
+ rb_shape_t * shape = (rb_shape_t *)obj;
+ if(shape->edges) {
+ update_m_tbl(objspace, shape->edges);
+ }
+ if (shape->parent) {
+ shape->parent = (rb_shape_t *)rb_gc_location((VALUE)shape->parent);
+ }
+ }
+ break;
default:
rb_bug("not reachable %d", imemo_type(obj));
break;
}
}
-static enum rb_id_table_iterator_result
-check_id_table_move(VALUE value, void *data)
-{
- rb_objspace_t *objspace = (rb_objspace_t *)data;
-
- if (gc_object_moved_p(objspace, (VALUE)value)) {
- return ID_TABLE_REPLACE;
- }
-
- return ID_TABLE_CONTINUE;
-}
-
/* Returns the new location of an object, if it moved. Otherwise returns
* the existing location. */
VALUE
@@ -10301,26 +10405,6 @@ rb_gc_location(VALUE value)
}
static enum rb_id_table_iterator_result
-update_id_table(VALUE *value, void *data, int existing)
-{
- rb_objspace_t *objspace = (rb_objspace_t *)data;
-
- if (gc_object_moved_p(objspace, (VALUE)*value)) {
- *value = rb_gc_location((VALUE)*value);
- }
-
- return ID_TABLE_CONTINUE;
-}
-
-static void
-update_m_tbl(rb_objspace_t *objspace, struct rb_id_table *tbl)
-{
- if (tbl) {
- rb_id_table_foreach_values_with_replace(tbl, check_id_table_move, update_id_table, objspace);
- }
-}
-
-static enum rb_id_table_iterator_result
update_cc_tbl_i(VALUE ccs_ptr, void *data)
{
rb_objspace_t *objspace = (rb_objspace_t *)data;
@@ -10407,15 +10491,6 @@ update_subclass_entries(rb_objspace_t *objspace, rb_subclass_entry_t *entry)
}
}
-static int
-update_iv_index_tbl_i(st_data_t key, st_data_t value, st_data_t arg)
-{
- rb_objspace_t *objspace = (rb_objspace_t *)arg;
- struct rb_iv_index_tbl_entry *ent = (struct rb_iv_index_tbl_entry *)value;
- UPDATE_IF_MOVED(objspace, ent->class_value);
- return ST_CONTINUE;
-}
-
static void
update_class_ext(rb_objspace_t *objspace, rb_classext_t *ext)
{
@@ -10423,11 +10498,6 @@ update_class_ext(rb_objspace_t *objspace, rb_classext_t *ext)
UPDATE_IF_MOVED(objspace, ext->includer);
UPDATE_IF_MOVED(objspace, ext->refined_class);
update_subclass_entries(objspace, ext->subclasses);
-
- // ext->iv_index_tbl
- if (ext->iv_index_tbl) {
- st_foreach(ext->iv_index_tbl, update_iv_index_tbl_i, (st_data_t)objspace);
- }
}
static void
@@ -10669,6 +10739,8 @@ gc_update_references(rb_objspace_t *objspace)
struct heap_page *page = NULL;
+ rb_vm_update_references(vm);
+
for (int i = 0; i < SIZE_POOL_COUNT; i++) {
bool should_set_mark_bits = TRUE;
rb_size_pool_t *size_pool = &size_pools[i];
@@ -10687,7 +10759,6 @@ gc_update_references(rb_objspace_t *objspace)
}
}
}
- rb_vm_update_references(vm);
rb_transient_heap_update_references();
rb_gc_update_global_tbl();
global_symbols.ids = rb_gc_location(global_symbols.ids);
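
One detail worth calling out in the gc.c hunks above: allocation sizing no longer consults the class's `iv_index_tbl`. Instead, `RCLASS_EXT(klass)->max_iv_count` is kept as a running maximum of the `iv_count` of shapes seen on freed instances (and, in shape.c below, on newly created IVAR shapes), and `rb_class_instance_allocate_internal` uses that number to decide how much embedded ivar room new instances get. A tiny illustrative sketch of the bookkeeping, in plain Ruby rather than the real C:

```ruby
# Running maximum of instance-variable counts observed for a class.
max_iv_count = 0

# iv_count of each freed instance's shape, as obj_free would see them.
[1, 3, 2].each do |iv_count|
  max_iv_count = iv_count if iv_count > max_iv_count
end

max_iv_count # => 3; the next allocation reserves room for 3 embedded ivars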
diff --git a/include/ruby/internal/core/robject.h b/include/ruby/internal/core/robject.h
index 7823061d8f..bec0b45fd4 100644
--- a/include/ruby/internal/core/robject.h
+++ b/include/ruby/internal/core/robject.h
@@ -46,7 +46,6 @@
#define ROBJECT_EMBED ROBJECT_EMBED
#define ROBJECT_NUMIV ROBJECT_NUMIV
#define ROBJECT_IVPTR ROBJECT_IVPTR
-#define ROBJECT_IV_INDEX_TBL ROBJECT_IV_INDEX_TBL
/** @endcond */
/**
@@ -132,7 +131,7 @@ struct RObject {
*
* This is a shortcut for `RCLASS_IV_INDEX_TBL(rb_obj_class(obj))`.
*/
- struct st_table *iv_index_tbl;
+ struct rb_id_table *iv_index_tbl;
} heap;
#if USE_RVARGC
diff --git a/include/ruby/internal/fl_type.h b/include/ruby/internal/fl_type.h
index c51bd2e9d9..7383426b23 100644
--- a/include/ruby/internal/fl_type.h
+++ b/include/ruby/internal/fl_type.h
@@ -941,21 +941,8 @@ RB_OBJ_FREEZE_RAW(VALUE obj)
RB_FL_SET_RAW(obj, RUBY_FL_FREEZE);
}
-/**
- * Prevents further modifications to the given object. ::rb_eFrozenError shall
- * be raised if modification is attempted.
- *
- * @param[out] x Object in question.
- */
-static inline void
-rb_obj_freeze_inline(VALUE x)
-{
- if (RB_FL_ABLE(x)) {
- RB_OBJ_FREEZE_RAW(x);
- if (RBASIC_CLASS(x) && !(RBASIC(x)->flags & RUBY_FL_SINGLETON)) {
- rb_freeze_singleton_class(x);
- }
- }
-}
+RUBY_SYMBOL_EXPORT_BEGIN
+void rb_obj_freeze_inline(VALUE obj);
+RUBY_SYMBOL_EXPORT_END
#endif /* RBIMPL_FL_TYPE_H */
diff --git a/inits.c b/inits.c
index 9decba3c11..d1204c1324 100644
--- a/inits.c
+++ b/inits.c
@@ -77,6 +77,7 @@ rb_call_inits(void)
CALL(vm_stack_canary);
CALL(ast);
CALL(gc_stress);
+ CALL(shape);
// enable builtin loading
CALL(builtin);
diff --git a/internal.h b/internal.h
index 0740ae99e5..695c9cfb7e 100644
--- a/internal.h
+++ b/internal.h
@@ -48,9 +48,6 @@
#undef RHASH_TBL
#undef RHASH_EMPTY_P
-/* internal/object.h */
-#undef ROBJECT_IV_INDEX_TBL
-
/* internal/struct.h */
#undef RSTRUCT_LEN
#undef RSTRUCT_PTR
diff --git a/internal/class.h b/internal/class.h
index ae680564a6..36635d6eaa 100644
--- a/internal/class.h
+++ b/internal/class.h
@@ -14,6 +14,7 @@
#include "ruby/internal/stdbool.h" /* for bool */
#include "ruby/intern.h" /* for rb_alloc_func_t */
#include "ruby/ruby.h" /* for struct RBasic */
+#include "shape.h"
#ifdef RCLASS_SUPER
# undef RCLASS_SUPER
@@ -26,9 +27,9 @@ struct rb_subclass_entry {
};
struct rb_iv_index_tbl_entry {
- uint32_t index;
- rb_serial_t class_serial;
- VALUE class_value;
+ uint32_t idx;
+ shape_id_t source_shape_id;
+ shape_id_t dest_shape_id;
};
struct rb_cvar_class_tbl_entry {
@@ -38,7 +39,6 @@ struct rb_cvar_class_tbl_entry {
};
struct rb_classext_struct {
- struct st_table *iv_index_tbl; // ID -> struct rb_iv_index_tbl_entry
struct st_table *iv_tbl;
#if SIZEOF_SERIAL_T == SIZEOF_VALUE /* otherwise m_tbl is in struct RClass */
struct rb_id_table *m_tbl;
@@ -64,6 +64,8 @@ struct rb_classext_struct {
const VALUE refined_class;
rb_alloc_func_t allocator;
const VALUE includer;
+ uint32_t max_iv_count;
+ uint16_t shape_id;
};
struct RClass {
@@ -102,7 +104,6 @@ typedef struct rb_classext_struct rb_classext_t;
#define RCLASS_CALLABLE_M_TBL(c) (RCLASS_EXT(c)->callable_m_tbl)
#define RCLASS_CC_TBL(c) (RCLASS_EXT(c)->cc_tbl)
#define RCLASS_CVC_TBL(c) (RCLASS_EXT(c)->cvc_tbl)
-#define RCLASS_IV_INDEX_TBL(c) (RCLASS_EXT(c)->iv_index_tbl)
#define RCLASS_ORIGIN(c) (RCLASS_EXT(c)->origin_)
#define RCLASS_REFINED_CLASS(c) (RCLASS_EXT(c)->refined_class)
#if SIZEOF_SERIAL_T == SIZEOF_VALUE
diff --git a/internal/imemo.h b/internal/imemo.h
index 91b524e0a6..20bfff8d7c 100644
--- a/internal/imemo.h
+++ b/internal/imemo.h
@@ -45,6 +45,7 @@ enum imemo_type {
imemo_callinfo = 11,
imemo_callcache = 12,
imemo_constcache = 13,
+ imemo_shape = 14,
};
/* CREF (Class REFerence) is defined in method.h */
diff --git a/internal/object.h b/internal/object.h
index 88f3a44bc6..7b54e13dd2 100644
--- a/internal/object.h
+++ b/internal/object.h
@@ -9,11 +9,6 @@
* @brief Internal header for Object.
*/
#include "ruby/ruby.h" /* for VALUE */
-#include "internal/class.h" /* for RCLASS_IV_INDEX_TBL */
-
-#ifdef ROBJECT_IV_INDEX_TBL
-# undef ROBJECT_IV_INDEX_TBL
-#endif
/* object.c */
VALUE rb_class_search_ancestor(VALUE klass, VALUE super);
@@ -26,7 +21,6 @@ int rb_bool_expected(VALUE, const char *, int raise);
static inline void RBASIC_CLEAR_CLASS(VALUE obj);
static inline void RBASIC_SET_CLASS_RAW(VALUE obj, VALUE klass);
static inline void RBASIC_SET_CLASS(VALUE obj, VALUE klass);
-static inline struct st_table *ROBJECT_IV_INDEX_TBL_inline(VALUE obj);
RUBY_SYMBOL_EXPORT_BEGIN
/* object.c (export) */
@@ -64,20 +58,4 @@ RBASIC_SET_CLASS(VALUE obj, VALUE klass)
RBASIC_SET_CLASS_RAW(obj, klass);
RB_OBJ_WRITTEN(obj, oldv, klass);
}
-
-RBIMPL_ATTR_PURE()
-static inline struct st_table *
-ROBJECT_IV_INDEX_TBL_inline(VALUE obj)
-{
- if (RB_FL_ANY_RAW(obj, ROBJECT_EMBED)) {
- VALUE klass = rb_obj_class(obj);
- return RCLASS_IV_INDEX_TBL(klass);
- }
- else {
- const struct RObject *const ptr = ROBJECT(obj);
- return ptr->as.heap.iv_index_tbl;
- }
-}
-#define ROBJECT_IV_INDEX_TBL ROBJECT_IV_INDEX_TBL_inline
-
#endif /* INTERNAL_OBJECT_H */
diff --git a/internal/variable.h b/internal/variable.h
index 1a19e8964b..47037a3392 100644
--- a/internal/variable.h
+++ b/internal/variable.h
@@ -37,6 +37,9 @@ static inline void ROBJ_TRANSIENT_SET(VALUE obj);
static inline void ROBJ_TRANSIENT_UNSET(VALUE obj);
uint32_t rb_obj_ensure_iv_index_mapping(VALUE obj, ID id);
+struct gen_ivtbl;
+int rb_gen_ivtbl_get(VALUE obj, ID id, struct gen_ivtbl **ivtbl);
+
RUBY_SYMBOL_EXPORT_BEGIN
/* variable.c (export) */
void rb_mark_generic_ivar(VALUE);
@@ -52,6 +55,8 @@ VALUE rb_gvar_set(ID, VALUE);
VALUE rb_gvar_defined(ID);
void rb_const_warn_if_deprecated(const rb_const_entry_t *, VALUE, ID);
void rb_init_iv_list(VALUE obj);
+void rb_ensure_iv_list_size(VALUE obj, uint32_t len, uint32_t newsize);
+struct gen_ivtbl * rb_ensure_generic_iv_list_size(VALUE obj, uint32_t newsize);
MJIT_SYMBOL_EXPORT_END
static inline bool
diff --git a/iseq.c b/iseq.c
index a4792d81fd..7014162a19 100644
--- a/iseq.c
+++ b/iseq.c
@@ -233,13 +233,15 @@ rb_iseq_each_value(const rb_iseq_t *iseq, iseq_value_itr_t * func, void *data)
// IVC entries
for (unsigned int i = 0; i < body->ivc_size; i++, is_entries++) {
IVC ivc = (IVC)is_entries;
- if (ivc->entry) {
- RUBY_ASSERT(!RB_TYPE_P(ivc->entry->class_value, T_NONE));
-
- VALUE nv = func(data, ivc->entry->class_value);
- if (ivc->entry->class_value != nv) {
- ivc->entry->class_value = nv;
- }
+ shape_id_t source_shape_id = vm_ic_attr_index_source_shape_id(ivc);
+ shape_id_t dest_shape_id = vm_ic_attr_index_dest_shape_id(ivc);
+ if (source_shape_id != INVALID_SHAPE_ID) {
+ rb_shape_t *shape = rb_shape_get_shape_by_id(source_shape_id);
+ func(data, (VALUE)shape);
+ }
+ if (dest_shape_id != INVALID_SHAPE_ID) {
+ rb_shape_t *shape = rb_shape_get_shape_by_id(dest_shape_id);
+ func(data, (VALUE)shape);
}
}
diff --git a/lib/mjit/compiler.rb b/lib/mjit/compiler.rb
index 49f28ab690..06f018c934 100644
--- a/lib/mjit/compiler.rb
+++ b/lib/mjit/compiler.rb
@@ -73,23 +73,6 @@ module RubyVM::MJIT
src << "#undef GET_SELF\n"
src << "#define GET_SELF() cfp_self\n"
- # Generate merged ivar guards first if needed
- if !status.compile_info.disable_ivar_cache && status.merge_ivar_guards_p
- src << " if (UNLIKELY(!(RB_TYPE_P(GET_SELF(), T_OBJECT) && (rb_serial_t)#{status.ivar_serial} == RCLASS_SERIAL(RBASIC(GET_SELF())->klass) &&"
- if USE_RVARGC
- src << "#{status.max_ivar_index} < ROBJECT_NUMIV(GET_SELF())" # index < ROBJECT_NUMIV(obj)
- else
- if status.max_ivar_index >= ROBJECT_EMBED_LEN_MAX
- src << "#{status.max_ivar_index} < ROBJECT_NUMIV(GET_SELF())" # index < ROBJECT_NUMIV(obj) && !RB_FL_ANY_RAW(obj, ROBJECT_EMBED)
- else
- src << "ROBJECT_EMBED_LEN_MAX == ROBJECT_NUMIV(GET_SELF())" # index < ROBJECT_NUMIV(obj) && RB_FL_ANY_RAW(obj, ROBJECT_EMBED)
- end
- end
- src << "))) {\n"
- src << " goto ivar_cancel;\n"
- src << " }\n"
- end
-
# Simulate `opt_pc` in setup_parameters_complex. Other PCs which may be passed by catch tables
# are not considered since vm_exec doesn't call jit_exec for catch tables.
if iseq.body.param.flags.has_opt
@@ -103,6 +86,13 @@ module RubyVM::MJIT
src << " }\n"
end
+ # Generate merged ivar guards first if needed
+ if !status.compile_info.disable_ivar_cache && status.merge_ivar_guards_p
+ src << " if (UNLIKELY(!(RB_TYPE_P(GET_SELF(), T_OBJECT)))) {"
+ src << " goto ivar_cancel;\n"
+ src << " }\n"
+ end
+
C.fprintf(f, src)
compile_insns(0, 0, status, iseq.body, f)
compile_cancel_handler(f, iseq.body, status)
@@ -363,52 +353,37 @@ module RubyVM::MJIT
ic_copy = (status.is_entries + (C.iseq_inline_storage_entry.new(operands[1]) - body.is_entries)).iv_cache
src = +''
- if !status.compile_info.disable_ivar_cache && ic_copy.entry
+ if !status.compile_info.disable_ivar_cache && ic_copy.source_shape_id != C.INVALID_SHAPE_ID
# JIT: optimize away motion of sp and pc. This path does not call rb_warning() and so it's always leaf and not `handles_sp`.
# compile_pc_and_sp(src, insn, stack_size, sp_inc, local_stack_p, next_pos)
# JIT: prepare vm_getivar/vm_setivar arguments and variables
src << "{\n"
src << " VALUE obj = GET_SELF();\n"
- src << " const uint32_t index = #{ic_copy.entry.index};\n"
- if status.merge_ivar_guards_p
- # JIT: Access ivar without checking these VM_ASSERTed prerequisites as we checked them in the beginning of `mjit_compile_body`
- src << " VM_ASSERT(RB_TYPE_P(obj, T_OBJECT));\n"
- src << " VM_ASSERT((rb_serial_t)#{ic_copy.entry.class_serial} == RCLASS_SERIAL(RBASIC(obj)->klass));\n"
- src << " VM_ASSERT(index < ROBJECT_NUMIV(obj));\n"
- if insn_name == :setinstancevariable
- if USE_RVARGC
- src << " if (LIKELY(!RB_OBJ_FROZEN_RAW(obj) && index < ROBJECT_NUMIV(obj))) {\n"
- src << " RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[index], stack[#{stack_size - 1}]);\n"
- else
- heap_ivar_p = status.max_ivar_index >= ROBJECT_EMBED_LEN_MAX
- src << " if (LIKELY(!RB_OBJ_FROZEN_RAW(obj) && #{heap_ivar_p ? 'true' : 'RB_FL_ANY_RAW(obj, ROBJECT_EMBED)'})) {\n"
- src << " RB_OBJ_WRITE(obj, &ROBJECT(obj)->as.#{heap_ivar_p ? 'heap.ivptr[index]' : 'ary[index]'}, stack[#{stack_size - 1}]);\n"
- end
- src << " }\n"
- else
- src << " VALUE val;\n"
- if USE_RVARGC
- src << " if (LIKELY(index < ROBJECT_NUMIV(obj) && (val = ROBJECT_IVPTR(obj)[index]) != Qundef)) {\n"
- else
- heap_ivar_p = status.max_ivar_index >= ROBJECT_EMBED_LEN_MAX
- src << " if (LIKELY(#{heap_ivar_p ? 'true' : 'RB_FL_ANY_RAW(obj, ROBJECT_EMBED)'} && (val = ROBJECT(obj)->as.#{heap_ivar_p ? 'heap.ivptr[index]' : 'ary[index]'}) != Qundef)) {\n"
- end
- src << " stack[#{stack_size}] = val;\n"
- src << " }\n"
- end
+ src << " const shape_id_t source_shape_id = (rb_serial_t)#{ic_copy.source_shape_id};\n"
+ # JIT: cache hit path of vm_getivar/vm_setivar, or cancel JIT (recompile it with exivar)
+ if insn_name == :setinstancevariable
+ src << " const uint32_t index = #{ic_copy.attr_index - 1};\n"
+ src << " const shape_id_t dest_shape_id = (rb_serial_t)#{ic_copy.dest_shape_id};\n"
+ src << " if (source_shape_id == ROBJECT_SHAPE_ID(obj) && \n"
+ src << " dest_shape_id != ROBJECT_SHAPE_ID(obj)) {\n"
+ src << " if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {\n"
+ src << " rb_init_iv_list(obj);\n"
+ src << " }\n"
+ src << " ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);\n"
+ src << " VALUE *ptr = ROBJECT_IVPTR(obj);\n"
+ src << " RB_OBJ_WRITE(obj, &ptr[index], stack[#{stack_size - 1}]);\n"
+ src << " }\n"
else
- src << " const rb_serial_t ic_serial = (rb_serial_t)#{ic_copy.entry.class_serial};\n"
- # JIT: cache hit path of vm_getivar/vm_setivar, or cancel JIT (recompile it with exivar)
- if insn_name == :setinstancevariable
- src << " if (LIKELY(RB_TYPE_P(obj, T_OBJECT) && ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass) && index < ROBJECT_NUMIV(obj) && !RB_OBJ_FROZEN_RAW(obj))) {\n"
- src << " VALUE *ptr = ROBJECT_IVPTR(obj);\n"
- src << " RB_OBJ_WRITE(obj, &ptr[index], stack[#{stack_size - 1}]);\n"
+ if ic_copy.attr_index == 0 # cache hit, but uninitialized iv
+ src << " /* Uninitialized instance variable */\n"
+ src << " if (source_shape_id == ROBJECT_SHAPE_ID(obj)) {\n"
+ src << " stack[#{stack_size}] = Qnil;\n"
src << " }\n"
else
- src << " VALUE val;\n"
- src << " if (LIKELY(RB_TYPE_P(obj, T_OBJECT) && ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass) && index < ROBJECT_NUMIV(obj) && (val = ROBJECT_IVPTR(obj)[index]) != Qundef)) {\n"
- src << " stack[#{stack_size}] = val;\n"
+ src << " const uint32_t index = #{ic_copy.attr_index - 1};\n"
+ src << " if (source_shape_id == ROBJECT_SHAPE_ID(obj)) {\n"
+ src << " stack[#{stack_size}] = ROBJECT_IVPTR(obj)[index];\n"
src << " }\n"
end
end
@@ -419,20 +394,19 @@ module RubyVM::MJIT
src << " }\n"
src << "}\n"
return src
- elsif insn_name == :getinstancevariable && !status.compile_info.disable_exivar_cache && ic_copy.entry
+ elsif insn_name == :getinstancevariable && !status.compile_info.disable_exivar_cache && ic_copy.source_shape_id != C.INVALID_SHAPE_ID
# JIT: optimize away motion of sp and pc. This path does not call rb_warning() and so it's always leaf and not `handles_sp`.
# compile_pc_and_sp(src, insn, stack_size, sp_inc, local_stack_p, next_pos)
# JIT: prepare vm_getivar's arguments and variables
src << "{\n"
src << " VALUE obj = GET_SELF();\n"
- src << " const rb_serial_t ic_serial = (rb_serial_t)#{ic_copy.entry.class_serial};\n"
- src << " const uint32_t index = #{ic_copy.entry.index};\n"
+ src << " const shape_id_t source_shape_id = (rb_serial_t)#{ic_copy.source_shape_id};\n"
+ src << " const uint32_t index = #{ic_copy.attr_index - 1};\n"
# JIT: cache hit path of vm_getivar, or cancel JIT (recompile it without any ivar optimization)
src << " struct gen_ivtbl *ivtbl;\n"
- src << " VALUE val;\n"
- src << " if (LIKELY(FL_TEST_RAW(obj, FL_EXIVAR) && ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass) && rb_ivar_generic_ivtbl_lookup(obj, &ivtbl) && index < ivtbl->numiv && (val = ivtbl->ivptr[index]) != Qundef)) {\n"
- src << " stack[#{stack_size}] = val;\n"
+ src << " if (LIKELY(FL_TEST_RAW(obj, FL_EXIVAR) && source_shape_id == rb_shape_get_shape_id(obj) && rb_ivar_generic_ivtbl_lookup(obj, &ivtbl))) {\n"
+ src << " stack[#{stack_size}] = ivtbl->ivptr[index];\n"
src << " }\n"
src << " else {\n"
src << " reg_cfp->pc = original_body_iseq + #{pos};\n"
@@ -832,35 +806,16 @@ module RubyVM::MJIT
def init_ivar_compile_status(body, status)
C.mjit_capture_is_entries(body, status.is_entries)
- num_ivars = 0
pos = 0
- status.max_ivar_index = 0
- status.ivar_serial = 0
while pos < body.iseq_size
insn = INSNS.fetch(C.rb_vm_insn_decode(body.iseq_encoded[pos]))
if insn.name == :getinstancevariable || insn.name == :setinstancevariable
- ic = body.iseq_encoded[pos+2]
- ic_copy = (status.is_entries + (C.iseq_inline_storage_entry.new(ic) - body.is_entries)).iv_cache
- if ic_copy.entry # Only initialized (ic_serial > 0) IVCs are optimized
- num_ivars += 1
-
- if status.max_ivar_index < ic_copy.entry.index
- status.max_ivar_index = ic_copy.entry.index
- end
-
- if status.ivar_serial == 0
- status.ivar_serial = ic_copy.entry.class_serial
- elsif status.ivar_serial != ic_copy.entry.class_serial
- # Multiple classes have used this ISeq. Give up assuming one serial.
- status.merge_ivar_guards_p = false
- return
- end
- end
+ status.merge_ivar_guards_p = true
+ return
end
pos += insn.len
end
- status.merge_ivar_guards_p = status.ivar_serial > 0 && num_ivars >= 2
end
# Expand simple macro that doesn't require dynamic C code.
diff --git a/marshal.c b/marshal.c
index e4b40c0607..59edbfe53a 100644
--- a/marshal.c
+++ b/marshal.c
@@ -39,6 +39,7 @@
#include "ruby/st.h"
#include "ruby/util.h"
#include "builtin.h"
+#include "shape.h"
#define BITSPERSHORT (2*CHAR_BIT)
#define SHORTMASK ((1<<BITSPERSHORT)-1)
@@ -622,10 +623,6 @@ w_obj_each(st_data_t key, st_data_t val, st_data_t a)
}
return ST_CONTINUE;
}
- if (!ivarg->num_ivar) {
- rb_raise(rb_eRuntimeError, "instance variable added to %"PRIsVALUE" instance",
- CLASS_OF(arg->obj));
- }
--ivarg->num_ivar;
w_symbol(ID2SYM(id), arg->arg);
w_object(value, arg->arg, arg->limit);
@@ -720,6 +717,7 @@ has_ivars(VALUE obj, VALUE encname, VALUE *ivobj)
static void
w_ivar_each(VALUE obj, st_index_t num, struct dump_call_arg *arg)
{
+ shape_id_t shape_id = rb_shape_get_shape_id(arg->obj);
struct w_ivar_arg ivarg = {arg, num};
if (!num) return;
rb_ivar_foreach(obj, w_obj_each, (st_data_t)&ivarg);
@@ -727,6 +725,10 @@ w_ivar_each(VALUE obj, st_index_t num, struct dump_call_arg *arg)
rb_raise(rb_eRuntimeError, "instance variable removed from %"PRIsVALUE" instance",
CLASS_OF(arg->obj));
}
+ if (shape_id != rb_shape_get_shape_id(arg->obj)) {
+ rb_raise(rb_eRuntimeError, "instance variable added to %"PRIsVALUE" instance",
+ CLASS_OF(arg->obj));
+ }
}
static void
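
The marshal.c change above replaces the old `num_ivar` counting with a shape comparison: the shape id is captured before `rb_ivar_foreach` and re-checked afterwards, so adding an instance variable to the object while it is being dumped now raises. A hedged illustration of the kind of code this guards against; the class names are made up, and the outcome assumes the dump re-enters user code mid-iteration as sketched:

```ruby
class Inner
  def initialize(owner)
    @owner = owner
  end

  # Called while the outer object's ivars are being written out.
  def _dump(_level)
    @owner.instance_variable_set(:@late, 1) # mutates the outer object mid-dump
    "inner"
  end

  def self._load(_str)
    new(nil)
  end
end

class Outer
  def initialize
    @inner = Inner.new(self)
  end
end

Marshal.dump(Outer.new)
# expected to raise RuntimeError, "instance variable added to Outer instance"
```
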
diff --git a/misc/lldb_cruby.py b/misc/lldb_cruby.py
index 595d54dfab..da97e28b69 100755
--- a/misc/lldb_cruby.py
+++ b/misc/lldb_cruby.py
@@ -418,9 +418,13 @@ def lldb_inspect(debugger, target, result, val):
elif flType == RUBY_T_IMEMO:
# I'm not sure how to get IMEMO_MASK out of lldb. It's not in globals()
imemo_type = (flags >> RUBY_FL_USHIFT) & 0x0F # IMEMO_MASK
+
print("T_IMEMO: ", file=result)
append_command_output(debugger, "p (enum imemo_type) %d" % imemo_type, result)
- append_command_output(debugger, "p *(struct MEMO *) %0#x" % val.GetValueAsUnsigned(), result)
+ if imemo_type == 14: # imemo_shape, see enum imemo_type in internal/imemo.h
+ append_command_output(debugger, "p *(rb_shape_t *) %0#x" % val.GetValueAsUnsigned(), result)
+ else:
+ append_command_output(debugger, "p *(struct MEMO *) %0#x" % val.GetValueAsUnsigned(), result)
elif flType == RUBY_T_STRUCT:
tRTypedData = target.FindFirstType("struct RStruct").GetPointerType()
val = val.Cast(tRTypedData)
diff --git a/mjit_c.rb b/mjit_c.rb
index d8e5628bda..fd8991af3f 100644
--- a/mjit_c.rb
+++ b/mjit_c.rb
@@ -5,6 +5,10 @@ module RubyVM::MJIT
C = Object.new
class << C
+ def SHAPE_BITS
+ RubyVM::Shape::SHAPE_BITS
+ end
+
def ROBJECT_EMBED_LEN_MAX
Primitive.cexpr! 'INT2NUM(RBIMPL_EMBED_LEN_MAX_OF(VALUE))'
end
@@ -165,6 +169,14 @@ module RubyVM::MJIT
Primitive.cexpr! %q{ INT2NUM(VM_METHOD_TYPE_ISEQ) }
end
+ def C.INVALID_SHAPE_ID
+ Primitive.cexpr! %q{ ULONG2NUM(INVALID_SHAPE_ID) }
+ end
+
+ def C.SHAPE_MASK
+ Primitive.cexpr! %q{ ULONG2NUM(SHAPE_MASK) }
+ end
+
def C.CALL_DATA
@CALL_DATA ||= self.rb_call_data
end
@@ -181,6 +193,10 @@ module RubyVM::MJIT
@RB_BUILTIN ||= self.rb_builtin_function
end
+ def C.attr_index_t
+ @attr_index_t ||= CType::Immediate.parse("uint32_t")
+ end
+
def C.compile_branch
@compile_branch ||= CType::Struct.new(
"compile_branch", Primitive.cexpr!("SIZEOF(struct compile_branch)"),
@@ -201,7 +217,6 @@ module RubyVM::MJIT
compiled_id: [CType::Immediate.parse("int"), Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), compiled_id)")],
compile_info: [CType::Pointer.new { self.rb_mjit_compile_info }, Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), compile_info)")],
merge_ivar_guards_p: [self._Bool, Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), merge_ivar_guards_p)")],
- ivar_serial: [self.rb_serial_t, Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), ivar_serial)")],
max_ivar_index: [CType::Immediate.parse("size_t"), Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), max_ivar_index)")],
inlined_iseqs: [CType::Pointer.new { CType::Pointer.new { self.rb_iseq_constant_body } }, Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), inlined_iseqs)")],
inline_context: [self.inlined_call_context, Primitive.cexpr!("OFFSETOF((*((struct compile_status *)NULL)), inline_context)")],
@@ -240,7 +255,9 @@ module RubyVM::MJIT
def C.iseq_inline_iv_cache_entry
@iseq_inline_iv_cache_entry ||= CType::Struct.new(
"iseq_inline_iv_cache_entry", Primitive.cexpr!("SIZEOF(struct iseq_inline_iv_cache_entry)"),
- entry: [CType::Pointer.new { self.rb_iv_index_tbl_entry }, Primitive.cexpr!("OFFSETOF((*((struct iseq_inline_iv_cache_entry *)NULL)), entry)")],
+ source_shape_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct iseq_inline_iv_cache_entry *)NULL)), source_shape_id)")],
+ dest_shape_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct iseq_inline_iv_cache_entry *)NULL)), dest_shape_id)")],
+ attr_index: [self.attr_index_t, Primitive.cexpr!("OFFSETOF((*((struct iseq_inline_iv_cache_entry *)NULL)), attr_index)")],
)
end
@@ -313,7 +330,11 @@ module RubyVM::MJIT
call_: [self.vm_call_handler, Primitive.cexpr!("OFFSETOF((*((struct rb_callcache *)NULL)), call_)")],
aux_: [CType::Union.new(
"", Primitive.cexpr!("SIZEOF(((struct rb_callcache *)NULL)->aux_)"),
- attr_index: CType::Immediate.parse("unsigned int"),
+ attr: CType::Struct.new(
+ "", Primitive.cexpr!("SIZEOF(((struct rb_callcache *)NULL)->aux_.attr)"),
+ index: [self.attr_index_t, Primitive.cexpr!("OFFSETOF(((struct rb_callcache *)NULL)->aux_.attr, index)")],
+ dest_shape_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF(((struct rb_callcache *)NULL)->aux_.attr, dest_shape_id)")],
+ ),
method_missing_reason: self.method_missing_reason,
v: self.VALUE,
), Primitive.cexpr!("OFFSETOF((*((struct rb_callcache *)NULL)), aux_)")],
@@ -502,9 +523,9 @@ module RubyVM::MJIT
def C.rb_iv_index_tbl_entry
@rb_iv_index_tbl_entry ||= CType::Struct.new(
"rb_iv_index_tbl_entry", Primitive.cexpr!("SIZEOF(struct rb_iv_index_tbl_entry)"),
- index: [CType::Immediate.parse("uint32_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), index)")],
- class_serial: [self.rb_serial_t, Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), class_serial)")],
- class_value: [self.VALUE, Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), class_value)")],
+ idx: [CType::Immediate.parse("uint32_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), idx)")],
+ source_shape_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), source_shape_id)")],
+ dest_shape_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct rb_iv_index_tbl_entry *)NULL)), dest_shape_id)")],
)
end
@@ -577,6 +598,10 @@ module RubyVM::MJIT
@VALUE ||= CType::Immediate.find(Primitive.cexpr!("SIZEOF(VALUE)"), Primitive.cexpr!("SIGNED_TYPE_P(VALUE)"))
end
+ def C.shape_id_t
+ @shape_id_t ||= CType::Immediate.find(Primitive.cexpr!("SIZEOF(shape_id_t)"), Primitive.cexpr!("SIGNED_TYPE_P(shape_id_t)"))
+ end
+
def C._Bool
CType::Bool.new
end
diff --git a/mjit_compiler.h b/mjit_compiler.h
index da79054420..b465be00fd 100644
--- a/mjit_compiler.h
+++ b/mjit_compiler.h
@@ -8,6 +8,7 @@
#include "builtin.h"
#include "mjit.h"
#include "mjit_unit.h"
+#include "shape.h"
// Macros to check if a position is already compiled using compile_status.stack_size_for_pos
#define NOT_COMPILED_STACK_SIZE -1
@@ -48,7 +49,6 @@ struct compile_status {
// Mutated optimization levels
struct rb_mjit_compile_info *compile_info;
bool merge_ivar_guards_p; // If true, merge guards of ivar accesses
- rb_serial_t ivar_serial; // ic_serial of IVC in is_entries (used only when merge_ivar_guards_p)
size_t max_ivar_index; // Max IVC index in is_entries (used only when merge_ivar_guards_p)
// If `inlined_iseqs[pos]` is not NULL, `mjit_compile_body` tries to inline ISeq there.
const struct rb_iseq_constant_body **inlined_iseqs;
diff --git a/object.c b/object.c
index 2328b20757..213f9760be 100644
--- a/object.c
+++ b/object.c
@@ -39,6 +39,7 @@
#include "ruby/util.h"
#include "ruby/assert.h"
#include "builtin.h"
+#include "shape.h"
/*!
* \addtogroup object
@@ -271,9 +272,33 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
VALUE *src_buf = ROBJECT_IVPTR(obj);
uint32_t dest_len = ROBJECT_NUMIV(dest);
uint32_t src_len = ROBJECT_NUMIV(obj);
- uint32_t len = dest_len < src_len ? dest_len : src_len;
+ uint32_t max_len = dest_len < src_len ? src_len : dest_len;
- MEMCPY(dest_buf, src_buf, VALUE, len);
+ rb_ensure_iv_list_size(dest, dest_len, max_len);
+
+ dest_len = ROBJECT_NUMIV(dest);
+ uint32_t min_len = dest_len > src_len ? src_len : dest_len;
+
+ if (RBASIC(obj)->flags & ROBJECT_EMBED) {
+ src_buf = ROBJECT(obj)->as.ary;
+
+ // embedded -> embedded
+ if (RBASIC(dest)->flags & ROBJECT_EMBED) {
+ dest_buf = ROBJECT(dest)->as.ary;
+ }
+ // embedded -> extended
+ else {
+ dest_buf = ROBJECT(dest)->as.heap.ivptr;
+ }
+ }
+ // extended -> extended
+ else {
+ RUBY_ASSERT(!(RBASIC(dest)->flags & ROBJECT_EMBED));
+ dest_buf = ROBJECT(dest)->as.heap.ivptr;
+ src_buf = ROBJECT(obj)->as.heap.ivptr;
+ }
+
+ MEMCPY(dest_buf, src_buf, VALUE, min_len);
}
static void
@@ -283,10 +308,23 @@ init_copy(VALUE dest, VALUE obj)
rb_raise(rb_eTypeError, "[bug] frozen object (%s) allocated", rb_obj_classname(dest));
}
RBASIC(dest)->flags &= ~(T_MASK|FL_EXIVAR);
+ // Copies the shape id from obj to dest
RBASIC(dest)->flags |= RBASIC(obj)->flags & (T_MASK|FL_EXIVAR);
rb_copy_wb_protected_attribute(dest, obj);
rb_copy_generic_ivar(dest, obj);
rb_gc_copy_finalizer(dest, obj);
+
+ rb_shape_t *shape_to_set = rb_shape_get_shape(obj);
+
+ // If the object is frozen, the "dup"'d object will *not* be frozen,
+ // so we need to copy the frozen shape's parent to the new object.
+ if (rb_shape_frozen_shape_p(shape_to_set)) {
+ shape_to_set = shape_to_set->parent;
+ }
+
+ // shape ids are different
+ rb_shape_set_shape(dest, shape_to_set);
+
if (RB_TYPE_P(obj, T_OBJECT)) {
rb_obj_copy_ivar(dest, obj);
}
@@ -392,6 +430,9 @@ mutable_obj_clone(VALUE obj, VALUE kwfreeze)
case Qnil:
rb_funcall(clone, id_init_clone, 1, obj);
RBASIC(clone)->flags |= RBASIC(obj)->flags & FL_FREEZE;
+ if (RB_OBJ_FROZEN(obj)) {
+ rb_shape_transition_shape_frozen(clone);
+ }
break;
case Qtrue:
{
@@ -407,6 +448,7 @@ mutable_obj_clone(VALUE obj, VALUE kwfreeze)
argv[1] = freeze_true_hash;
rb_funcallv_kw(clone, id_init_clone, 2, argv, RB_PASS_KEYWORDS);
RBASIC(clone)->flags |= FL_FREEZE;
+ rb_shape_transition_shape_frozen(clone);
break;
}
case Qfalse:
diff --git a/ractor_core.h b/ractor_core.h
index a065f5f809..9d4b8387c7 100644
--- a/ractor_core.h
+++ b/ractor_core.h
@@ -289,11 +289,13 @@ rb_ractor_id(const rb_ractor_t *r)
#if RACTOR_CHECK_MODE > 0
uint32_t rb_ractor_current_id(void);
+// If ractor check mode is enabled, shape bits need to be small enough (16) to leave room for the ractor id in flags
+STATIC_ASSERT(shape_bits, SHAPE_BITS == 16);
static inline void
rb_ractor_setup_belonging_to(VALUE obj, uint32_t rid)
{
- VALUE flags = RBASIC(obj)->flags & 0xffffffff; // 4B
+ VALUE flags = RBASIC(obj)->flags & 0xffff0000ffffffff; // 4B
RBASIC(obj)->flags = flags | ((VALUE)rid << 32);
}
@@ -310,7 +312,7 @@ rb_ractor_belonging(VALUE obj)
return 0;
}
else {
- return RBASIC(obj)->flags >> 32;
+ return RBASIC(obj)->flags >> 32 & 0xFFFF;
}
}
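
The new mask in `rb_ractor_setup_belonging_to` encodes the resulting flag layout on 64-bit debug builds (`RACTOR_CHECK_MODE > 0`, `SHAPE_BITS == 16`): the low 32 bits keep the ordinary `FL_*` flags, bits 32..47 hold the ractor id, and the top 16 bits hold the shape id, which the mask `0xffff0000ffffffff` is careful not to clobber. A small self-contained Ruby sketch of the packing (the values are made up):

```ruby
RACTOR_ID_SHIFT = 32
SHAPE_ID_SHIFT  = 48 # SHAPE_FLAG_SHIFT on 64-bit VALUEs with SHAPE_BITS == 16

flags = 0xABCD_0000_0000_0007 # fake shape id 0xABCD in the top 16 bits

# rb_ractor_setup_belonging_to: clear only bits 32..47, then store the ractor id.
flags = (flags & 0xffff_0000_ffff_ffff) | (42 << RACTOR_ID_SHIFT)

(flags >> RACTOR_ID_SHIFT) & 0xFFFF # => 42, what rb_ractor_belonging now extracts
flags >> SHAPE_ID_SHIFT             # => 0xABCD, the shape id is left untouched
```
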
diff --git a/shape.c b/shape.c
new file mode 100644
index 0000000000..fa91466340
--- /dev/null
+++ b/shape.c
@@ -0,0 +1,571 @@
+#include "vm_core.h"
+#include "vm_sync.h"
+#include "shape.h"
+#include "internal/class.h"
+#include "internal/symbol.h"
+#include "internal/variable.h"
+#include <stdbool.h>
+
+/*
+ * Shape getters
+ */
+static rb_shape_t*
+rb_shape_get_root_shape(void) {
+ return GET_VM()->root_shape;
+}
+
+static rb_shape_t*
+rb_shape_get_frozen_root_shape(void) {
+ return GET_VM()->frozen_root_shape;
+}
+
+bool
+rb_shape_root_shape_p(rb_shape_t* shape) {
+ return shape == rb_shape_get_root_shape();
+}
+
+rb_shape_t*
+rb_shape_get_shape_by_id(shape_id_t shape_id)
+{
+ RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
+
+ rb_vm_t *vm = GET_VM();
+ rb_shape_t *shape = vm->shape_list[shape_id];
+ RUBY_ASSERT(IMEMO_TYPE_P(shape, imemo_shape));
+ return shape;
+}
+
+rb_shape_t*
+rb_shape_get_shape_by_id_without_assertion(shape_id_t shape_id)
+{
+ RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
+
+ rb_vm_t *vm = GET_VM();
+ rb_shape_t *shape = vm->shape_list[shape_id];
+ return shape;
+}
+
+static inline shape_id_t
+shape_set_shape_id(rb_shape_t *shape, shape_id_t id) {
+ VALUE flags = shape->flags & ~((uint64_t)SHAPE_MASK << 16);
+ return (shape_id_t)(shape->flags = (flags | ((VALUE)id << SHAPE_FLAG_SHIFT)));
+}
+
+#if !SHAPE_IN_BASIC_FLAGS
+static inline shape_id_t
+RCLASS_SHAPE_ID(VALUE obj)
+{
+ return RCLASS_EXT(obj)->shape_id;
+}
+
+shape_id_t rb_generic_shape_id(VALUE obj);
+#endif
+
+shape_id_t
+rb_shape_get_shape_id(VALUE obj)
+{
+ if (RB_SPECIAL_CONST_P(obj)) {
+ return SHAPE_ID(rb_shape_get_frozen_root_shape());
+ }
+
+#if SHAPE_IN_BASIC_FLAGS
+ return RBASIC_SHAPE_ID(obj);
+#else
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ return ROBJECT_SHAPE_ID(obj);
+ break;
+ case T_CLASS:
+ case T_MODULE:
+ return RCLASS_SHAPE_ID(obj);
+ default:
+ return rb_generic_shape_id(obj);
+ }
+#endif
+}
+
+rb_shape_t*
+rb_shape_get_shape(VALUE obj)
+{
+ return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj));
+}
+
+static shape_id_t
+get_next_shape_id(void)
+{
+ rb_vm_t *vm = GET_VM();
+ vm->max_shape_count++;
+ return vm->max_shape_count;
+}
+
+static rb_shape_t *
+rb_shape_lookup_id(rb_shape_t* shape, ID id, enum shape_type shape_type) {
+ while (shape->parent) {
+ if (shape->edge_name == id) {
+ // If the shape type is different, we don't
+ // want this to count as a "found" ID
+ if (shape_type == (enum shape_type)shape->type) {
+ return shape;
+ }
+ else {
+ return NULL;
+ }
+ }
+ shape = shape->parent;
+ }
+ return NULL;
+}
+
+static rb_shape_t*
+get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type shape_type)
+{
+ rb_shape_t *res = NULL;
+ RUBY_ASSERT(SHAPE_FROZEN != (enum shape_type)shape->type);
+ RB_VM_LOCK_ENTER();
+ {
+ if (rb_shape_lookup_id(shape, id, shape_type)) {
+ // If the shape already contains the ivar being set, return it unchanged
+ res = shape;
+ }
+ else {
+ if (!shape->edges) {
+ shape->edges = rb_id_table_create(0);
+ }
+
+ // Look up the transition in this shape's edges. If there is already an edge with a
+ // live shape for this ID we can return that; otherwise we need to create a new shape
+ if (!rb_id_table_lookup(shape->edges, id, (VALUE *)&res) || rb_objspace_garbage_object_p((VALUE)res)) {
+ // Either no edge exists for this ID yet, or the cached child shape is garbage and must be recreated
+ if (res) {
+ rb_id_table_delete(shape->edges, id);
+ res->parent = NULL;
+ }
+
+ shape_id_t next_shape_id = get_next_shape_id();
+
+ if (next_shape_id == MAX_SHAPE_ID) {
+ // TODO: Make an OutOfShapesError ??
+ rb_bug("Out of shapes\n");
+ }
+ else {
+ RUBY_ASSERT(next_shape_id < MAX_SHAPE_ID);
+ rb_shape_t * new_shape = rb_shape_alloc(next_shape_id,
+ id,
+ shape);
+
+ new_shape->type = (uint8_t)shape_type;
+
+ switch(shape_type) {
+ case SHAPE_FROZEN:
+ RB_OBJ_FREEZE_RAW((VALUE)new_shape);
+ break;
+ case SHAPE_IVAR:
+ new_shape->iv_count = new_shape->parent->iv_count + 1;
+
+ // Check if we should update max_iv_count on the object's class
+ if (BUILTIN_TYPE(obj) == T_OBJECT) {
+ VALUE klass = rb_obj_class(obj);
+ if (new_shape->iv_count > RCLASS_EXT(klass)->max_iv_count) {
+ RCLASS_EXT(klass)->max_iv_count = new_shape->iv_count;
+ }
+ }
+ break;
+ case SHAPE_IVAR_UNDEF:
+ new_shape->iv_count = new_shape->parent->iv_count;
+ break;
+ case SHAPE_ROOT:
+ rb_bug("Unreachable");
+ break;
+ }
+
+ rb_id_table_insert(shape->edges, id, (VALUE)new_shape);
+ RB_OBJ_WRITTEN((VALUE)shape, Qundef, (VALUE)new_shape);
+ rb_shape_set_shape_by_id(next_shape_id, new_shape);
+
+ res = new_shape;
+ }
+ }
+ }
+ }
+ RB_VM_LOCK_LEAVE();
+ return res;
+}
+
+MJIT_FUNC_EXPORTED int
+rb_shape_frozen_shape_p(rb_shape_t* shape)
+{
+ return SHAPE_FROZEN == (enum shape_type)shape->type;
+}
+
+void
+rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape)
+{
+ rb_shape_t* next_shape = get_next_shape_internal(shape, id, obj, SHAPE_IVAR_UNDEF);
+
+ if (shape == next_shape) {
+ return;
+ }
+
+ RUBY_ASSERT(!rb_objspace_garbage_object_p((VALUE)next_shape));
+ rb_shape_set_shape(obj, next_shape);
+}
+
+void
+rb_shape_transition_shape_frozen(VALUE obj)
+{
+ rb_shape_t* shape = rb_shape_get_shape(obj);
+ RUBY_ASSERT(shape);
+ RUBY_ASSERT(RB_OBJ_FROZEN(obj));
+
+ if (rb_shape_frozen_shape_p(shape)) {
+ return;
+ }
+
+ rb_shape_t* next_shape;
+
+ if (shape == rb_shape_get_root_shape()) {
+ switch(BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ case T_CLASS:
+ case T_MODULE:
+ break;
+ default:
+ return;
+ }
+ next_shape = rb_shape_get_frozen_root_shape();
+ }
+ else {
+ static ID id_frozen;
+ if (!id_frozen) {
+ id_frozen = rb_make_internal_id();
+ }
+
+ next_shape = get_next_shape_internal(shape, (ID)id_frozen, obj, SHAPE_FROZEN);
+ }
+
+ RUBY_ASSERT(next_shape);
+ rb_shape_set_shape(obj, next_shape);
+}
+
+void
+rb_shape_transition_shape(VALUE obj, ID id, rb_shape_t *shape)
+{
+ rb_shape_t* next_shape = rb_shape_get_next(shape, obj, id);
+ if (shape == next_shape) {
+ return;
+ }
+
+ RUBY_ASSERT(!rb_objspace_garbage_object_p((VALUE)next_shape));
+ rb_shape_set_shape(obj, next_shape);
+}
+
+rb_shape_t*
+rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id)
+{
+ return get_next_shape_internal(shape, id, obj, SHAPE_IVAR);
+}
+
+bool
+rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value) {
+ while (shape->parent) {
+ if (shape->edge_name == id) {
+ enum shape_type shape_type;
+ shape_type = (enum shape_type)shape->type;
+
+ switch(shape_type) {
+ case SHAPE_IVAR:
+ RUBY_ASSERT(shape->iv_count > 0);
+ *value = shape->iv_count - 1;
+ return true;
+ case SHAPE_IVAR_UNDEF:
+ case SHAPE_ROOT:
+ return false;
+ case SHAPE_FROZEN:
+ rb_bug("Ivar should not exist on frozen transition\n");
+ }
+ }
+ shape = shape->parent;
+ }
+ return false;
+}
+
+static rb_shape_t *
+shape_alloc(void)
+{
+ rb_shape_t *shape = (rb_shape_t *)rb_imemo_new(imemo_shape, 0, 0, 0, 0);
+ FL_SET_RAW((VALUE)shape, RUBY_FL_SHAREABLE);
+ FL_SET_RAW((VALUE)shape, RUBY_FL_PROMOTED1);
+ return shape;
+}
+
+rb_shape_t *
+rb_shape_alloc(shape_id_t shape_id, ID edge_name, rb_shape_t * parent)
+{
+ rb_shape_t * shape = shape_alloc();
+ shape_set_shape_id(shape, shape_id);
+
+ shape->edge_name = edge_name;
+ shape->iv_count = 0;
+
+ RB_OBJ_WRITE(shape, &shape->parent, parent);
+
+ RUBY_ASSERT(!parent || IMEMO_TYPE_P(parent, imemo_shape));
+
+ return shape;
+}
+
+MJIT_FUNC_EXPORTED void
+rb_shape_set_shape(VALUE obj, rb_shape_t* shape)
+{
+ RUBY_ASSERT(IMEMO_TYPE_P(shape, imemo_shape));
+ RUBY_ASSERT(SHAPE_FROZEN == shape->type ? RB_OBJ_FROZEN(obj) : 1);
+
+ if(rb_shape_set_shape_id(obj, SHAPE_ID(shape))) {
+ if (shape != rb_shape_get_frozen_root_shape()) {
+ RB_OBJ_WRITTEN(obj, Qundef, (VALUE)shape);
+ }
+ }
+}
+
+void
+rb_shape_set_shape_by_id(shape_id_t shape_id, rb_shape_t *shape)
+{
+ rb_vm_t *vm = GET_VM();
+
+ RUBY_ASSERT(shape == NULL || IMEMO_TYPE_P(shape, imemo_shape));
+ vm->shape_list[shape_id] = shape;
+}
+
+VALUE rb_cShape;
+
+static void
+shape_mark(void *ptr)
+{
+ rb_gc_mark((VALUE)ptr);
+}
+
+/*
+ * Exposing Shape to Ruby via RubyVM.debug_shape
+ */
+static const rb_data_type_t shape_data_type = {
+ "Shape",
+ {shape_mark, NULL, NULL,},
+ 0, 0, RUBY_TYPED_FREE_IMMEDIATELY|RUBY_TYPED_WB_PROTECTED
+};
+
+static VALUE
+rb_shape_id(VALUE self) {
+ rb_shape_t * shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+ return INT2NUM(SHAPE_ID(shape));
+}
+
+static VALUE
+rb_shape_type(VALUE self) {
+ rb_shape_t * shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+ return INT2NUM(shape->type);
+}
+
+static VALUE
+rb_shape_parent_id(VALUE self)
+{
+ rb_shape_t * shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+ if (shape->parent) {
+ return INT2NUM(SHAPE_ID(shape->parent));
+ }
+ else {
+ return Qnil;
+ }
+}
+
+static VALUE parse_key(ID key) {
+ if ((key & RUBY_ID_INTERNAL) == RUBY_ID_INTERNAL) {
+ return LONG2NUM(key);
+ } else {
+ return ID2SYM(key);
+ }
+}
+
+static VALUE
+rb_shape_t_to_rb_cShape(rb_shape_t *shape) {
+ union { const rb_shape_t *in; void *out; } deconst;
+ VALUE res;
+ deconst.in = shape;
+ res = TypedData_Wrap_Struct(rb_cShape, &shape_data_type, deconst.out);
+ RB_OBJ_WRITTEN(res, Qundef, shape);
+
+ return res;
+}
+
+static enum rb_id_table_iterator_result rb_edges_to_hash(ID key, VALUE value, void *ref)
+{
+ rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_shape_t_to_rb_cShape((rb_shape_t*)value));
+ return ID_TABLE_CONTINUE;
+}
+
+static VALUE
+rb_shape_edges(VALUE self)
+{
+ rb_shape_t* shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+
+ VALUE hash = rb_hash_new();
+
+ if (shape->edges) {
+ rb_id_table_foreach(shape->edges, rb_edges_to_hash, &hash);
+ }
+
+ return hash;
+}
+
+static VALUE
+rb_shape_edge_name(VALUE self)
+{
+ rb_shape_t* shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+
+ if (shape->edge_name) {
+ return ID2SYM(shape->edge_name);
+ }
+ else {
+ return Qnil;
+ }
+}
+
+static VALUE
+rb_shape_iv_count(VALUE self)
+{
+ rb_shape_t* shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+
+ return INT2NUM(shape->iv_count);
+}
+
+static VALUE
+rb_shape_export_depth(VALUE self)
+{
+ rb_shape_t* shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+
+ unsigned int depth = 0;
+ while (shape->parent) {
+ depth++;
+ shape = shape->parent;
+ }
+ return INT2NUM(depth);
+}
+
+static VALUE
+rb_shape_parent(VALUE self)
+{
+ rb_shape_t * shape;
+ TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+ if (shape->parent) {
+ return rb_shape_t_to_rb_cShape(shape->parent);
+ }
+ else {
+ return Qnil;
+ }
+}
+
+VALUE rb_shape_debug_shape(VALUE self, VALUE obj) {
+ return rb_shape_t_to_rb_cShape(rb_shape_get_shape(obj));
+}
+
+VALUE rb_shape_debug_root_shape(VALUE self) {
+ return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
+}
+
+VALUE rb_shape_debug_frozen_root_shape(VALUE self) {
+ return rb_shape_t_to_rb_cShape(rb_shape_get_frozen_root_shape());
+}
+
+VALUE rb_obj_shape(rb_shape_t* shape);
+
+static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
+{
+ rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_obj_shape((rb_shape_t*)value));
+ return ID_TABLE_CONTINUE;
+}
+
+static VALUE edges(struct rb_id_table* edges)
+{
+ VALUE hash = rb_hash_new();
+ if (edges)
+ rb_id_table_foreach(edges, collect_keys_and_values, &hash);
+ return hash;
+}
+
+VALUE rb_obj_shape(rb_shape_t* shape) {
+ VALUE rb_shape = rb_hash_new();
+
+ rb_hash_aset(rb_shape, ID2SYM(rb_intern("id")), INT2NUM(SHAPE_ID(shape)));
+ rb_hash_aset(rb_shape, ID2SYM(rb_intern("edges")), edges(shape->edges));
+
+ if (shape == rb_shape_get_root_shape()) {
+ rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(ROOT_SHAPE_ID));
+ }
+ else {
+ rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(SHAPE_ID(shape->parent)));
+ }
+
+ rb_hash_aset(rb_shape, ID2SYM(rb_intern("edge_name")), rb_id2str(shape->edge_name));
+ return rb_shape;
+}
+
+static VALUE shape_transition_tree(VALUE self) {
+ return rb_obj_shape(rb_shape_get_root_shape());
+}
+
+static VALUE shape_count(VALUE self) {
+ int shape_count = 0;
+ rb_vm_t *vm = GET_VM();
+ for(shape_id_t i = 0; i < vm->max_shape_count; i++) {
+ if(rb_shape_get_shape_by_id_without_assertion(i)) {
+ shape_count++;
+ }
+ }
+ return INT2NUM(shape_count);
+}
+
+static VALUE
+shape_max_shape_count(VALUE self)
+{
+ return INT2NUM(GET_VM()->max_shape_count);
+}
+
+VALUE
+rb_shape_flags_mask(void)
+{
+ return SHAPE_FLAG_MASK;
+}
+
+void
+Init_shape(void)
+{
+ rb_cShape = rb_define_class_under(rb_cRubyVM, "Shape", rb_cObject);
+ rb_undef_alloc_func(rb_cShape);
+
+ rb_define_method(rb_cShape, "parent_id", rb_shape_parent_id, 0);
+ rb_define_method(rb_cShape, "parent", rb_shape_parent, 0);
+ rb_define_method(rb_cShape, "edges", rb_shape_edges, 0);
+ rb_define_method(rb_cShape, "edge_name", rb_shape_edge_name, 0);
+ rb_define_method(rb_cShape, "iv_count", rb_shape_iv_count, 0);
+ rb_define_method(rb_cShape, "depth", rb_shape_export_depth, 0);
+ rb_define_method(rb_cShape, "id", rb_shape_id, 0);
+ rb_define_method(rb_cShape, "type", rb_shape_type, 0);
+ rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
+ rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
+ rb_define_const(rb_cShape, "SHAPE_IVAR_UNDEF", INT2NUM(SHAPE_IVAR_UNDEF));
+ rb_define_const(rb_cShape, "SHAPE_FROZEN", INT2NUM(SHAPE_FROZEN));
+ rb_define_const(rb_cShape, "SHAPE_BITS", INT2NUM(SHAPE_BITS));
+
+ rb_define_module_function(rb_cRubyVM, "debug_shape_transition_tree", shape_transition_tree, 0);
+ rb_define_module_function(rb_cRubyVM, "debug_shape_count", shape_count, 0);
+ rb_define_singleton_method(rb_cRubyVM, "debug_shape", rb_shape_debug_shape, 1);
+ rb_define_singleton_method(rb_cRubyVM, "debug_max_shape_count", shape_max_shape_count, 0);
+ rb_define_singleton_method(rb_cRubyVM, "debug_root_shape", rb_shape_debug_root_shape, 0);
+ rb_define_singleton_method(rb_cRubyVM, "debug_frozen_root_shape", rb_shape_debug_frozen_root_shape, 0);
+}
diff --git a/shape.h b/shape.h
new file mode 100644
index 0000000000..b381b9e6ba
--- /dev/null
+++ b/shape.h
@@ -0,0 +1,153 @@
+#ifndef RUBY_SHAPE_H
+#define RUBY_SHAPE_H
+#if (SIZEOF_UINT64_T == SIZEOF_VALUE)
+#define SIZEOF_SHAPE_T 4
+#define SHAPE_IN_BASIC_FLAGS 1
+typedef uint32_t attr_index_t;
+#else
+#define SIZEOF_SHAPE_T 2
+#define SHAPE_IN_BASIC_FLAGS 0
+typedef uint16_t attr_index_t;
+#endif
+
+#define MAX_IVARS (attr_index_t)(-1)
+
+#if RUBY_DEBUG || (defined(VM_CHECK_MODE) && VM_CHECK_MODE > 0)
+# if SIZEOF_SHAPE_T == 4
+typedef uint32_t shape_id_t;
+# define SHAPE_BITS 16
+# else
+typedef uint16_t shape_id_t;
+# define SHAPE_BITS 16
+# endif
+#else
+# if SIZEOF_SHAPE_T == 4
+typedef uint32_t shape_id_t;
+# define SHAPE_BITS 32
+# else
+typedef uint16_t shape_id_t;
+# define SHAPE_BITS 16
+# endif
+#endif
+
+# define SHAPE_MASK (((uintptr_t)1 << SHAPE_BITS) - 1)
+# define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_BITS)
+
+# define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * 8) - SHAPE_BITS)
+
+# define SHAPE_BITMAP_SIZE 16384
+
+# define MAX_SHAPE_ID (SHAPE_MASK - 1)
+# define INVALID_SHAPE_ID SHAPE_MASK
+# define ROOT_SHAPE_ID 0x0
+# define FROZEN_ROOT_SHAPE_ID 0x1
+
+#define SHAPE_ID(shape) ((((rb_shape_t *)shape)->flags >> SHAPE_FLAG_SHIFT) & SHAPE_MASK)
+
+struct rb_shape {
+ VALUE flags; // Shape ID and frozen status encoded within flags
+ struct rb_shape * parent; // Pointer to the parent
+ struct rb_id_table * edges; // id_table from ID (ivar) to next shape
+ ID edge_name; // ID (ivar) for transition from parent to rb_shape
+ attr_index_t iv_count;
+ uint8_t type;
+};
+
+typedef struct rb_shape rb_shape_t;
+
+enum shape_type {
+ SHAPE_ROOT,
+ SHAPE_IVAR,
+ SHAPE_FROZEN,
+ SHAPE_IVAR_UNDEF,
+};
+
+static inline shape_id_t
+IMEMO_CACHED_SHAPE_ID(VALUE cc)
+{
+ RBIMPL_ASSERT_TYPE((VALUE)cc, RUBY_T_IMEMO);
+ return (shape_id_t)(SHAPE_MASK & (RBASIC(cc)->flags >> SHAPE_FLAG_SHIFT));
+}
+
+static inline void
+IMEMO_SET_CACHED_SHAPE_ID(VALUE cc, shape_id_t shape_id)
+{
+ RBIMPL_ASSERT_TYPE((VALUE)cc, RUBY_T_IMEMO);
+ RBASIC(cc)->flags &= SHAPE_FLAG_MASK;
+ RBASIC(cc)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+}
+
+#if SHAPE_IN_BASIC_FLAGS
+static inline shape_id_t
+RBASIC_SHAPE_ID(VALUE obj)
+{
+ RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
+ return (shape_id_t)(SHAPE_MASK & ((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT));
+}
+
+static inline void
+RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+{
+ // Ractors occupy the upper 32 bits of flags, but only in debug mode;
+ // object shapes occupy the top bits
+ RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
+ RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+}
+
+static inline shape_id_t
+ROBJECT_SHAPE_ID(VALUE obj)
+{
+ RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
+ return RBASIC_SHAPE_ID(obj);
+}
+
+static inline void
+ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+{
+ RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
+ RBASIC_SET_SHAPE_ID(obj, shape_id);
+}
+
+#else
+
+static inline shape_id_t
+ROBJECT_SHAPE_ID(VALUE obj)
+{
+ RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
+ return (shape_id_t)(SHAPE_MASK & (RBASIC(obj)->flags >> SHAPE_FLAG_SHIFT));
+}
+
+static inline void
+ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+{
+ RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
+ RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+}
+#endif
+
+bool rb_shape_root_shape_p(rb_shape_t* shape);
+
+rb_shape_t* rb_shape_get_shape_by_id_without_assertion(shape_id_t shape_id);
+
+MJIT_SYMBOL_EXPORT_BEGIN
+rb_shape_t* rb_shape_get_shape_by_id(shape_id_t shape_id);
+void rb_shape_set_shape(VALUE obj, rb_shape_t* shape);
+shape_id_t rb_shape_get_shape_id(VALUE obj);
+rb_shape_t* rb_shape_get_shape(VALUE obj);
+int rb_shape_frozen_shape_p(rb_shape_t* shape);
+void rb_shape_transition_shape_frozen(VALUE obj);
+void rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape);
+void rb_shape_transition_shape(VALUE obj, ID id, rb_shape_t *shape);
+rb_shape_t* rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id);
+bool rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t * value);
+MJIT_SYMBOL_EXPORT_END
+
+rb_shape_t * rb_shape_alloc(shape_id_t shape_id, ID edge_name, rb_shape_t * parent);
+
+bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);
+void rb_shape_set_shape_by_id(shape_id_t, rb_shape_t *);
+
+VALUE rb_obj_debug_shape(VALUE self, VALUE obj);
+VALUE rb_shape_flags_mask(void);
+
+#endif
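The macros above pack the shape ID into the top SHAPE_BITS of the flags word, which is what SHAPE_ID, RBASIC_SHAPE_ID and RBASIC_SET_SHAPE_ID read back. A minimal standalone sketch of that packing, assuming a 64-bit VALUE and the non-debug SHAPE_BITS == 32 case (the constants and helper names below are illustrative, not CRuby source):

```ruby
# Sketch of the flag packing used by SHAPE_ID / RBASIC_SET_SHAPE_ID above.
# Assumes SIZEOF_VALUE == 8 and SHAPE_BITS == 32 (non-debug build).
SHAPE_BITS       = 32
SHAPE_FLAG_SHIFT = (8 * 8) - SHAPE_BITS          # 32
SHAPE_MASK       = (1 << SHAPE_BITS) - 1
SHAPE_FLAG_MASK  = (1 << SHAPE_FLAG_SHIFT) - 1   # low bits kept for FL_* flags

def set_shape_id(flags, shape_id)
  (flags & SHAPE_FLAG_MASK) | (shape_id << SHAPE_FLAG_SHIFT)
end

def get_shape_id(flags)
  (flags >> SHAPE_FLAG_SHIFT) & SHAPE_MASK
end

flags = 0x8005                        # some ordinary FL_* bits
flags = set_shape_id(flags, 2)        # object transitioned to shape id 2
puts format("%#018x", flags)          # => 0x0000000200008005
puts get_shape_id(flags)              # => 2
```

Because only the top bits change, the ordinary FL_* flags in the low bits are untouched by a shape transition.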
diff --git a/spec/ruby/library/objectspace/reachable_objects_from_spec.rb b/spec/ruby/library/objectspace/reachable_objects_from_spec.rb
index 7e70bc8569..61ebc9f93d 100644
--- a/spec/ruby/library/objectspace/reachable_objects_from_spec.rb
+++ b/spec/ruby/library/objectspace/reachable_objects_from_spec.rb
@@ -17,7 +17,7 @@ describe "ObjectSpace.reachable_objects_from" do
it "enumerates objects directly reachable from a given object" do
ObjectSpace.reachable_objects_from(['a', 'b', 'c']).should include(Array, 'a', 'b', 'c')
- ObjectSpace.reachable_objects_from(Object.new).should == [Object]
+ ObjectSpace.reachable_objects_from(Object.new).should include(Object)
end
it "finds an object stored in an Array" do
diff --git a/spec/ruby/optional/capi/shared/rbasic.rb b/spec/ruby/optional/capi/shared/rbasic.rb
index 99c2044bd7..105408ec3d 100644
--- a/spec/ruby/optional/capi/shared/rbasic.rb
+++ b/spec/ruby/optional/capi/shared/rbasic.rb
@@ -8,16 +8,9 @@ describe :rbasic, shared: true do
it "reports the appropriate FREEZE flag for the object when reading" do
obj, _ = @data.call
- initial = @specs.get_flags(obj)
+ (@specs.get_flags(obj) & @freeze).should == 0
obj.freeze
- @specs.get_flags(obj).should == @freeze | initial
- end
-
- it "supports setting the FREEZE flag" do
- obj, _ = @data.call
- initial = @specs.get_flags(obj)
- @specs.set_flags(obj, @freeze | initial).should == @freeze | initial
- obj.should.frozen?
+ (@specs.get_flags(obj) & @freeze).should == @freeze
end
it "supports retrieving the (meta)class" do
diff --git a/test/-ext-/marshal/test_internal_ivar.rb b/test/-ext-/marshal/test_internal_ivar.rb
index a32138f6e8..9359c7f113 100644
--- a/test/-ext-/marshal/test_internal_ivar.rb
+++ b/test/-ext-/marshal/test_internal_ivar.rb
@@ -7,6 +7,7 @@ module Bug end
module Bug::Marshal
class TestInternalIVar < Test::Unit::TestCase
def test_marshal
+ pend "We don't support IVs with ID of 0"
v = InternalIVar.new("hello", "world", "bye")
assert_equal("hello", v.normal)
assert_equal("world", v.internal)
diff --git a/test/objspace/test_objspace.rb b/test/objspace/test_objspace.rb
index 3b90319858..94d68dc093 100644
--- a/test/objspace/test_objspace.rb
+++ b/test/objspace/test_objspace.rb
@@ -116,12 +116,16 @@ class TestObjSpace < Test::Unit::TestCase
opts = %w[--disable-gem --disable=frozen-string-literal -robjspace]
assert_separately opts, "#{<<-"begin;"}\n#{<<-'end;'}"
begin;
- assert_equal(nil, ObjectSpace.reachable_objects_from(nil))
- assert_equal([Array, 'a', 'b', 'c'], ObjectSpace.reachable_objects_from(['a', 'b', 'c']))
+ def assert_reachable_object_as_expected(expectation, reachable_objects_from_array)
+ reachable_objects = ObjectSpace.reachable_objects_from(reachable_objects_from_array)
+ assert_equal(expectation, reachable_objects)
+ end
- assert_equal([Array, 'a', 'a', 'a'], ObjectSpace.reachable_objects_from(['a', 'a', 'a']))
- assert_equal([Array, 'a', 'a'], ObjectSpace.reachable_objects_from(['a', v = 'a', v]))
- assert_equal([Array, 'a'], ObjectSpace.reachable_objects_from([v = 'a', v, v]))
+ assert_equal(nil, ObjectSpace.reachable_objects_from(nil))
+ assert_reachable_object_as_expected([Array, 'a', 'b', 'c'], ['a', 'b', 'c'])
+ assert_reachable_object_as_expected([Array, 'a', 'a', 'a'], ['a', 'a', 'a'])
+ assert_reachable_object_as_expected([Array, 'a', 'a'], ['a', v = 'a', v])
+ assert_reachable_object_as_expected([Array, 'a'], [v = 'a', v, v])
long_ary = Array.new(1_000){''}
max = 0
diff --git a/test/ruby/test_mjit.rb b/test/ruby/test_mjit.rb
index e49195f763..4c6cc6f39f 100644
--- a/test/ruby/test_mjit.rb
+++ b/test/ruby/test_mjit.rb
@@ -831,7 +831,7 @@ class TestMJIT < Test::Unit::TestCase
end
def test_inlined_exivar
- assert_eval_with_jit("#{<<~"begin;"}\n#{<<~"end;"}", stdout: "aaa", success_count: 3, recompile_count: 1, min_calls: 2)
+ assert_eval_with_jit("#{<<~"begin;"}\n#{<<~"end;"}", stdout: "aaa", success_count: 4, recompile_count: 2, min_calls: 2)
begin;
class Foo < Hash
def initialize
@@ -850,7 +850,7 @@ class TestMJIT < Test::Unit::TestCase
end
def test_inlined_undefined_ivar
- assert_eval_with_jit("#{<<~"begin;"}\n#{<<~"end;"}", stdout: "bbb", success_count: 3, min_calls: 3)
+ assert_eval_with_jit("#{<<~"begin;"}\n#{<<~"end;"}", stdout: "bbb", success_count: 2, min_calls: 2)
begin;
class Foo
def initialize
diff --git a/test/ruby/test_shapes.rb b/test/ruby/test_shapes.rb
new file mode 100644
index 0000000000..cdca08dbb4
--- /dev/null
+++ b/test/ruby/test_shapes.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: false
+require 'test/unit'
+
+# These test the functionality of object shapes
+class TestShapes < Test::Unit::TestCase
+ class Example
+ def initialize
+ @a = 1
+ end
+ end
+
+ class RemoveAndAdd
+ def add_foo
+ @foo = 1
+ end
+
+ def remove
+ remove_instance_variable(:@foo)
+ end
+
+ def add_bar
+ @bar = 1
+ end
+ end
+
+ # RubyVM.debug_shape returns new instances of shape objects for
+ # each call. This helper method allows us to define equality for
+ # shapes
+ def assert_shape_equal(shape1, shape2)
+ assert_equal(shape1.id, shape2.id)
+ assert_equal(shape1.parent_id, shape2.parent_id)
+ assert_equal(shape1.depth, shape2.depth)
+ assert_equal(shape1.type, shape2.type)
+ end
+
+ def refute_shape_equal(shape1, shape2)
+ refute_equal(shape1.id, shape2.id)
+ end
+
+ def test_iv_index
+ example = RemoveAndAdd.new
+ shape = RubyVM.debug_shape(example)
+ assert_equal 0, shape.iv_count
+
+ example.add_foo # makes a transition
+ new_shape = RubyVM.debug_shape(example)
+ assert_equal([:@foo], example.instance_variables)
+ assert_equal(shape.id, new_shape.parent.id)
+ assert_equal(1, new_shape.iv_count)
+
+ example.remove # makes a transition
+ remove_shape = RubyVM.debug_shape(example)
+ assert_equal([], example.instance_variables)
+ assert_equal(new_shape.id, remove_shape.parent.id)
+ assert_equal(1, remove_shape.iv_count)
+
+ example.add_bar # makes a transition
+ bar_shape = RubyVM.debug_shape(example)
+ assert_equal([:@bar], example.instance_variables)
+ assert_equal(remove_shape.id, bar_shape.parent.id)
+ assert_equal(2, bar_shape.iv_count)
+ end
+
+ def test_new_obj_has_root_shape
+ assert_shape_equal(RubyVM.debug_root_shape, RubyVM.debug_shape(Object.new))
+ end
+
+ def test_frozen_new_obj_has_frozen_root_shape
+ assert_shape_equal(
+ RubyVM.debug_frozen_root_shape,
+ RubyVM.debug_shape(Object.new.freeze)
+ )
+ end
+
+ def test_str_has_root_shape
+ assert_shape_equal(RubyVM.debug_root_shape, RubyVM.debug_shape(""))
+ end
+
+ def test_array_has_root_shape
+ assert_shape_equal(RubyVM.debug_root_shape, RubyVM.debug_shape([]))
+ end
+
+ def test_hash_has_root_shape
+ assert_shape_equal(RubyVM.debug_root_shape, RubyVM.debug_shape({}))
+ end
+
+ def test_true_has_frozen_root_shape
+ assert_shape_equal(RubyVM.debug_frozen_root_shape, RubyVM.debug_shape(true))
+ end
+
+ def test_nil_has_frozen_root_shape
+ assert_shape_equal(RubyVM.debug_frozen_root_shape, RubyVM.debug_shape(nil))
+ end
+
+ def test_basic_shape_transition
+ obj = Example.new
+ refute_equal(RubyVM.debug_root_shape, RubyVM.debug_shape(obj))
+ assert_shape_equal(RubyVM.debug_root_shape.edges[:@a], RubyVM.debug_shape(obj))
+ assert_equal(obj.instance_variable_get(:@a), 1)
+ end
+
+ def test_different_objects_make_same_transition
+ obj = Example.new
+ obj2 = ""
+ obj2.instance_variable_set(:@a, 1)
+ assert_shape_equal(RubyVM.debug_shape(obj), RubyVM.debug_shape(obj2))
+ end
+
+ def test_duplicating_objects
+ obj = Example.new
+ obj2 = obj.dup
+ assert_shape_equal(RubyVM.debug_shape(obj), RubyVM.debug_shape(obj2))
+ end
+
+ def test_freezing_and_duplicating_object
+ obj = Object.new.freeze
+ obj2 = obj.dup
+ refute_predicate(obj2, :frozen?)
+ refute_equal(RubyVM.debug_shape(obj).id, RubyVM.debug_shape(obj2).id)
+ end
+
+ def test_freezing_and_duplicating_object_with_ivars
+ obj = Example.new.freeze
+ obj2 = obj.dup
+ refute_predicate(obj2, :frozen?)
+ refute_shape_equal(RubyVM.debug_shape(obj), RubyVM.debug_shape(obj2))
+ assert_equal(obj2.instance_variable_get(:@a), 1)
+ end
+
+ def test_freezing_and_duplicating_string_with_ivars
+ str = "str"
+ str.instance_variable_set(:@a, 1)
+ str.freeze
+ str2 = str.dup
+ refute_predicate(str2, :frozen?)
+ refute_equal(RubyVM.debug_shape(str).id, RubyVM.debug_shape(str2).id)
+ assert_equal(str2.instance_variable_get(:@a), 1)
+ end
+
+ def test_freezing_and_cloning_objects
+ obj = Object.new.freeze
+ obj2 = obj.clone(freeze: true)
+ assert_predicate(obj2, :frozen?)
+ assert_shape_equal(RubyVM.debug_shape(obj), RubyVM.debug_shape(obj2))
+ end
+
+ def test_freezing_and_cloning_object_with_ivars
+ obj = Example.new.freeze
+ obj2 = obj.clone(freeze: true)
+ assert_predicate(obj2, :frozen?)
+ assert_shape_equal(RubyVM.debug_shape(obj), RubyVM.debug_shape(obj2))
+ assert_equal(obj2.instance_variable_get(:@a), 1)
+ end
+
+ def test_freezing_and_cloning_string
+ str = "str".freeze
+ str2 = str.clone(freeze: true)
+ assert_predicate(str2, :frozen?)
+ assert_shape_equal(RubyVM.debug_shape(str), RubyVM.debug_shape(str2))
+ end
+
+ def test_freezing_and_cloning_string_with_ivars
+ str = "str"
+ str.instance_variable_set(:@a, 1)
+ str.freeze
+ str2 = str.clone(freeze: true)
+ assert_predicate(str2, :frozen?)
+ assert_shape_equal(RubyVM.debug_shape(str), RubyVM.debug_shape(str2))
+ assert_equal(str2.instance_variable_get(:@a), 1)
+ end
+end
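The tests above pin down how the debug helpers expose the transition tree; a condensed usage sketch follows (the return values shown are the ones the tests assert, assuming a T_OBJECT instance whose only ivar is @a):

```ruby
o = Object.new
o.instance_variable_set(:@a, 1)

child = RubyVM.debug_shape(o)
root  = RubyVM.debug_root_shape

p child.parent_id == root.id   # => true: the @a shape hangs off the root shape
p root.edges.key?(:@a)         # => true: the transition is recorded as an edge
p child.iv_count               # => 1
```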
diff --git a/tool/mjit/bindgen.rb b/tool/mjit/bindgen.rb
index d0f9bf527b..8c21d42449 100755
--- a/tool/mjit/bindgen.rb
+++ b/tool/mjit/bindgen.rb
@@ -341,12 +341,17 @@ generator = BindingGenerator.new(
VM_METHOD_TYPE_CFUNC
VM_METHOD_TYPE_ISEQ
],
+ ULONG: %w[
+ INVALID_SHAPE_ID
+ SHAPE_MASK
+ ],
},
types: %w[
CALL_DATA
IC
IVC
RB_BUILTIN
+ attr_index_t
compile_branch
compile_status
inlined_call_context
@@ -360,10 +365,10 @@ generator = BindingGenerator.new(
rb_callable_method_entry_struct
rb_callcache
rb_callinfo
- rb_cref_t
rb_control_frame_t
- rb_execution_context_t
+ rb_cref_t
rb_execution_context_struct
+ rb_execution_context_t
rb_iseq_constant_body
rb_iseq_location_t
rb_iseq_struct
@@ -378,6 +383,7 @@ generator = BindingGenerator.new(
],
dynamic_types: %w[
VALUE
+ shape_id_t
],
skip_fields: {
'rb_execution_context_struct.machine': %w[regs], # differs between macOS and Linux
diff --git a/variable.c b/variable.c
index 056a1000b8..a6f6d5ec1b 100644
--- a/variable.c
+++ b/variable.c
@@ -34,6 +34,7 @@
#include "ruby/st.h"
#include "ruby/util.h"
#include "transient_heap.h"
+#include "shape.h"
#include "variable.h"
#include "vm_core.h"
#include "ractor_core.h"
@@ -63,12 +64,9 @@ static VALUE rb_const_search(VALUE klass, ID id, int exclude, int recurse, int v
static st_table *generic_iv_tbl_;
struct ivar_update {
- union {
- st_table *iv_index_tbl;
- struct gen_ivtbl *ivtbl;
- } u;
- st_data_t index;
- int iv_extended;
+ struct gen_ivtbl *ivtbl;
+ uint32_t iv_index;
+ rb_shape_t* shape;
};
void
@@ -896,30 +894,6 @@ rb_alias_variable(ID name1, ID name2)
entry1->var = entry2->var;
}
-static bool
-iv_index_tbl_lookup(struct st_table *tbl, ID id, uint32_t *indexp)
-{
- st_data_t ent_data;
- int r;
-
- if (tbl == NULL) return false;
-
- RB_VM_LOCK_ENTER();
- {
- r = st_lookup(tbl, (st_data_t)id, &ent_data);
- }
- RB_VM_LOCK_LEAVE();
-
- if (r) {
- struct rb_iv_index_tbl_entry *ent = (void *)ent_data;
- *indexp = ent->index;
- return true;
- }
- else {
- return false;
- }
-}
-
static void
IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(ID id)
{
@@ -957,7 +931,20 @@ generic_ivtbl_no_ractor_check(VALUE obj)
}
static int
-gen_ivtbl_get(VALUE obj, ID id, struct gen_ivtbl **ivtbl)
+gen_ivtbl_get_unlocked(VALUE obj, ID id, struct gen_ivtbl **ivtbl)
+{
+ st_data_t data;
+
+ if (st_lookup(generic_ivtbl(obj, id, false), (st_data_t)obj, &data)) {
+ *ivtbl = (struct gen_ivtbl *)data;
+ return 1;
+ }
+
+ return 0;
+}
+
+MJIT_FUNC_EXPORTED int
+rb_gen_ivtbl_get(VALUE obj, ID id, struct gen_ivtbl **ivtbl)
{
st_data_t data;
int r = 0;
@@ -977,63 +964,7 @@ gen_ivtbl_get(VALUE obj, ID id, struct gen_ivtbl **ivtbl)
MJIT_FUNC_EXPORTED int
rb_ivar_generic_ivtbl_lookup(VALUE obj, struct gen_ivtbl **ivtbl)
{
- return gen_ivtbl_get(obj, 0, ivtbl);
-}
-
-MJIT_FUNC_EXPORTED VALUE
-rb_ivar_generic_lookup_with_index(VALUE obj, ID id, uint32_t index)
-{
- struct gen_ivtbl *ivtbl;
-
- if (gen_ivtbl_get(obj, id, &ivtbl)) {
- if (LIKELY(index < ivtbl->numiv)) {
- VALUE val = ivtbl->ivptr[index];
- return val;
- }
- }
-
- return Qundef;
-}
-
-static VALUE
-generic_ivar_delete(VALUE obj, ID id, VALUE undef)
-{
- struct gen_ivtbl *ivtbl;
-
- if (gen_ivtbl_get(obj, id, &ivtbl)) {
- st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
- uint32_t index;
-
- if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &index)) {
- if (index < ivtbl->numiv) {
- VALUE ret = ivtbl->ivptr[index];
-
- ivtbl->ivptr[index] = Qundef;
- return ret == Qundef ? undef : ret;
- }
- }
- }
- return undef;
-}
-
-static VALUE
-generic_ivar_get(VALUE obj, ID id, VALUE undef)
-{
- struct gen_ivtbl *ivtbl;
-
- if (gen_ivtbl_get(obj, id, &ivtbl)) {
- st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
- uint32_t index;
-
- if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &index)) {
- if (index < ivtbl->numiv) {
- VALUE ret = ivtbl->ivptr[index];
-
- return ret == Qundef ? undef : ret;
- }
- }
- }
- return undef;
+ return rb_gen_ivtbl_get(obj, 0, ivtbl);
}
static size_t
@@ -1045,6 +976,8 @@ gen_ivtbl_bytes(size_t n)
static struct gen_ivtbl *
gen_ivtbl_resize(struct gen_ivtbl *old, uint32_t n)
{
+ RUBY_ASSERT(n > 0);
+
uint32_t len = old ? old->numiv : 0;
struct gen_ivtbl *ivtbl = xrealloc(old, gen_ivtbl_bytes(n));
@@ -1069,18 +1002,6 @@ gen_ivtbl_dup(const struct gen_ivtbl *orig)
}
#endif
-static uint32_t
-iv_index_tbl_newsize(struct ivar_update *ivup)
-{
- if (!ivup->iv_extended) {
- return (uint32_t)ivup->u.iv_index_tbl->num_entries;
- }
- else {
- uint32_t index = (uint32_t)ivup->index; /* should not overflow */
- return (index+1) + (index+1)/4; /* (index+1)*1.25 */
- }
-}
-
static int
generic_ivar_update(st_data_t *k, st_data_t *v, st_data_t u, int existing)
{
@@ -1091,53 +1012,22 @@ generic_ivar_update(st_data_t *k, st_data_t *v, st_data_t u, int existing)
if (existing) {
ivtbl = (struct gen_ivtbl *)*v;
- if (ivup->index < ivtbl->numiv) {
- ivup->u.ivtbl = ivtbl;
+ if (ivup->iv_index < ivtbl->numiv) {
+ ivup->ivtbl = ivtbl;
return ST_STOP;
}
}
FL_SET((VALUE)*k, FL_EXIVAR);
- uint32_t newsize = iv_index_tbl_newsize(ivup);
- ivtbl = gen_ivtbl_resize(ivtbl, newsize);
+ ivtbl = gen_ivtbl_resize(ivtbl, ivup->shape->iv_count);
+ // Reinsert into the hash table because ivtbl might be a newly resized chunk of memory
*v = (st_data_t)ivtbl;
- ivup->u.ivtbl = ivtbl;
+ ivup->ivtbl = ivtbl;
+#if !SHAPE_IN_BASIC_FLAGS
+ ivtbl->shape_id = SHAPE_ID(ivup->shape);
+#endif
return ST_CONTINUE;
}
-static VALUE
-generic_ivar_defined(VALUE obj, ID id)
-{
- struct gen_ivtbl *ivtbl;
- st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
- uint32_t index;
-
- if (!iv_index_tbl_lookup(iv_index_tbl, id, &index)) return Qfalse;
- if (!gen_ivtbl_get(obj, id, &ivtbl)) return Qfalse;
-
- return RBOOL((index < ivtbl->numiv) && (ivtbl->ivptr[index] != Qundef));
-}
-
-static int
-generic_ivar_remove(VALUE obj, ID id, VALUE *valp)
-{
- struct gen_ivtbl *ivtbl;
- uint32_t index;
- st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
-
- if (!iv_index_tbl) return 0;
- if (!iv_index_tbl_lookup(iv_index_tbl, id, &index)) return 0;
- if (!gen_ivtbl_get(obj, id, &ivtbl)) return 0;
-
- if (index < ivtbl->numiv) {
- if (ivtbl->ivptr[index] != Qundef) {
- *valp = ivtbl->ivptr[index];
- ivtbl->ivptr[index] = Qundef;
- return 1;
- }
- }
- return 0;
-}
-
static void
gen_ivtbl_mark(const struct gen_ivtbl *ivtbl)
{
@@ -1153,8 +1043,12 @@ rb_mark_generic_ivar(VALUE obj)
{
struct gen_ivtbl *ivtbl;
- if (gen_ivtbl_get(obj, 0, &ivtbl)) {
- gen_ivtbl_mark(ivtbl);
+ if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
+
+#if !SHAPE_IN_BASIC_FLAGS
+ rb_gc_mark((VALUE)rb_shape_get_shape_by_id(ivtbl->shape_id));
+#endif
+ gen_ivtbl_mark(ivtbl);
}
}
@@ -1182,11 +1076,35 @@ rb_generic_ivar_memsize(VALUE obj)
{
struct gen_ivtbl *ivtbl;
- if (gen_ivtbl_get(obj, 0, &ivtbl))
- return gen_ivtbl_bytes(ivtbl->numiv);
+ if (rb_gen_ivtbl_get(obj, 0, &ivtbl))
+ return gen_ivtbl_bytes(ivtbl->numiv);
return 0;
}
+#if !SHAPE_IN_BASIC_FLAGS
+MJIT_FUNC_EXPORTED shape_id_t
+rb_generic_shape_id(VALUE obj)
+{
+ struct gen_ivtbl *ivtbl = 0;
+ shape_id_t shape_id = 0;
+
+ RB_VM_LOCK_ENTER();
+ {
+ st_table* global_iv_table = generic_ivtbl(obj, 0, false);
+
+ if (global_iv_table && st_lookup(global_iv_table, obj, (st_data_t *)&ivtbl)) {
+ shape_id = ivtbl->shape_id;
+ }
+ else if (OBJ_FROZEN(obj)) {
+ shape_id = FROZEN_ROOT_SHAPE_ID;
+ }
+ }
+ RB_VM_LOCK_LEAVE();
+
+ return shape_id;
+}
+#endif
+
static size_t
gen_ivtbl_count(const struct gen_ivtbl *ivtbl)
{
@@ -1254,23 +1172,16 @@ VALUE
rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
{
if (SPECIAL_CONST_P(obj)) return undef;
+
+ shape_id_t shape_id;
+ VALUE * ivar_list;
+ rb_shape_t * shape;
+
+#if SHAPE_IN_BASIC_FLAGS
+ shape_id = RBASIC_SHAPE_ID(obj);
+#endif
+
switch (BUILTIN_TYPE(obj)) {
- case T_OBJECT:
- {
- uint32_t index;
- uint32_t len = ROBJECT_NUMIV(obj);
- VALUE *ptr = ROBJECT_IVPTR(obj);
- VALUE val;
-
- if (iv_index_tbl_lookup(ROBJECT_IV_INDEX_TBL(obj), id, &index) &&
- index < len &&
- (val = ptr[index]) != Qundef) {
- return val;
- }
- else {
- break;
- }
- }
case T_CLASS:
case T_MODULE:
{
@@ -1287,14 +1198,37 @@ rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
return val;
}
else {
- break;
+ return undef;
}
}
+ case T_OBJECT:
+ {
+#if !SHAPE_IN_BASIC_FLAGS
+ shape_id = ROBJECT_SHAPE_ID(obj);
+#endif
+ ivar_list = ROBJECT_IVPTR(obj);
+ break;
+ }
default:
- if (FL_TEST(obj, FL_EXIVAR))
- return generic_ivar_get(obj, id, undef);
+ if (FL_TEST_RAW(obj, FL_EXIVAR)) {
+ struct gen_ivtbl *ivtbl;
+ rb_gen_ivtbl_get(obj, id, &ivtbl);
+#if !SHAPE_IN_BASIC_FLAGS
+ shape_id = ivtbl->shape_id;
+#endif
+ ivar_list = ivtbl->ivptr;
+ } else {
+ return undef;
+ }
break;
}
+
+ attr_index_t index = 0;
+ shape = rb_shape_get_shape_by_id(shape_id);
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ return ivar_list[index];
+ }
+
return undef;
}
@@ -1315,26 +1249,12 @@ rb_attr_get(VALUE obj, ID id)
static VALUE
rb_ivar_delete(VALUE obj, ID id, VALUE undef)
{
- VALUE *ptr;
- struct st_table *iv_index_tbl;
- uint32_t len, index;
-
rb_check_frozen(obj);
- switch (BUILTIN_TYPE(obj)) {
- case T_OBJECT:
- len = ROBJECT_NUMIV(obj);
- ptr = ROBJECT_IVPTR(obj);
- iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- if (iv_index_tbl_lookup(iv_index_tbl, id, &index) &&
- index < len) {
- VALUE val = ptr[index];
- ptr[index] = Qundef;
- if (val != Qundef) {
- return val;
- }
- }
- break;
+ VALUE val = Qnil;
+ attr_index_t index;
+
+ switch (BUILTIN_TYPE(obj)) {
case T_CLASS:
case T_MODULE:
IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id);
@@ -1345,11 +1265,33 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef)
}
}
break;
- default:
- if (FL_TEST(obj, FL_EXIVAR))
- return generic_ivar_delete(obj, id, undef);
+ case T_OBJECT: {
+ rb_shape_t * shape = rb_shape_get_shape(obj);
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ rb_shape_transition_shape_remove_ivar(obj, id, shape);
+ val = ROBJECT_IVPTR(obj)[index];
+ ROBJECT_IVPTR(obj)[index] = Qundef;
+ return val;
+ }
+
break;
+ }
+ default: {
+ rb_shape_t * shape = rb_shape_get_shape(obj);
+
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ rb_shape_transition_shape_remove_ivar(obj, id, shape);
+ struct gen_ivtbl *ivtbl;
+ rb_gen_ivtbl_get(obj, id, &ivtbl);
+ val = ivtbl->ivptr[index];
+ ivtbl->ivptr[index] = Qundef;
+ return val;
+ }
+
+ break;
+ }
}
+
return undef;
}
@@ -1359,67 +1301,34 @@ rb_attr_delete(VALUE obj, ID id)
return rb_ivar_delete(obj, id, Qnil);
}
-static st_table *
-iv_index_tbl_make(VALUE obj, VALUE klass)
-{
- st_table *iv_index_tbl;
-
- if (UNLIKELY(!klass)) {
- rb_raise(rb_eTypeError, "hidden object cannot have instance variables");
- }
-
- if ((iv_index_tbl = RCLASS_IV_INDEX_TBL(klass)) == NULL) {
- RB_VM_LOCK_ENTER();
- if ((iv_index_tbl = RCLASS_IV_INDEX_TBL(klass)) == NULL) {
- iv_index_tbl = RCLASS_IV_INDEX_TBL(klass) = st_init_numtable();
- }
- RB_VM_LOCK_LEAVE();
- }
-
- return iv_index_tbl;
-}
-
-static void
-iv_index_tbl_extend(struct ivar_update *ivup, ID id, VALUE klass)
-{
- ASSERT_vm_locking();
- st_data_t ent_data;
- struct rb_iv_index_tbl_entry *ent;
-
- if (st_lookup(ivup->u.iv_index_tbl, (st_data_t)id, &ent_data)) {
- ent = (void *)ent_data;
- ivup->index = ent->index;
- return;
- }
- if (ivup->u.iv_index_tbl->num_entries >= INT_MAX) {
- rb_raise(rb_eArgError, "too many instance variables");
- }
- ent = ALLOC(struct rb_iv_index_tbl_entry);
- ent->index = ivup->index = (uint32_t)ivup->u.iv_index_tbl->num_entries;
- ent->class_value = klass;
- ent->class_serial = RCLASS_SERIAL(klass);
- st_add_direct(ivup->u.iv_index_tbl, (st_data_t)id, (st_data_t)ent);
- ivup->iv_extended = 1;
-}
-
static void
generic_ivar_set(VALUE obj, ID id, VALUE val)
{
- VALUE klass = rb_obj_class(obj);
struct ivar_update ivup;
- ivup.iv_extended = 0;
- ivup.u.iv_index_tbl = iv_index_tbl_make(obj, klass);
+ // The returned shape has a transition for `id`, so `id` resolves to an index in it
+ rb_shape_t * shape = rb_shape_get_next(rb_shape_get_shape(obj), obj, id);
+ ivup.shape = shape;
RB_VM_LOCK_ENTER();
{
- iv_index_tbl_extend(&ivup, id, klass);
- st_update(generic_ivtbl(obj, id, false), (st_data_t)obj, generic_ivar_update,
- (st_data_t)&ivup);
+ attr_index_t ent_data;
+ if (rb_shape_get_iv_index(shape, id, &ent_data)) {
+ ivup.iv_index = (uint32_t) ent_data;
+ }
+ else {
+ rb_bug("unreachable. Shape was not found for id: %s", rb_id2name(id));
+ }
+
+ if (!st_update(generic_ivtbl(obj, id, false), (st_data_t)obj, generic_ivar_update,
+ (st_data_t)&ivup)) {
+ RB_OBJ_WRITTEN(obj, Qundef, shape);
+ }
}
RB_VM_LOCK_LEAVE();
- ivup.u.ivtbl->ivptr[ivup.index] = val;
+ ivup.ivtbl->ivptr[ivup.iv_index] = val;
+ rb_shape_set_shape(obj, shape);
RB_OBJ_WRITTEN(obj, Qundef, val);
}
@@ -1486,8 +1395,8 @@ rb_obj_transient_heap_evacuate(VALUE obj, int promote)
}
#endif
-static void
-init_iv_list(VALUE obj, uint32_t len, uint32_t newsize, st_table *index_tbl)
+void
+rb_ensure_iv_list_size(VALUE obj, uint32_t len, uint32_t newsize)
{
VALUE *ptr = ROBJECT_IVPTR(obj);
VALUE *newptr;
@@ -1510,35 +1419,34 @@ init_iv_list(VALUE obj, uint32_t len, uint32_t newsize, st_table *index_tbl)
#else
ROBJECT(obj)->as.heap.numiv = newsize;
#endif
- ROBJECT(obj)->as.heap.iv_index_tbl = index_tbl;
-}
-
-void
-rb_init_iv_list(VALUE obj)
-{
- st_table *index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- uint32_t newsize = (uint32_t)index_tbl->num_entries;
- uint32_t len = ROBJECT_NUMIV(obj);
- init_iv_list(obj, len, newsize, index_tbl);
}
-// Retrieve or create the id-to-index mapping for a given object and an
-// instance variable name.
-static struct ivar_update
-obj_ensure_iv_index_mapping(VALUE obj, ID id)
+struct gen_ivtbl *
+rb_ensure_generic_iv_list_size(VALUE obj, uint32_t newsize)
{
- VALUE klass = rb_obj_class(obj);
- struct ivar_update ivup;
- ivup.iv_extended = 0;
- ivup.u.iv_index_tbl = iv_index_tbl_make(obj, klass);
+ struct gen_ivtbl * ivtbl = 0;
RB_VM_LOCK_ENTER();
{
- iv_index_tbl_extend(&ivup, id, klass);
+ if (UNLIKELY(!gen_ivtbl_get_unlocked(obj, 0, &ivtbl) || newsize > ivtbl->numiv)) {
+ ivtbl = gen_ivtbl_resize(ivtbl, newsize);
+ st_insert(generic_ivtbl_no_ractor_check(obj), (st_data_t)obj, (st_data_t)ivtbl);
+ FL_SET_RAW(obj, FL_EXIVAR);
+ }
}
RB_VM_LOCK_LEAVE();
- return ivup;
+ RUBY_ASSERT(ivtbl);
+
+ return ivtbl;
+}
+
+void
+rb_init_iv_list(VALUE obj)
+{
+ uint32_t newsize = rb_shape_get_shape(obj)->iv_count * 2.0;
+ uint32_t len = ROBJECT_NUMIV(obj);
+ rb_ensure_iv_list_size(obj, len, newsize < len ? len : newsize);
}
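rb_init_iv_list sizes the ivar buffer from the shape rather than from an iv_index_tbl: it doubles the shape's iv_count but never shrinks below the current length (rb_obj_ensure_iv_index_mapping, further down, instead grows by roughly 1.25x past the new index). A worked sketch of the doubling policy, assuming the arithmetic as written (init_iv_list_size is a hypothetical name used only for illustration):

```ruby
# Growth used by rb_init_iv_list: double the shape's iv_count, but never
# shrink below the current buffer length.
def init_iv_list_size(iv_count, len)
  newsize = (iv_count * 2.0).to_i
  newsize < len ? len : newsize
end

p init_iv_list_size(3, 4)   # => 6
p init_iv_list_size(1, 4)   # => 4 (keep the existing capacity)
```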
// Return the instance variable index for a given name and T_OBJECT object. The
@@ -1552,26 +1460,114 @@ uint32_t
rb_obj_ensure_iv_index_mapping(VALUE obj, ID id)
{
RUBY_ASSERT(RB_TYPE_P(obj, T_OBJECT));
- // This uint32_t cast shouldn't lose information as it's checked in
- // iv_index_tbl_extend(). The index is stored as an uint32_t in
- // struct rb_iv_index_tbl_entry.
- return (uint32_t)obj_ensure_iv_index_mapping(obj, id).index;
+ attr_index_t index;
+
+ // Ensure there is a transition for IVAR +id+
+ rb_shape_transition_shape(obj, id, rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj)));
+
+ // Get the current shape
+ rb_shape_t * shape = rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj));
+
+ if (!rb_shape_get_iv_index(shape, id, &index)) {
+ rb_bug("unreachable. Shape was not found for id: %s", rb_id2name(id));
+ }
+
+ uint32_t len = ROBJECT_NUMIV(obj);
+ if (len <= index) {
+ uint32_t newsize = (shape->iv_count + 1) * 1.25;
+ rb_ensure_iv_list_size(obj, len, newsize);
+ }
+ RUBY_ASSERT(index <= ROBJECT_NUMIV(obj));
+ return index;
}
static VALUE
obj_ivar_set(VALUE obj, ID id, VALUE val)
{
- uint32_t len;
- struct ivar_update ivup = obj_ensure_iv_index_mapping(obj, id);
+ attr_index_t index = rb_obj_ensure_iv_index_mapping(obj, id);
+ RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[index], val);
+ return val;
+}
+
+/* Set the instance variable +val+ on object +obj+ at ivar name +id+.
+ * This function only works with T_OBJECT objects, so make sure
+ * +obj+ is of type T_OBJECT before using this function.
+ */
+VALUE
+rb_vm_set_ivar_id(VALUE obj, ID id, VALUE val)
+{
+ rb_check_frozen_internal(obj);
+ obj_ivar_set(obj, id, val);
+ return val;
+}
- len = ROBJECT_NUMIV(obj);
- if (len <= ivup.index) {
- uint32_t newsize = iv_index_tbl_newsize(&ivup);
- init_iv_list(obj, len, newsize, ivup.u.iv_index_tbl);
+bool
+rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id)
+{
+ if (rb_shape_get_shape_id(obj) == shape_id) {
+ return false;
}
- RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[ivup.index], val);
- return val;
+#if SHAPE_IN_BASIC_FLAGS
+ RBASIC_SET_SHAPE_ID(obj, shape_id);
+#else
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ ROBJECT_SET_SHAPE_ID(obj, shape_id);
+ break;
+ case T_CLASS:
+ case T_MODULE:
+ {
+ RCLASS_EXT(obj)->shape_id = shape_id;
+ break;
+ }
+ case T_IMEMO:
+ if (imemo_type(obj) == imemo_shape) {
+ RBASIC(obj)->flags &= 0xffffffff0000ffff;
+ RBASIC(obj)->flags |= ((uint32_t)(shape_id) << 16);
+ }
+ break;
+ default:
+ {
+ if (shape_id != FROZEN_ROOT_SHAPE_ID) {
+ struct gen_ivtbl *ivtbl = 0;
+ RB_VM_LOCK_ENTER();
+ {
+ st_table* global_iv_table = generic_ivtbl(obj, 0, false);
+
+ if (st_lookup(global_iv_table, obj, (st_data_t *)&ivtbl)) {
+ ivtbl->shape_id = shape_id;
+ }
+ else {
+ rb_bug("Expected shape_id entry in global iv table");
+ }
+ }
+ RB_VM_LOCK_LEAVE();
+ }
+ }
+ }
+#endif
+
+ return true;
+}
+
+/**
+ * Prevents further modifications to the given object. ::rb_eFrozenError shall
+ * be raised if modification is attempted.
+ *
+ * @param[out] x Object in question.
+ */
+void rb_obj_freeze_inline(VALUE x)
+{
+ if (RB_FL_ABLE(x)) {
+ RB_OBJ_FREEZE_RAW(x);
+
+ rb_shape_transition_shape_frozen(x);
+
+ if (RBASIC_CLASS(x) && !(RBASIC(x)->flags & RUBY_FL_SINGLETON)) {
+ rb_freeze_singleton_class(x);
+ }
+ }
}
static void
@@ -1581,10 +1577,14 @@ ivar_set(VALUE obj, ID id, VALUE val)
switch (BUILTIN_TYPE(obj)) {
case T_OBJECT:
- obj_ivar_set(obj, id, val);
- break;
+ {
+ obj_ivar_set(obj, id, val);
+ break;
+ }
case T_CLASS:
case T_MODULE:
+ // TODO: Transition shapes on classes
+ //rb_shape_transition_shape(obj, id, rb_shape_get_shape_by_id(RCLASS_SHAPE_ID(obj)));
IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id);
rb_class_ivar_set(obj, id, val);
break;
@@ -1614,161 +1614,86 @@ rb_ivar_set_internal(VALUE obj, ID id, VALUE val)
VALUE
rb_ivar_defined(VALUE obj, ID id)
{
- VALUE val;
- struct st_table *iv_index_tbl;
- uint32_t index;
+ attr_index_t index;
if (SPECIAL_CONST_P(obj)) return Qfalse;
switch (BUILTIN_TYPE(obj)) {
- case T_OBJECT:
- iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- if (iv_index_tbl_lookup(iv_index_tbl, id, &index) &&
- index < ROBJECT_NUMIV(obj) &&
- (val = ROBJECT_IVPTR(obj)[index]) != Qundef) {
- return Qtrue;
- }
- break;
case T_CLASS:
case T_MODULE:
- if (RCLASS_IV_TBL(obj) && lock_st_is_member(RCLASS_IV_TBL(obj), (st_data_t)id))
+ if (RCLASS_IV_TBL(obj) && lock_st_is_member(RCLASS_IV_TBL(obj), (st_data_t)id)) {
return Qtrue;
- break;
+ }
+ else {
+ return Qfalse;
+ }
default:
- if (FL_TEST(obj, FL_EXIVAR))
- return generic_ivar_defined(obj, id);
- break;
+ return RBOOL(rb_shape_get_iv_index(rb_shape_get_shape(obj), id, &index));
}
- return Qfalse;
}
typedef int rb_ivar_foreach_callback_func(ID key, VALUE val, st_data_t arg);
st_data_t rb_st_nth_key(st_table *tab, st_index_t index);
-static ID
-iv_index_tbl_nth_id(st_table *iv_index_tbl, uint32_t index)
-{
- st_data_t key;
- RB_VM_LOCK_ENTER();
- {
- key = rb_st_nth_key(iv_index_tbl, index);
- }
- RB_VM_LOCK_LEAVE();
- return (ID)key;
-}
-
-static inline bool
-ivar_each_i(st_table *iv_index_tbl, VALUE val, uint32_t i, rb_ivar_foreach_callback_func *func, st_data_t arg)
-{
- if (val != Qundef) {
- ID id = iv_index_tbl_nth_id(iv_index_tbl, i);
- switch (func(id, val, arg)) {
- case ST_CHECK:
- case ST_CONTINUE:
- break;
- case ST_STOP:
- return true;
- default:
- rb_bug("unreachable");
- }
+static void
+iterate_over_shapes_with_callback(rb_shape_t *shape, VALUE* iv_list, rb_ivar_foreach_callback_func *callback, st_data_t arg) {
+ switch ((enum shape_type)shape->type) {
+ case SHAPE_ROOT:
+ return;
+ case SHAPE_IVAR:
+ iterate_over_shapes_with_callback(shape->parent, iv_list, callback, arg);
+ VALUE val = iv_list[shape->iv_count - 1];
+ if (val != Qundef) {
+ callback(shape->edge_name, val, arg);
+ }
+ return;
+ case SHAPE_IVAR_UNDEF:
+ case SHAPE_FROZEN:
+ iterate_over_shapes_with_callback(shape->parent, iv_list, callback, arg);
+ return;
}
- return false;
}
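With iv_index_tbl gone, ivar iteration recovers the names by walking the shape's parent chain: each SHAPE_IVAR node owns the value at index iv_count - 1 and contributes its edge_name, while SHAPE_FROZEN and SHAPE_IVAR_UNDEF nodes are passed over. A standalone sketch of that recursion (plain Ruby stand-ins, with nil playing the role of Qundef; not CRuby source):

```ruby
Shape = Struct.new(:type, :parent, :edge_name, :iv_count)

# Mirrors iterate_over_shapes_with_callback: recurse toward the root first so
# ivars are yielded in insertion order; :frozen and :ivar_undef nodes
# contribute no name/value of their own.
def each_ivar(shape, iv_list, &blk)
  return if shape.nil? || shape.type == :root
  each_ivar(shape.parent, iv_list, &blk)
  if shape.type == :ivar
    val = iv_list[shape.iv_count - 1]
    blk.call(shape.edge_name, val) unless val.nil?   # nil stands in for Qundef
  end
end

root = Shape.new(:root, nil, nil, 0)
a    = Shape.new(:ivar, root, :@a, 1)
b    = Shape.new(:ivar, a,    :@b, 2)

each_ivar(b, [1, 2]) { |name, val| p [name, val] }
# [:@a, 1]
# [:@b, 2]
```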
static void
obj_ivar_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg)
{
- st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- if (!iv_index_tbl) return;
- uint32_t i=0;
-
- for (i=0; i < ROBJECT_NUMIV(obj); i++) {
- VALUE val = ROBJECT_IVPTR(obj)[i];
- if (ivar_each_i(iv_index_tbl, val, i, func, arg)) {
- return;
- }
- }
+ rb_shape_t* shape = rb_shape_get_shape(obj);
+ iterate_over_shapes_with_callback(shape, ROBJECT_IVPTR(obj), func, arg);
}
static void
gen_ivar_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg)
{
+ rb_shape_t *shape = rb_shape_get_shape(obj);
struct gen_ivtbl *ivtbl;
- st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
- if (!iv_index_tbl) return;
- if (!gen_ivtbl_get(obj, 0, &ivtbl)) return;
+ if (!rb_gen_ivtbl_get(obj, 0, &ivtbl)) return;
- for (uint32_t i=0; i<ivtbl->numiv; i++) {
- VALUE val = ivtbl->ivptr[i];
- if (ivar_each_i(iv_index_tbl, val, i, func, arg)) {
- return;
- }
- }
-}
-
-struct givar_copy {
- VALUE obj;
- VALUE klass;
- st_table *iv_index_tbl;
- struct gen_ivtbl *ivtbl;
-};
-
-static int
-gen_ivar_copy(ID id, VALUE val, st_data_t arg)
-{
- struct givar_copy *c = (struct givar_copy *)arg;
- struct ivar_update ivup;
-
- ivup.iv_extended = 0;
- ivup.u.iv_index_tbl = c->iv_index_tbl;
-
- RB_VM_LOCK_ENTER();
- {
- iv_index_tbl_extend(&ivup, id, c->klass);
- }
- RB_VM_LOCK_LEAVE();
-
- if (ivup.index >= c->ivtbl->numiv) {
- uint32_t newsize = iv_index_tbl_newsize(&ivup);
- c->ivtbl = gen_ivtbl_resize(c->ivtbl, newsize);
- }
- c->ivtbl->ivptr[ivup.index] = val;
-
- RB_OBJ_WRITTEN(c->obj, Qundef, val);
-
- return ST_CONTINUE;
+ iterate_over_shapes_with_callback(shape, ivtbl->ivptr, func, arg);
}
void
rb_copy_generic_ivar(VALUE clone, VALUE obj)
{
- struct gen_ivtbl *ivtbl;
+ struct gen_ivtbl *obj_ivtbl;
+ struct gen_ivtbl *new_ivtbl;
rb_check_frozen(clone);
if (!FL_TEST(obj, FL_EXIVAR)) {
goto clear;
}
- if (gen_ivtbl_get(obj, 0, &ivtbl)) {
- struct givar_copy c;
- uint32_t i;
- if (gen_ivtbl_count(ivtbl) == 0)
+ if (rb_gen_ivtbl_get(obj, 0, &obj_ivtbl)) {
+ if (gen_ivtbl_count(obj_ivtbl) == 0)
goto clear;
- if (gen_ivtbl_get(clone, 0, &c.ivtbl)) {
- for (i = 0; i < c.ivtbl->numiv; i++)
- c.ivtbl->ivptr[i] = Qundef;
- }
- else {
- c.ivtbl = gen_ivtbl_resize(0, ivtbl->numiv);
- FL_SET(clone, FL_EXIVAR);
+ new_ivtbl = gen_ivtbl_resize(0, obj_ivtbl->numiv);
+ FL_SET(clone, FL_EXIVAR);
+
+ for (uint32_t i=0; i<obj_ivtbl->numiv; i++) {
+ new_ivtbl->ivptr[i] = obj_ivtbl->ivptr[i];
+ RB_OBJ_WRITTEN(clone, Qundef, new_ivtbl->ivptr[i]);
}
- VALUE klass = rb_obj_class(clone);
- c.iv_index_tbl = iv_index_tbl_make(clone, klass);
- c.obj = clone;
- c.klass = klass;
- gen_ivar_each(obj, gen_ivar_copy, (st_data_t)&c);
/*
* c.ivtbl may change in gen_ivar_copy due to realloc,
* no need to free
@@ -1776,9 +1701,17 @@ rb_copy_generic_ivar(VALUE clone, VALUE obj)
RB_VM_LOCK_ENTER();
{
generic_ivtbl_no_ractor_check(clone);
- st_insert(generic_ivtbl_no_ractor_check(obj), (st_data_t)clone, (st_data_t)c.ivtbl);
+ st_insert(generic_ivtbl_no_ractor_check(obj), (st_data_t)clone, (st_data_t)new_ivtbl);
}
RB_VM_LOCK_LEAVE();
+
+ rb_shape_t * obj_shape = rb_shape_get_shape(obj);
+ if (rb_shape_frozen_shape_p(obj_shape)) {
+ rb_shape_set_shape(clone, obj_shape->parent);
+ }
+ else {
+ rb_shape_set_shape(clone, obj_shape);
+ }
}
return;
@@ -1846,17 +1779,17 @@ rb_ivar_count(VALUE obj)
switch (BUILTIN_TYPE(obj)) {
case T_OBJECT:
- if (ROBJECT_IV_INDEX_TBL(obj) != 0) {
- st_index_t i, count, num = ROBJECT_NUMIV(obj);
- const VALUE *const ivptr = ROBJECT_IVPTR(obj);
- for (i = count = 0; i < num; ++i) {
- if (ivptr[i] != Qundef) {
- count++;
- }
- }
- return count;
- }
- break;
+ if (rb_shape_get_shape(obj)->iv_count > 0) {
+ st_index_t i, count, num = ROBJECT_NUMIV(obj);
+ const VALUE *const ivptr = ROBJECT_IVPTR(obj);
+ for (i = count = 0; i < num; ++i) {
+ if (ivptr[i] != Qundef) {
+ count++;
+ }
+ }
+ return count;
+ }
+ break;
case T_CLASS:
case T_MODULE:
if ((tbl = RCLASS_IV_TBL(obj)) != 0) {
@@ -1867,11 +1800,11 @@ rb_ivar_count(VALUE obj)
if (FL_TEST(obj, FL_EXIVAR)) {
struct gen_ivtbl *ivtbl;
- if (gen_ivtbl_get(obj, 0, &ivtbl)) {
- return gen_ivtbl_count(ivtbl);
- }
- }
- break;
+ if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
+ return gen_ivtbl_count(ivtbl);
+ }
+ }
+ break;
}
return 0;
}
@@ -1965,40 +1898,53 @@ rb_obj_remove_instance_variable(VALUE obj, VALUE name)
{
VALUE val = Qnil;
const ID id = id_for_var(obj, name, an, instance);
- st_data_t n, v;
- struct st_table *iv_index_tbl;
- uint32_t index;
+ // Frozen check comes here because it's expected that we raise a
+ // NameError (from the id_for_var check) before we raise a FrozenError
rb_check_frozen(obj);
+
+ attr_index_t index;
+
if (!id) {
goto not_defined;
}
switch (BUILTIN_TYPE(obj)) {
- case T_OBJECT:
- iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- if (iv_index_tbl_lookup(iv_index_tbl, id, &index) &&
- index < ROBJECT_NUMIV(obj) &&
- (val = ROBJECT_IVPTR(obj)[index]) != Qundef) {
- ROBJECT_IVPTR(obj)[index] = Qundef;
- return val;
- }
- break;
case T_CLASS:
case T_MODULE:
IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id);
- n = id;
- if (RCLASS_IV_TBL(obj) && lock_st_delete(RCLASS_IV_TBL(obj), &n, &v)) {
- return (VALUE)v;
+ if (RCLASS_IV_TBL(obj)) {
+ st_data_t id_data = (st_data_t)id, val;
+ if (lock_st_delete(RCLASS_IV_TBL(obj), &id_data, &val)) {
+ return (VALUE)val;
+ }
}
break;
- default:
- if (FL_TEST(obj, FL_EXIVAR)) {
- if (generic_ivar_remove(obj, id, &val)) {
- return val;
- }
+ case T_OBJECT: {
+ rb_shape_t * shape = rb_shape_get_shape(obj);
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ rb_shape_transition_shape_remove_ivar(obj, id, shape);
+ val = ROBJECT_IVPTR(obj)[index];
+ ROBJECT_IVPTR(obj)[index] = Qundef;
+ return val;
+ }
+
+ break;
+ }
+ default: {
+ rb_shape_t * shape = rb_shape_get_shape(obj);
+
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ rb_shape_transition_shape_remove_ivar(obj, id, shape);
+ struct gen_ivtbl *ivtbl;
+ rb_gen_ivtbl_get(obj, id, &ivtbl);
+ val = ivtbl->ivptr[index];
+ ivtbl->ivptr[index] = Qundef;
+ return val;
}
+
break;
+ }
}
not_defined:
diff --git a/variable.h b/variable.h
index 55596b00de..314ac82df0 100644
--- a/variable.h
+++ b/variable.h
@@ -11,11 +11,19 @@
/* per-object */
struct gen_ivtbl {
+#if !SHAPE_IN_BASIC_FLAGS
+ uint16_t shape_id;
+#endif
uint32_t numiv;
VALUE ivptr[FLEX_ARY_LEN];
};
int rb_ivar_generic_ivtbl_lookup(VALUE obj, struct gen_ivtbl **);
-VALUE rb_ivar_generic_lookup_with_index(VALUE obj, ID id, uint32_t index);
+
+#include "shape.h"
+#if !SHAPE_IN_BASIC_FLAGS
+shape_id_t rb_generic_shape_id(VALUE obj);
+#endif
+
#endif /* RUBY_TOPLEVEL_VARIABLE_H */
diff --git a/vm.c b/vm.c
index 0de461392f..77393458c2 100644
--- a/vm.c
+++ b/vm.c
@@ -26,6 +26,7 @@
#include "internal/thread.h"
#include "internal/vm.h"
#include "internal/sanitizers.h"
+#include "internal/variable.h"
#include "iseq.h"
#include "mjit.h"
#include "yjit.h"
@@ -2720,6 +2721,12 @@ rb_vm_update_references(void *ptr)
vm->top_self = rb_gc_location(vm->top_self);
vm->orig_progname = rb_gc_location(vm->orig_progname);
+ for (shape_id_t i = 0; i <= vm->max_shape_count; i++) {
+ if (vm->shape_list[i]) {
+ vm->shape_list[i] = (rb_shape_t *)rb_gc_location((VALUE)vm->shape_list[i]);
+ }
+ }
+
rb_gc_update_tbl_refs(vm->overloaded_cme_table);
if (vm->coverages) {
@@ -2801,6 +2808,8 @@ rb_vm_mark(void *ptr)
obj_ary++;
}
+ rb_gc_mark((VALUE)vm->root_shape);
+ rb_gc_mark((VALUE)vm->frozen_root_shape);
rb_gc_mark_movable(vm->load_path);
rb_gc_mark_movable(vm->load_path_snapshot);
RUBY_MARK_MOVABLE_UNLESS_NULL(vm->load_path_check_cache);
@@ -4021,6 +4030,11 @@ Init_BareVM(void)
rb_native_cond_initialize(&vm->ractor.sync.terminate_cond);
}
+#ifndef _WIN32
+#include <unistd.h>
+#include <sys/mman.h>
+#endif
+
void
Init_vm_objects(void)
{
@@ -4032,6 +4046,37 @@ Init_vm_objects(void)
vm->mark_object_ary = rb_ary_hidden_new(128);
vm->loading_table = st_init_strtable();
vm->frozen_strings = st_init_table_with_size(&rb_fstring_hash_type, 10000);
+
+#if HAVE_MMAP
+ vm->shape_list = (rb_shape_t **)mmap(NULL, rb_size_mul_or_raise(SHAPE_BITMAP_SIZE * 32, sizeof(rb_shape_t *), rb_eRuntimeError),
+ PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
+ if (vm->shape_list == MAP_FAILED) {
+ vm->shape_list = 0;
+ }
+#else
+ vm->shape_list = xcalloc(SHAPE_BITMAP_SIZE * 32, sizeof(rb_shape_t *));
+#endif
+
+ if (!vm->shape_list) {
+ rb_memerror();
+ }
+
+ // Root shape
+ vm->root_shape = rb_shape_alloc(ROOT_SHAPE_ID,
+ 0,
+ 0);
+ rb_shape_set_shape_by_id(ROOT_SHAPE_ID, vm->root_shape);
+ RB_OBJ_WRITTEN(vm->root_shape, Qundef, (VALUE)vm);
+
+ // Frozen root shape
+ vm->frozen_root_shape = rb_shape_alloc(FROZEN_ROOT_SHAPE_ID,
+ rb_make_internal_id(),
+ vm->root_shape);
+ vm->frozen_root_shape->type = (uint8_t)SHAPE_FROZEN;
+ RB_OBJ_FREEZE_RAW((VALUE)vm->frozen_root_shape);
+ rb_shape_set_shape_by_id(FROZEN_ROOT_SHAPE_ID, vm->frozen_root_shape);
+ RB_OBJ_WRITTEN(vm->frozen_root_shape, Qundef, (VALUE)vm);
+ vm->max_shape_count = 1;
}
/* Stub for builtin function when not building YJIT units*/
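Init_vm_objects above reserves shape ids 0 and 1 for the root and frozen-root shapes and keeps every shape in shape_list, a flat array indexed by shape id, so rb_shape_get_shape_by_id is a single indexed load. A toy sketch of that registry (the id-allocation step is a guess at the scheme; the real logic lives in shape.c, outside this hunk, and :__frozen__ stands in for the internal edge ID):

```ruby
ROOT_SHAPE_ID        = 0
FROZEN_ROOT_SHAPE_ID = 1

shape_list = []                                  # indexed by shape id
shape_list[ROOT_SHAPE_ID]        = { id: 0, parent: nil, edge: nil }
shape_list[FROZEN_ROOT_SHAPE_ID] = { id: 1, parent: ROOT_SHAPE_ID, edge: :__frozen__ }
max_shape_count = 1                              # ids 0 and 1 are reserved at boot

# a new transition takes the next id and is registered in the list
new_id = max_shape_count + 1
shape_list[new_id] = { id: new_id, parent: ROOT_SHAPE_ID, edge: :@a }
max_shape_count = new_id

p shape_list[new_id]                             # O(1) lookup by shape id
```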
diff --git a/vm_callinfo.h b/vm_callinfo.h
index fd2215be7d..e5b04c0709 100644
--- a/vm_callinfo.h
+++ b/vm_callinfo.h
@@ -10,6 +10,7 @@
#include "debug_counter.h"
#include "internal/class.h"
+#include "shape.h"
enum vm_call_flag_bits {
VM_CALL_ARGS_SPLAT_bit, /* m(*args) */
@@ -284,14 +285,32 @@ struct rb_callcache {
const vm_call_handler call_;
union {
- const unsigned int attr_index;
+ struct {
+ const attr_index_t index;
+ shape_id_t dest_shape_id;
+ } attr;
const enum method_missing_reason method_missing_reason; /* used by method_missing */
VALUE v;
} aux_;
};
-#define VM_CALLCACHE_UNMARKABLE IMEMO_FL_USER0
-#define VM_CALLCACHE_ON_STACK IMEMO_FL_USER1
+#define VM_CALLCACHE_UNMARKABLE FL_FREEZE
+#define VM_CALLCACHE_ON_STACK FL_EXIVAR
+
+extern const struct rb_callcache *rb_vm_empty_cc(void);
+extern const struct rb_callcache *rb_vm_empty_cc_for_super(void);
+
+#define vm_cc_empty() rb_vm_empty_cc()
+
+static inline void
+vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
+{
+ VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+ VM_ASSERT(cc != vm_cc_empty());
+ IMEMO_SET_CACHED_SHAPE_ID((VALUE)cc, shape_id);
+ *(attr_index_t *)&cc->aux_.attr.index = 0;
+ *(shape_id_t *)&cc->aux_.attr.dest_shape_id = shape_id;
+}
static inline const struct rb_callcache *
vm_cc_new(VALUE klass,
@@ -299,6 +318,7 @@ vm_cc_new(VALUE klass,
vm_call_handler call)
{
const struct rb_callcache *cc = (const struct rb_callcache *)rb_imemo_new(imemo_callcache, (VALUE)cme, (VALUE)call, 0, klass);
+ vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
RB_DEBUG_COUNTER_INC(cc_new);
return cc;
}
@@ -350,30 +370,71 @@ vm_cc_call(const struct rb_callcache *cc)
return cc->call_;
}
-static inline unsigned int
+static inline attr_index_t
vm_cc_attr_index(const struct rb_callcache *cc)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- return cc->aux_.attr_index - 1;
+ return cc->aux_.attr.index - 1;
}
static inline bool
vm_cc_attr_index_p(const struct rb_callcache *cc)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- return cc->aux_.attr_index > 0;
+ return cc->aux_.attr.index != 0;
+}
+
+static inline shape_id_t
+vm_cc_attr_index_source_shape_id(const struct rb_callcache *cc)
+{
+ VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+
+ return IMEMO_CACHED_SHAPE_ID((VALUE)cc);
}
-static inline uint32_t
-vm_ic_entry_index(const struct iseq_inline_iv_cache_entry *ic)
+static inline shape_id_t
+vm_cc_attr_shape_id(const struct rb_callcache *cc)
{
- return ic->entry->index;
+ VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+ return vm_cc_attr_index_source_shape_id(cc);
+}
+
+static inline shape_id_t
+vm_cc_attr_index_dest_shape_id(const struct rb_callcache *cc)
+{
+ VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+
+ return cc->aux_.attr.dest_shape_id;
+}
+
+static inline attr_index_t
+vm_ic_attr_index(const struct iseq_inline_iv_cache_entry *ic)
+{
+ return ic->attr_index - 1;
}
static inline bool
-vm_ic_entry_p(const struct iseq_inline_iv_cache_entry *ic)
+vm_ic_attr_index_p(const struct iseq_inline_iv_cache_entry *ic)
+{
+ return ic->attr_index > 0;
+}
+
+static inline shape_id_t
+vm_ic_attr_shape_id(const struct iseq_inline_iv_cache_entry *ic)
+{
+ return ic->source_shape_id;
+}
+
+static inline shape_id_t
+vm_ic_attr_index_source_shape_id(const struct iseq_inline_iv_cache_entry *ic)
{
- return ic->entry;
+ return ic->source_shape_id;
+}
+
+static inline shape_id_t
+vm_ic_attr_index_dest_shape_id(const struct iseq_inline_iv_cache_entry *ic)
+{
+ return ic->dest_shape_id;
}
static inline unsigned int
@@ -407,10 +468,6 @@ vm_cc_valid_p(const struct rb_callcache *cc, const rb_callable_method_entry_t *c
}
}
-extern const struct rb_callcache *rb_vm_empty_cc(void);
-extern const struct rb_callcache *rb_vm_empty_cc_for_super(void);
-#define vm_cc_empty() rb_vm_empty_cc()
-
/* callcache: mutate */
static inline void
@@ -422,26 +479,29 @@ vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
}
static inline void
-vm_cc_attr_index_set(const struct rb_callcache *cc, int index)
+vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t source_shape_id, shape_id_t dest_shape_id)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());
- *(int *)&cc->aux_.attr_index = index + 1;
+ IMEMO_SET_CACHED_SHAPE_ID((VALUE)cc, source_shape_id);
+ *(attr_index_t *)&cc->aux_.attr.index = (index + 1);
+ *(shape_id_t *)&cc->aux_.attr.dest_shape_id = dest_shape_id;
}
static inline void
-vm_ic_entry_set(struct iseq_inline_iv_cache_entry *ic, struct rb_iv_index_tbl_entry *entry, const rb_iseq_t *iseq)
+vm_ic_attr_index_set(const rb_iseq_t *iseq, const struct iseq_inline_iv_cache_entry *ic, attr_index_t index, shape_id_t source_shape_id, shape_id_t dest_shape_id)
{
- ic->entry = entry;
- RB_OBJ_WRITTEN(iseq, Qundef, entry->class_value);
+ *(shape_id_t *)&ic->source_shape_id = source_shape_id;
+ *(shape_id_t *)&ic->dest_shape_id = dest_shape_id;
+ *(attr_index_t *)&ic->attr_index = index + 1;
}
static inline void
-vm_cc_attr_index_initialize(const struct rb_callcache *cc)
+vm_ic_attr_index_initialize(const struct iseq_inline_iv_cache_entry *ic, shape_id_t shape_id)
{
- VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- VM_ASSERT(cc != vm_cc_empty());
- *(int *)&cc->aux_.attr_index = 0;
+ *(shape_id_t *)&ic->source_shape_id = shape_id;
+ *(shape_id_t *)&ic->dest_shape_id = shape_id;
+ *(attr_index_t *)&ic->attr_index = 0;
}
static inline void
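Both the call-cache and inline-cache accessors above use the same convention: the stored attr_index is offset by one so that a zero-initialized cache reads as "nothing cached", and each entry carries the shape id it was filled for plus the destination shape id a cached set transitions to. A standalone sketch of that convention (field and helper names are stand-ins, not CRuby source):

```ruby
# Mirrors vm_cc_attr_index_set / vm_cc_attr_index above: the index is stored
# off-by-one so that 0 doubles as "nothing cached yet".
IvarCache = Struct.new(:source_shape_id, :dest_shape_id, :attr_index)

def cache_initialize(c, shape_id)            # like vm_*_attr_index_initialize
  c.source_shape_id = shape_id
  c.dest_shape_id   = shape_id
  c.attr_index      = 0
end

def cache_set(c, index, source_id, dest_id)  # like vm_*_attr_index_set
  c.source_shape_id = source_id
  c.dest_shape_id   = dest_id
  c.attr_index      = index + 1
end

def cache_filled?(c) = c.attr_index > 0      # like vm_*_attr_index_p
def cache_index(c)   = c.attr_index - 1      # like vm_*_attr_index

c = IvarCache.new
cache_initialize(c, 5)
p cache_filled?(c)                    # => false: shape 5 is known to lack the ivar
cache_set(c, 0, 5, 6)                 # setting the ivar transitions shape 5 -> 6
p [cache_index(c), c.dest_shape_id]   # => [0, 6]
```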
diff --git a/vm_core.h b/vm_core.h
index e11838a59a..b4768be136 100644
--- a/vm_core.h
+++ b/vm_core.h
@@ -99,6 +99,7 @@ extern int ruby_assert_critical_section_entered;
#include "ruby/st.h"
#include "ruby_atomic.h"
#include "vm_opts.h"
+#include "shape.h"
#include "ruby/thread_native.h"
@@ -272,7 +273,9 @@ struct iseq_inline_constant_cache {
};
struct iseq_inline_iv_cache_entry {
- struct rb_iv_index_tbl_entry *entry;
+ shape_id_t source_shape_id;
+ shape_id_t dest_shape_id;
+ attr_index_t attr_index;
};
struct iseq_inline_cvar_cache_entry {
@@ -687,6 +690,12 @@ typedef struct rb_vm_struct {
VALUE mark_object_ary;
const VALUE special_exceptions[ruby_special_error_count];
+ /* object shapes */
+ rb_shape_t **shape_list;
+ rb_shape_t *root_shape;
+ rb_shape_t *frozen_root_shape;
+ shape_id_t max_shape_count;
+
/* load */
VALUE top_self;
VALUE load_path;
diff --git a/vm_eval.c b/vm_eval.c
index c0558fce2b..bfa812f3d3 100644
--- a/vm_eval.c
+++ b/vm_eval.c
@@ -47,7 +47,7 @@ rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
{
struct rb_calling_info calling = {
.ci = &VM_CI_ON_STACK(id, kw_splat ? VM_CALL_KW_SPLAT : 0, argc, NULL),
- .cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, { 0 }, cme),
+ .cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme),
.block_handler = vm_passed_block_handler(ec),
.recv = recv,
.argc = argc,
@@ -89,7 +89,7 @@ vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
static VALUE
vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
{
- calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, { 0 }, cme);
+ calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
return vm_call0_body(ec, calling, argv);
}
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index a662de468d..1cd66cf1eb 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -50,6 +50,11 @@ MJIT_STATIC VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
+ rb_shape_t * shape = rb_shape_get_shape(exc);
+ if (rb_shape_frozen_shape_p(shape)) {
+ shape = shape->parent;
+ }
+ rb_shape_set_shape(e, shape);
rb_obj_copy_ivar(e, exc);
return e;
}
@@ -1085,35 +1090,19 @@ vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_l
return klass;
}
-static bool
-iv_index_tbl_lookup(struct st_table *iv_index_tbl, ID id, struct rb_iv_index_tbl_entry **ent)
-{
- int found;
- st_data_t ent_data;
-
- if (iv_index_tbl == NULL) return false;
-
- RB_VM_LOCK_ENTER();
- {
- found = st_lookup(iv_index_tbl, (st_data_t)id, &ent_data);
- }
- RB_VM_LOCK_LEAVE();
- if (found) *ent = (struct rb_iv_index_tbl_entry *)ent_data;
-
- return found ? true : false;
-}
-
-ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent));
-
+ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static inline void
-fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent)
+fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
- // fill cache
- if (!is_attr) {
- vm_ic_entry_set(ic, ent, iseq);
+ if (is_attr) {
+ if (vm_cc_markable(cc)) {
+ vm_cc_attr_index_set(cc, index, shape_id, shape_id);
+ RB_OBJ_WRITTEN(cc, Qundef, rb_shape_get_shape_by_id(shape_id));
+ }
}
else {
- vm_cc_attr_index_set(cc, ent->index);
+ vm_ic_attr_index_set(iseq, ic, index, shape_id, shape_id);
+ RB_OBJ_WRITTEN(iseq, Qundef, rb_shape_get_shape_by_id(shape_id));
}
}
@@ -1123,68 +1112,120 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
{
#if OPT_IC_FOR_IVAR
VALUE val = Qundef;
+ shape_id_t shape_id;
+ VALUE * ivar_list;
if (SPECIAL_CONST_P(obj)) {
- // frozen?
+ return Qnil;
}
- else if (LIKELY(is_attr ?
- RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, vm_cc_attr_index_p(cc)) :
- RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial, vm_ic_entry_p(ic) && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
- uint32_t index = !is_attr ? vm_ic_entry_index(ic): (vm_cc_attr_index(cc));
- RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
-
- if (LIKELY(BUILTIN_TYPE(obj) == T_OBJECT) &&
- LIKELY(index < ROBJECT_NUMIV(obj))) {
- val = ROBJECT_IVPTR(obj)[index];
+#if SHAPE_IN_BASIC_FLAGS
+ shape_id = RBASIC_SHAPE_ID(obj);
+#endif
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ ivar_list = ROBJECT_IVPTR(obj);
VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
- }
- else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
- val = rb_ivar_generic_lookup_with_index(obj, id, index);
- }
- goto ret;
+#if !SHAPE_IN_BASIC_FLAGS
+ shape_id = ROBJECT_SHAPE_ID(obj);
+#endif
+ break;
+ case T_CLASS:
+ case T_MODULE:
+ {
+ goto general_path;
+ }
+ default:
+ if (FL_TEST_RAW(obj, FL_EXIVAR)) {
+ struct gen_ivtbl *ivtbl;
+ rb_gen_ivtbl_get(obj, id, &ivtbl);
+#if !SHAPE_IN_BASIC_FLAGS
+ shape_id = ivtbl->shape_id;
+#endif
+ ivar_list = ivtbl->ivptr;
+ } else {
+ return Qnil;
+ }
}
- else {
- struct rb_iv_index_tbl_entry *ent;
- if (BUILTIN_TYPE(obj) == T_OBJECT) {
- struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
+ shape_id_t cached_id;
- if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
- fill_ivar_cache(iseq, ic, cc, is_attr, ent);
+ if (is_attr) {
+ cached_id = vm_cc_attr_shape_id(cc);
+ }
+ else {
+ cached_id = vm_ic_attr_shape_id(ic);
+ }
- // get value
- if (ent->index < ROBJECT_NUMIV(obj)) {
- val = ROBJECT_IVPTR(obj)[ent->index];
+ attr_index_t index;
- VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
- }
- }
+ if (LIKELY(cached_id == shape_id)) {
+ RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
+
+ if (is_attr && vm_cc_attr_index_p(cc)) {
+ index = vm_cc_attr_index(cc);
+ }
+ else if (!is_attr && vm_ic_attr_index_p(ic)) {
+ index = vm_ic_attr_index(ic);
+ }
+ else {
+ return Qnil;
}
- else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
- struct st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
- if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
- fill_ivar_cache(iseq, ic, cc, is_attr, ent);
- val = rb_ivar_generic_lookup_with_index(obj, id, ent->index);
+ val = ivar_list[index];
+ VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
+ }
+ else { // cache miss case
+#if RUBY_DEBUG
+ if (is_attr) {
+ if (cached_id != INVALID_SHAPE_ID) {
+ RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
+ } else {
+ RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
}
}
else {
- // T_CLASS / T_MODULE
- goto general_path;
+ if (cached_id != INVALID_SHAPE_ID) {
+ RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
+ } else {
+ RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
+ }
}
+#endif
- ret:
- if (LIKELY(val != Qundef)) {
- return val;
+ attr_index_t index;
+ rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
+
+ if (rb_shape_get_iv_index(shape, id, &index)) {
+ // The ivar exists in this shape: fill the inline cache with its
+ // index and the shape id so the next lookup can skip the tree walk
+ fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
+
+ // We fetched the ivar list above
+ val = ivar_list[index];
}
else {
- return Qnil;
+ if (is_attr) {
+ if (vm_cc_markable(cc)) {
+ vm_cc_attr_index_initialize(cc, shape_id);
+ }
+ }
+ else {
+ vm_ic_attr_index_initialize(ic, shape_id);
+ }
+
+ val = Qnil;
}
+
}
- general_path:
+
+ RUBY_ASSERT(val != Qundef);
+
+ return val;
+
+general_path:
#endif /* OPT_IC_FOR_IVAR */
RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
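The rewritten vm_getivar above reduces the fast path to one comparison: if the object's current shape id equals the cached shape id, the cached index is used directly (or Qnil is returned when the cache records that the ivar is absent); on a mismatch the slow path asks rb_shape_get_iv_index and refills the cache. A condensed sketch of that guard (plain Ruby stand-ins, not CRuby source):

```ruby
Cache = Struct.new(:shape_id, :index)    # index of nil plays the role of "ivar absent"

def get_ivar(obj_shape_id, ivar_list, cache)
  if cache.shape_id == obj_shape_id      # cache hit: the layout is unchanged
    cache.index ? ivar_list[cache.index] : nil
  else                                   # cache miss: consult the shape tree,
    :slow_path                           # then record shape_id/index for next time
  end
end

cache = Cache.new(7, 0)
p get_ivar(7, [:cached], cache)   # => :cached
p get_ivar(8, [:cached], cache)   # => :slow_path (shape changed since caching)
```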
@@ -1196,6 +1237,24 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
}
}
+static void
+populate_cache(attr_index_t index, rb_shape_t *shape, rb_shape_t *next_shape, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
+{
+ // Cache population code
+ if (is_attr) {
+ if (vm_cc_markable(cc)) {
+ vm_cc_attr_index_set(cc, index, SHAPE_ID(shape), SHAPE_ID(next_shape));
+ RB_OBJ_WRITTEN(cc, Qundef, (VALUE)shape);
+ RB_OBJ_WRITTEN(cc, Qundef, (VALUE)next_shape);
+ }
+ }
+ else {
+ vm_ic_attr_index_set(iseq, ic, index, SHAPE_ID(shape), SHAPE_ID(next_shape));
+ RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)shape);
+ RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)next_shape);
+ }
+}
+
ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
@@ -1203,35 +1262,66 @@ NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, cons
static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
- rb_check_frozen_internal(obj);
-
#if OPT_IC_FOR_IVAR
- if (RB_TYPE_P(obj, T_OBJECT)) {
- struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
- struct rb_iv_index_tbl_entry *ent;
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ {
+ rb_check_frozen_internal(obj);
+
+ attr_index_t index;
+
+ uint32_t num_iv = ROBJECT_NUMIV(obj);
+ rb_shape_t* shape = rb_shape_get_shape(obj);
+ rb_shape_t* next_shape = rb_shape_get_next(shape, obj, id);
+ if (shape != next_shape) {
+ rb_shape_set_shape(obj, next_shape);
+ }
- if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
- if (!is_attr) {
- vm_ic_entry_set(ic, ent, iseq);
- }
- else if (ent->index >= INT_MAX) {
- rb_raise(rb_eArgError, "too many instance variables");
- }
- else {
- vm_cc_attr_index_set(cc, (int)(ent->index));
- }
+            if (rb_shape_get_iv_index(next_shape, id, &index)) { // the index is found by walking up the shape transition tree
+ if (index >= MAX_IVARS) {
+ rb_raise(rb_eArgError, "too many instance variables");
+ }
- uint32_t index = ent->index;
+ populate_cache(index, shape, next_shape, id, iseq, ic, cc, is_attr);
+ }
+ else {
+ rb_bug("Didn't find instance variable %s\n", rb_id2name(id));
+ }
+
+ // Ensure the IV buffer is wide enough to store the IV
+ if (UNLIKELY(index >= num_iv)) {
+ rb_init_iv_list(obj);
+ }
- if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
- rb_init_iv_list(obj);
+ VALUE *ptr = ROBJECT_IVPTR(obj);
+ RB_OBJ_WRITE(obj, &ptr[index], val);
+ RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
+
+ return val;
}
- VALUE *ptr = ROBJECT_IVPTR(obj);
- RB_OBJ_WRITE(obj, &ptr[index], val);
- RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
+ case T_CLASS:
+ case T_MODULE:
+ break;
+ default:
+ {
+ rb_shape_t * shape = rb_shape_get_shape(obj);
+ rb_ivar_set(obj, id, val);
+ rb_shape_t * next_shape = rb_shape_get_shape(obj);
+ attr_index_t index;
+
+            if (rb_shape_get_iv_index(next_shape, id, &index)) { // the index is found by walking up the shape transition tree
+ if (index >= MAX_IVARS) {
+ rb_raise(rb_eArgError, "too many instance variables");
+ }
- return val;
- }
+ populate_cache(index, shape, next_shape, id, iseq, ic, cc, is_attr);
+ }
+ else {
+                rb_bug("Didn't find instance variable %s\n", rb_id2name(id));
+ }
+
+ return val;
+ }
}
#endif
RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
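
For context on the slow path above: when a write introduces a new instance variable, the object transitions from its current shape to a child shape, and `populate_cache` records the (source shape, destination shape, index) triple so later writes at the same site can repeat the transition without consulting the tree. The standalone sketch below models that bookkeeping with an invented, fixed shape table in place of the real shape tree; every name is illustrative only.

```c
/* Toy model of the write-side slow path: transition the shape, then cache
 * (source, dest, index) for the next write at this call site. */
#include <stdio.h>
#include <stdint.h>
#include <string.h>

typedef uint32_t shape_id_t;
typedef uint32_t attr_index_t;

struct toy_shape { shape_id_t id; const char *edge_name; shape_id_t parent; attr_index_t iv_count; };

/* A tiny fixed "shape tree": root (no ivars) -> @a -> @a,@b. */
static const struct toy_shape shapes[] = {
    { 0, NULL, 0, 0 },
    { 1, "@a", 0, 1 },
    { 2, "@b", 1, 2 },
};

struct toy_object { shape_id_t shape_id; long ivars[8]; };
struct toy_cache  { shape_id_t source; shape_id_t dest; attr_index_t index; };

/* Find the child shape reached from `current` by adding `name`
 * (the real VM creates the child shape on demand). */
static shape_id_t toy_shape_get_next(shape_id_t current, const char *name)
{
    for (size_t i = 0; i < sizeof(shapes) / sizeof(shapes[0]); i++) {
        if (shapes[i].edge_name && shapes[i].parent == current &&
            strcmp(shapes[i].edge_name, name) == 0) {
            return shapes[i].id;
        }
    }
    return current; /* no matching edge in this toy tree */
}

/* Assumes `name` is a *new* ivar; an existing one would instead be found by
 * walking the shape chain for its index. */
static void toy_setivar_slowpath(struct toy_object *obj, const char *name, long val,
                                 struct toy_cache *cache)
{
    shape_id_t source = obj->shape_id;
    shape_id_t dest = toy_shape_get_next(source, name);
    attr_index_t index = shapes[dest].iv_count - 1; /* position of the newly added ivar */

    obj->shape_id = dest;    /* the object "transitions" to the new shape */
    obj->ivars[index] = val;

    cache->source = source;  /* future writes seeing this source shape ...   */
    cache->dest = dest;      /* ... can jump straight to this dest shape ... */
    cache->index = index;    /* ... and store at this index, with no lookup. */
}

int main(void)
{
    struct toy_object obj = { .shape_id = 0 };
    struct toy_cache cache = { 0, 0, 0 };
    toy_setivar_slowpath(&obj, "@a", 10, &cache);
    toy_setivar_slowpath(&obj, "@b", 20, &cache);
    printf("shape=%u cache=(%u -> %u, idx=%u)\n", (unsigned)obj.shape_id,
           (unsigned)cache.source, (unsigned)cache.dest, (unsigned)cache.index);
    return 0; /* prints "shape=2 cache=(1 -> 2, idx=1)" */
}
```
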
@@ -1250,39 +1340,99 @@ vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache
return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
+NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t source_shape_id, shape_id_t dest_shape_id, attr_index_t index));
+static VALUE
+vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t source_shape_id, shape_id_t dest_shape_id, attr_index_t index)
+{
+#if SHAPE_IN_BASIC_FLAGS
+ shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
+#else
+ shape_id_t shape_id = rb_generic_shape_id(obj);
+#endif
+
+ // Cache hit case
+ if (shape_id == source_shape_id) {
+ RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
+
+ struct gen_ivtbl *ivtbl = 0;
+ if (dest_shape_id != shape_id) {
+ ivtbl = rb_ensure_generic_iv_list_size(obj, index + 1);
+#if SHAPE_IN_BASIC_FLAGS
+ RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
+#else
+ ivtbl->shape_id = dest_shape_id;
+#endif
+ RB_OBJ_WRITTEN(obj, Qundef, rb_shape_get_shape_by_id(dest_shape_id));
+ }
+ else {
+ // Just get the IV table
+ RUBY_ASSERT(GET_VM()->shape_list[dest_shape_id]);
+ rb_gen_ivtbl_get(obj, 0, &ivtbl);
+ }
+
+ VALUE *ptr = ivtbl->ivptr;
+
+ RB_OBJ_WRITE(obj, &ptr[index], val);
+
+ RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
+
+ return val;
+ }
+
+ return Qundef;
+}
+
static inline VALUE
-vm_setivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
+vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t source_shape_id, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
- if (LIKELY(RB_TYPE_P(obj, T_OBJECT)) &&
- LIKELY(!RB_OBJ_FROZEN_RAW(obj))) {
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ {
+ VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
+            // If the object's shape ID matches the cached source shape ID,
+            // this site can skip the lookup entirely: transition to the
+            // cached destination shape when it differs from the source,
+            // then write the ivar at the cached index.
+ shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
+
+            // Do we have a cache hit *and* is the cache initialized?
+ if (shape_id == source_shape_id) {
+ RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
+
+ VM_ASSERT(!rb_ractor_shareable_p(obj));
+
+ if (dest_shape_id != shape_id) {
+ if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
+ rb_init_iv_list(obj);
+ }
+ ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
+ }
+ else {
+ RUBY_ASSERT(GET_VM()->shape_list[dest_shape_id]);
+ }
+
+ RUBY_ASSERT(index < ROBJECT_NUMIV(obj));
- VM_ASSERT(!rb_ractor_shareable_p(obj));
+ VALUE *ptr = ROBJECT_IVPTR(obj);
- if (LIKELY(
- (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, vm_ic_entry_p(ic) && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass))) ||
- ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index_p(cc))))) {
- uint32_t index = !is_attr ? vm_ic_entry_index(ic) : vm_cc_attr_index(cc);
+ RB_OBJ_WRITE(obj, &ptr[index], val);
- if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
- rb_init_iv_list(obj);
+ RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
+
+ return val;
+ }
}
- VALUE *ptr = ROBJECT_IVPTR(obj);
- RB_OBJ_WRITE(obj, &ptr[index], val);
- RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
- return val; /* inline cache hit */
- }
- }
- else {
+ break;
+ case T_CLASS:
+ case T_MODULE:
RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
+ default:
+ break;
}
+
+ return Qundef;
#endif /* OPT_IC_FOR_IVAR */
- if (is_attr) {
- return vm_setivar_slowpath_attr(obj, id, val, cc);
- }
- else {
- return vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
- }
}
static VALUE
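
With the rewrite above, `vm_setivar` receives the cached source shape, destination shape, and index directly, so a cache hit needs no lookup at all: if the object still has the source shape, optionally transition it to the destination shape (when the write adds a new ivar) and store at the cached index. A compact standalone sketch of just that check, with invented stand-in types:

```c
/* Toy model of the fast-path write keyed on (source shape, dest shape, index). */
#include <stdio.h>
#include <stdint.h>

typedef uint32_t shape_id_t;
typedef uint32_t attr_index_t;

struct toy_object      { shape_id_t shape_id; long ivars[8]; };
struct toy_write_cache { shape_id_t source; shape_id_t dest; attr_index_t index; };

static int toy_setivar_fast(struct toy_object *obj, const struct toy_write_cache *c, long val)
{
    if (obj->shape_id != c->source) {
        return 0;                     /* miss: fall back to the slow path */
    }
    if (c->dest != c->source) {
        obj->shape_id = c->dest;      /* the ivar is new for this shape: transition */
    }
    obj->ivars[c->index] = val;       /* either way, store at the cached index */
    return 1;
}

int main(void)
{
    struct toy_object obj = { .shape_id = 1, .ivars = { 10 } };
    struct toy_write_cache c = { .source = 1, .dest = 2, .index = 1 };
    int hit = toy_setivar_fast(&obj, &c, 20);
    printf("hit=%d shape=%u ivar=%ld\n", hit, (unsigned)obj.shape_id, obj.ivars[1]);
    return 0; /* prints "hit=1 shape=2 ivar=20" */
}
```
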
@@ -1377,7 +1527,22 @@ vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
- vm_setivar(obj, id, val, iseq, ic, 0, 0);
+ shape_id_t source_shape_id = vm_ic_attr_index_source_shape_id(ic);
+ attr_index_t index = vm_ic_attr_index(ic);
+ shape_id_t dest_shape_id = vm_ic_attr_index_dest_shape_id(ic);
+ if (UNLIKELY(vm_setivar(obj, id, val, source_shape_id, dest_shape_id, index) == Qundef)) {
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ case T_CLASS:
+ case T_MODULE:
+ break;
+ default:
+ if (vm_setivar_default(obj, id, val, source_shape_id, dest_shape_id, index) != Qundef) {
+ return;
+ }
+ }
+ vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
+ }
}
void
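
As a reading aid for the new dispatch in `vm_setinstancevariable` above: `vm_setivar` covers the T_OBJECT fast path, `vm_setivar_default` covers other heap objects whose ivars live in the generic table, and `vm_setivar_slowpath_ivar` is the catch-all that also refills the cache; `Qundef` is the "not handled here" sentinel that chains them. A trivial standalone model of that fallback pattern (all names invented):

```c
/* Toy model of the layered fallback: each tier returns a sentinel when it
 * cannot handle the write, and the next tier is tried. */
#include <stdio.h>

#define TOY_UNDEF (-1L)

/* Tier 1: inline-cache fast path; succeeds only on a cache hit. */
static long tier_fast(int cache_hit, long val)      { return cache_hit ? val : TOY_UNDEF; }
/* Tier 2: type-specific default path; succeeds only for certain object kinds. */
static long tier_default(int generic_obj, long val) { return generic_obj ? val : TOY_UNDEF; }
/* Tier 3: generic slow path; always succeeds (and would refill the cache). */
static long tier_slow(long val)                     { return val; }

static long toy_write(int cache_hit, int generic_obj, long val)
{
    long r = tier_fast(cache_hit, val);
    if (r != TOY_UNDEF) return r;
    r = tier_default(generic_obj, val);
    if (r != TOY_UNDEF) return r;
    return tier_slow(val);
}

int main(void)
{
    printf("%ld %ld %ld\n", toy_write(1, 0, 5), toy_write(0, 1, 6), toy_write(0, 0, 7));
    return 0; /* prints "5 6 7": each value reached a different tier */
}
```
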
@@ -1386,28 +1551,6 @@ rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IV
vm_setinstancevariable(iseq, obj, id, val, ic);
}
-/* Set the instance variable +val+ on object +obj+ at the +index+.
- * This function only works with T_OBJECT objects, so make sure
- * +obj+ is of type T_OBJECT before using this function.
- */
-VALUE
-rb_vm_set_ivar_idx(VALUE obj, uint32_t index, VALUE val)
-{
- RUBY_ASSERT(RB_TYPE_P(obj, T_OBJECT));
-
- rb_check_frozen_internal(obj);
-
- VM_ASSERT(!rb_ractor_shareable_p(obj));
-
- if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
- rb_init_iv_list(obj);
- }
- VALUE *ptr = ROBJECT_IVPTR(obj);
- RB_OBJ_WRITE(obj, &ptr[index], val);
-
- return val;
-}
-
static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
@@ -3100,17 +3243,45 @@ vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_call
const struct rb_callcache *cc = calling->cc;
RB_DEBUG_COUNTER_INC(ccf_ivar);
cfp->sp -= 1;
- return vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
+ VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
+ return ivar;
}
static VALUE
-vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
+vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
- const struct rb_callcache *cc = calling->cc;
RB_DEBUG_COUNTER_INC(ccf_attrset);
VALUE val = *(cfp->sp - 1);
cfp->sp -= 2;
- return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, NULL, cc, 1);
+ shape_id_t source_shape_id = vm_cc_attr_index_source_shape_id(cc);
+ attr_index_t index = vm_cc_attr_index(cc);
+ shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
+ ID id = vm_cc_cme(cc)->def->body.attr.id;
+ rb_check_frozen_internal(obj);
+ VALUE res = vm_setivar(obj, id, val, source_shape_id, dest_shape_id, index);
+ if (res == Qundef) {
+ switch (BUILTIN_TYPE(obj)) {
+ case T_OBJECT:
+ case T_CLASS:
+ case T_MODULE:
+ break;
+ default:
+ {
+ res = vm_setivar_default(obj, id, val, source_shape_id, dest_shape_id, index);
+ if (res != Qundef) {
+ return res;
+ }
+ }
+ }
+ res = vm_setivar_slowpath_attr(obj, id, val, cc);
+ }
+ return res;
+}
+
+static VALUE
+vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
+{
+ return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}
bool
@@ -3219,7 +3390,7 @@ vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_cal
{
calling->cc = &VM_CC_ON_STACK(Qundef,
vm_call_general,
- { 0 },
+ {{0}},
aliased_callable_method_entry(vm_cc_cme(calling->cc)));
return vm_call_method_each_type(ec, cfp, calling);
@@ -3389,7 +3560,7 @@ vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_
ec->method_missing_reason = reason;
calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
- calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 },
+ calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
return vm_call_method(ec, reg_cfp, calling);
}
@@ -3415,7 +3586,7 @@ vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_ca
cme = refined_method_callable_without_refinement(cme);
}
- calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 }, cme);
+ calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
return vm_call_method_each_type(ec, cfp, calling);
}
@@ -3522,7 +3693,7 @@ search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struc
static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
- struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 },
+ struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
search_refined_method(ec, cfp, calling));
if (vm_cc_cme(ref_cc)) {
@@ -3702,18 +3873,45 @@ vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, st
CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
rb_check_arity(calling->argc, 1, 1);
- vm_cc_attr_index_initialize(cc);
+
const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
- VM_CALL_METHOD_ATTR(v,
- vm_call_attrset(ec, cfp, calling),
- CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
+
+ if (vm_cc_markable(cc)) {
+ vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
+ VM_CALL_METHOD_ATTR(v,
+ vm_call_attrset_direct(ec, cfp, cc, calling->recv),
+ CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
+ } else {
+ cc = &((struct rb_callcache) {
+ .flags = T_IMEMO |
+ (imemo_callcache << FL_USHIFT) |
+ VM_CALLCACHE_UNMARKABLE |
+ ((VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
+ VM_CALLCACHE_ON_STACK,
+ .klass = cc->klass,
+ .cme_ = cc->cme_,
+ .call_ = cc->call_,
+ .aux_ = {
+ .attr = {
+ .index = 0,
+ .dest_shape_id = INVALID_SHAPE_ID,
+ }
+ },
+ });
+
+ VM_CALL_METHOD_ATTR(v,
+ vm_call_attrset_direct(ec, cfp, cc, calling->recv),
+ CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
+ }
return v;
case VM_METHOD_TYPE_IVAR:
CALLER_SETUP_ARG(cfp, calling, ci);
CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
rb_check_arity(calling->argc, 0, 0);
- vm_cc_attr_index_initialize(cc);
+ if (vm_cc_markable(cc)) {
+ vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
+ }
const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
VM_CALL_METHOD_ATTR(v,
vm_call_ivar(ec, cfp, calling),
diff --git a/yjit/bindgen/src/main.rs b/yjit/bindgen/src/main.rs
index c3d4a39a2b..4b50d888de 100644
--- a/yjit/bindgen/src/main.rs
+++ b/yjit/bindgen/src/main.rs
@@ -40,6 +40,7 @@ fn main() {
.header("internal.h")
.header("internal/re.h")
.header("include/ruby/ruby.h")
+ .header("shape.h")
.header("vm_core.h")
.header("vm_callinfo.h")
@@ -81,6 +82,12 @@ fn main() {
// This function prints info about a value and is useful for debugging
.allowlist_function("rb_obj_info_dump")
+ // From shape.h
+ .allowlist_function("rb_shape_get_shape_id")
+ .allowlist_function("rb_shape_get_shape_by_id")
+ .allowlist_function("rb_shape_flags_mask")
+ .allowlist_function("rb_shape_get_iv_index")
+
// From ruby/internal/intern/object.h
.allowlist_function("rb_obj_is_kind_of")
diff --git a/yjit/src/asm/x86_64/mod.rs b/yjit/src/asm/x86_64/mod.rs
index d310e3bf12..42d97b7e80 100644
--- a/yjit/src/asm/x86_64/mod.rs
+++ b/yjit/src/asm/x86_64/mod.rs
@@ -617,7 +617,7 @@ fn write_rm_multi(cb: &mut CodeBlock, op_mem_reg8: u8, op_mem_reg_pref: u8, op_r
write_rm(cb, sz_pref, rex_w, X86Opnd::None, opnd0, op_ext_imm, &[op_mem_imm_lrg]);
cb.write_int(uimm.value, if opnd_size > 32 { 32 } else { opnd_size.into() });
} else {
- panic!("immediate value too large");
+ panic!("immediate value too large (num_bits={})", num_bits);
}
},
_ => unreachable!()
diff --git a/yjit/src/codegen.rs b/yjit/src/codegen.rs
index c246c7b48f..4018a314fc 100644
--- a/yjit/src/codegen.rs
+++ b/yjit/src/codegen.rs
@@ -1938,14 +1938,12 @@ fn gen_set_ivar(
let val_opnd = ctx.stack_pop(1);
let recv_opnd = ctx.stack_pop(1);
- let ivar_index: u32 = unsafe { rb_obj_ensure_iv_index_mapping(recv, ivar_name) };
-
- // Call rb_vm_set_ivar_idx with the receiver, the index of the ivar, and the value
+ // Call rb_vm_set_ivar_id with the receiver, the ivar name, and the value
let val = asm.ccall(
- rb_vm_set_ivar_idx as *const u8,
+ rb_vm_set_ivar_id as *const u8,
vec![
recv_opnd,
- Opnd::Imm(ivar_index.into()),
+ Opnd::UImm(ivar_name.into()),
val_opnd,
],
);
@@ -2023,81 +2021,82 @@ fn gen_get_ivar(
return EndBlock;
}
- // FIXME: Mapping the index could fail when there is too many ivar names. If we're
- // compiling for a branch stub that can cause the exception to be thrown from the
- // wrong PC.
- let ivar_index =
- unsafe { rb_obj_ensure_iv_index_mapping(comptime_receiver, ivar_name) }.as_usize();
+ let ivar_index = unsafe {
+ let shape_id = comptime_receiver.shape_of();
+ let shape = rb_shape_get_shape_by_id(shape_id);
+ let mut ivar_index: u32 = 0;
+ if rb_shape_get_iv_index(shape, ivar_name, &mut ivar_index) {
+ Some(ivar_index as usize)
+ } else {
+ None
+ }
+ };
+
+ // must be before stack_pop
+ let recv_type = ctx.get_opnd_type(recv_opnd);
+
+ // Upgrade type
+ if !recv_type.is_heap() {
+ ctx.upgrade_opnd_type(recv_opnd, Type::UnknownHeap);
+ }
// Pop receiver if it's on the temp stack
if recv_opnd != SelfOpnd {
ctx.stack_pop(1);
}
- if USE_RVARGC != 0 {
- // Check that the ivar table is big enough
- // Check that the slot is inside the ivar table (num_slots > index)
- let num_slots = Opnd::mem(32, recv, ROBJECT_OFFSET_NUMIV);
- asm.cmp(num_slots, Opnd::UImm(ivar_index as u64));
- asm.jbe(counted_exit!(ocb, side_exit, getivar_idx_out_of_range).into());
+ // Guard heap object
+ if !recv_type.is_heap() {
+ guard_object_is_heap(asm, recv, side_exit);
}
// Compile time self is embedded and the ivar index lands within the object
- let test_result = unsafe { FL_TEST_RAW(comptime_receiver, VALUE(ROBJECT_EMBED.as_usize())) != VALUE(0) };
- if test_result {
- // See ROBJECT_IVPTR() from include/ruby/internal/core/robject.h
+ let embed_test_result = unsafe { FL_TEST_RAW(comptime_receiver, VALUE(ROBJECT_EMBED.as_usize())) != VALUE(0) };
- // Guard that self is embedded
- // TODO: BT and JC is shorter
- asm.comment("guard embedded getivar");
- let flags_opnd = Opnd::mem(64, recv, RUBY_OFFSET_RBASIC_FLAGS);
- asm.test(flags_opnd, Opnd::UImm(ROBJECT_EMBED as u64));
- let side_exit = counted_exit!(ocb, side_exit, getivar_megamorphic);
- jit_chain_guard(
- JCC_JZ,
- jit,
- &starting_context,
- asm,
- ocb,
- max_chain_depth,
- side_exit,
- );
+ let flags_mask: usize = unsafe { rb_shape_flags_mask() }.as_usize();
+ let expected_flags_mask: usize = (RUBY_T_MASK as usize) | !flags_mask | (ROBJECT_EMBED as usize);
+ let expected_flags = comptime_receiver.builtin_flags() & expected_flags_mask;
+
+ // Combined guard for all flags: shape, embeddedness, and T_OBJECT
+ let flags_opnd = Opnd::mem(64, recv, RUBY_OFFSET_RBASIC_FLAGS);
+
+ asm.comment("guard shape, embedded, and T_OBJECT");
+ let flags_opnd = asm.and(flags_opnd, Opnd::UImm(expected_flags_mask as u64));
+ asm.cmp(flags_opnd, Opnd::UImm(expected_flags as u64));
+ jit_chain_guard(
+ JCC_JNE,
+ jit,
+ &starting_context,
+ asm,
+ ocb,
+ max_chain_depth,
+ side_exit,
+ );
+
+ // If there is no IVAR index, then the ivar was undefined
+ // when we entered the compiler. That means we can just return
+ // nil for this shape + iv name
+ if ivar_index.is_none() {
+ let out_opnd = ctx.stack_push(Type::Nil);
+ asm.mov(out_opnd, Qnil.into());
+ } else if embed_test_result {
+ // See ROBJECT_IVPTR() from include/ruby/internal/core/robject.h
// Load the variable
- let offs = ROBJECT_OFFSET_AS_ARY + (ivar_index * SIZEOF_VALUE) as i32;
+ let offs = ROBJECT_OFFSET_AS_ARY + (ivar_index.unwrap() * SIZEOF_VALUE) as i32;
let ivar_opnd = Opnd::mem(64, recv, offs);
- // Guard that the variable is not Qundef
- asm.cmp(ivar_opnd, Qundef.into());
- let out_val = asm.csel_e(Qnil.into(), ivar_opnd);
-
// Push the ivar on the stack
let out_opnd = ctx.stack_push(Type::Unknown);
- asm.mov(out_opnd, out_val);
+ asm.mov(out_opnd, ivar_opnd);
} else {
// Compile time value is *not* embedded.
- // Guard that value is *not* embedded
- // See ROBJECT_IVPTR() from include/ruby/internal/core/robject.h
- asm.comment("guard extended getivar");
- let flags_opnd = Opnd::mem(64, recv, RUBY_OFFSET_RBASIC_FLAGS);
- asm.test(flags_opnd, Opnd::UImm(ROBJECT_EMBED as u64));
- let megamorphic_side_exit = counted_exit!(ocb, side_exit, getivar_megamorphic);
- jit_chain_guard(
- JCC_JNZ,
- jit,
- &starting_context,
- asm,
- ocb,
- max_chain_depth,
- megamorphic_side_exit,
- );
-
if USE_RVARGC == 0 {
// Check that the extended table is big enough
// Check that the slot is inside the extended table (num_slots > index)
let num_slots = Opnd::mem(32, recv, ROBJECT_OFFSET_NUMIV);
- asm.cmp(num_slots, Opnd::UImm(ivar_index as u64));
+ asm.cmp(num_slots, Opnd::UImm(ivar_index.unwrap() as u64));
asm.jbe(counted_exit!(ocb, side_exit, getivar_idx_out_of_range).into());
}
@@ -2105,15 +2104,10 @@ fn gen_get_ivar(
let tbl_opnd = asm.load(Opnd::mem(64, recv, ROBJECT_OFFSET_AS_HEAP_IVPTR));
// Read the ivar from the extended table
- let ivar_opnd = Opnd::mem(64, tbl_opnd, (SIZEOF_VALUE * ivar_index) as i32);
-
- // Check that the ivar is not Qundef
- asm.cmp(ivar_opnd, Qundef.into());
- let out_val = asm.csel_ne(ivar_opnd, Qnil.into());
+ let ivar_opnd = Opnd::mem(64, tbl_opnd, (SIZEOF_VALUE * ivar_index.unwrap()) as i32);
- // Push the ivar on the stack
let out_opnd = ctx.stack_push(Type::Unknown);
- asm.mov(out_opnd, out_val);
+ asm.mov(out_opnd, ivar_opnd);
}
// Jump to next instruction. This allows guard chains to share the same successor.
@@ -2136,25 +2130,12 @@ fn gen_getinstancevariable(
let ivar_name = jit_get_arg(jit, 0).as_u64();
let comptime_val = jit_peek_at_self(jit);
- let comptime_val_klass = comptime_val.class_of();
// Generate a side exit
let side_exit = get_side_exit(jit, ocb, ctx);
// Guard that the receiver has the same class as the one from compile time.
let self_asm_opnd = Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SELF);
- jit_guard_known_klass(
- jit,
- ctx,
- asm,
- ocb,
- comptime_val_klass,
- self_asm_opnd,
- SelfOpnd,
- comptime_val,
- GET_IVAR_MAX_DEPTH,
- side_exit,
- );
gen_get_ivar(
jit,
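
The combined guard above folds three checks into a single masked compare of the `RBasic` flags word: the type bits (is it still a T_OBJECT), the embed bit (are the ivars still stored inline), and the shape-ID bits (everything outside `rb_shape_flags_mask()`). The standalone sketch below shows the masking idea with a completely invented bit layout, purely for illustration:

```c
/* Toy model of the combined "shape, embedded, and T_OBJECT" guard:
 * one masked compare of the flags word covers all three checks. */
#include <stdio.h>
#include <stdint.h>

#define TOY_T_MASK     0x1fULL            /* low bits: object type           */
#define TOY_EMBED_BIT  (1ULL << 5)        /* one bit: ivars embedded or not  */
#define TOY_SHAPE_MASK (0xffffULL << 16)  /* some high bits: the shape ID    */

static int toy_guard(uint64_t runtime_flags, uint64_t comptime_flags)
{
    uint64_t mask = TOY_T_MASK | TOY_EMBED_BIT | TOY_SHAPE_MASK;
    uint64_t expected = comptime_flags & mask;
    return (runtime_flags & mask) == expected; /* one compare, three guards */
}

int main(void)
{
    /* type 0x05, embed bit set, shape 0x2a */
    uint64_t comptime = 0x25ULL | (0x2aULL << 16);
    printf("same layout:     %d\n", toy_guard(comptime, comptime));                /* 1 */
    printf("different shape: %d\n", toy_guard(comptime ^ (1ULL << 16), comptime)); /* 0 */
    return 0;
}
```

If the masked compare fails, the generated code above side-exits through `jit_chain_guard`, so the fast ivar read only ever runs for objects with the exact layout seen at compile time.
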
diff --git a/yjit/src/cruby.rs b/yjit/src/cruby.rs
index 25149ab730..65f398f075 100644
--- a/yjit/src/cruby.rs
+++ b/yjit/src/cruby.rs
@@ -120,7 +120,7 @@ extern "C" {
obj: VALUE,
v: VALUE,
) -> bool;
- pub fn rb_vm_set_ivar_idx(obj: VALUE, idx: u32, val: VALUE) -> VALUE;
+    pub fn rb_vm_set_ivar_id(obj: VALUE, id: ID, val: VALUE) -> VALUE;
pub fn rb_vm_setinstancevariable(iseq: IseqPtr, obj: VALUE, id: ID, val: VALUE, ic: IVC);
pub fn rb_aliased_callable_method_entry(
me: *const rb_callable_method_entry_t,
@@ -354,18 +354,26 @@ impl VALUE {
/// Read the flags bits from the RBasic object, then return a Ruby type enum (e.g. RUBY_T_ARRAY)
pub fn builtin_type(self) -> ruby_value_type {
+ (self.builtin_flags() & (RUBY_T_MASK as usize)) as ruby_value_type
+ }
+
+ pub fn builtin_flags(self) -> usize {
assert!(!self.special_const_p());
let VALUE(cval) = self;
let rbasic_ptr = cval as *const RBasic;
let flags_bits: usize = unsafe { (*rbasic_ptr).flags }.as_usize();
- (flags_bits & (RUBY_T_MASK as usize)) as ruby_value_type
+ return flags_bits;
}
pub fn class_of(self) -> VALUE {
unsafe { CLASS_OF(self) }
}
+ pub fn shape_of(self) -> u32 {
+ unsafe { rb_shape_get_shape_id(self) }
+ }
+
pub fn as_isize(self) -> isize {
let VALUE(is) = self;
is as isize
diff --git a/yjit/src/cruby_bindings.inc.rs b/yjit/src/cruby_bindings.inc.rs
index f58bf1ca05..84a778cbc5 100644
--- a/yjit/src/cruby_bindings.inc.rs
+++ b/yjit/src/cruby_bindings.inc.rs
@@ -269,6 +269,30 @@ extern "C" {
extern "C" {
pub fn rb_reg_new_ary(ary: VALUE, options: ::std::os::raw::c_int) -> VALUE;
}
+pub type attr_index_t = u32;
+pub type shape_id_t = u32;
+#[repr(C)]
+pub struct rb_shape {
+ pub flags: VALUE,
+ pub parent: *mut rb_shape,
+ pub edges: *mut rb_id_table,
+ pub edge_name: ID,
+ pub iv_count: attr_index_t,
+ pub type_: u8,
+}
+pub type rb_shape_t = rb_shape;
+extern "C" {
+ pub fn rb_shape_get_shape_by_id(shape_id: shape_id_t) -> *mut rb_shape_t;
+}
+extern "C" {
+ pub fn rb_shape_get_shape_id(obj: VALUE) -> shape_id_t;
+}
+extern "C" {
+ pub fn rb_shape_get_iv_index(shape: *mut rb_shape_t, id: ID, value: *mut attr_index_t) -> bool;
+}
+extern "C" {
+ pub fn rb_shape_flags_mask() -> VALUE;
+}
pub const idDot2: ruby_method_ids = 128;
pub const idDot3: ruby_method_ids = 129;
pub const idUPlus: ruby_method_ids = 132;
@@ -513,6 +537,7 @@ pub const imemo_parser_strterm: imemo_type = 10;
pub const imemo_callinfo: imemo_type = 11;
pub const imemo_callcache: imemo_type = 12;
pub const imemo_constcache: imemo_type = 13;
+pub const imemo_shape: imemo_type = 14;
pub type imemo_type = u32;
pub const METHOD_VISI_UNDEF: rb_method_visibility_t = 0;
pub const METHOD_VISI_PUBLIC: rb_method_visibility_t = 1;
@@ -572,6 +597,11 @@ pub const OPTIMIZED_METHOD_TYPE_STRUCT_AREF: method_optimized_type = 3;
pub const OPTIMIZED_METHOD_TYPE_STRUCT_ASET: method_optimized_type = 4;
pub const OPTIMIZED_METHOD_TYPE__MAX: method_optimized_type = 5;
pub type method_optimized_type = u32;
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct rb_id_table {
+ _unused: [u8; 0],
+}
extern "C" {
pub fn rb_method_entry_at(obj: VALUE, id: ID) -> *const rb_method_entry_t;
}
@@ -600,9 +630,10 @@ pub struct iseq_inline_constant_cache {
pub segments: *const ID,
}
#[repr(C)]
-#[derive(Debug, Copy, Clone)]
pub struct iseq_inline_iv_cache_entry {
- pub entry: *mut rb_iv_index_tbl_entry,
+ pub source_shape_id: shape_id_t,
+ pub dest_shape_id: shape_id_t,
+ pub attr_index: attr_index_t,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
@@ -698,12 +729,6 @@ extern "C" {
) -> *const rb_callable_method_entry_t;
}
#[repr(C)]
-pub struct rb_iv_index_tbl_entry {
- pub index: u32,
- pub class_serial: rb_serial_t,
- pub class_value: VALUE,
-}
-#[repr(C)]
pub struct rb_cvar_class_tbl_entry {
pub index: u32,
pub global_cvar_state: rb_serial_t,