Diffstat (limited to 'shape.h')
-rw-r--r--  shape.h  499
1 file changed, 376 insertions(+), 123 deletions(-)
diff --git a/shape.h b/shape.h
index 60084a4aff..96c78f2bc1 100644
--- a/shape.h
+++ b/shape.h
@@ -2,230 +2,483 @@
#define RUBY_SHAPE_H
#include "internal/gc.h"
+#include "internal/struct.h"
-#if (SIZEOF_UINT64_T == SIZEOF_VALUE)
-#define SIZEOF_SHAPE_T 4
-#define SHAPE_IN_BASIC_FLAGS 1
-typedef uint32_t attr_index_t;
-#else
-#define SIZEOF_SHAPE_T 2
-#define SHAPE_IN_BASIC_FLAGS 0
typedef uint16_t attr_index_t;
-#endif
+typedef uint32_t shape_id_t;
+#define SHAPE_ID_NUM_BITS 32
+#define SHAPE_ID_OFFSET_NUM_BITS 19
-#define MAX_IVARS (attr_index_t)(-1)
+STATIC_ASSERT(shape_id_num_bits, SHAPE_ID_NUM_BITS == sizeof(shape_id_t) * CHAR_BIT);
-#if SIZEOF_SHAPE_T == 4
-typedef uint32_t shape_id_t;
-# define SHAPE_ID_NUM_BITS 32
-#else
-typedef uint16_t shape_id_t;
-# define SHAPE_ID_NUM_BITS 16
-#endif
+#define SHAPE_BUFFER_SIZE (1 << SHAPE_ID_OFFSET_NUM_BITS)
+#define SHAPE_ID_OFFSET_MASK (SHAPE_BUFFER_SIZE - 1)
+
+#define SHAPE_ID_HEAP_INDEX_BITS 3
+#define SHAPE_ID_HEAP_INDEX_MAX ((1 << SHAPE_ID_HEAP_INDEX_BITS) - 1)
+
+#define SHAPE_ID_FL_USHIFT SHAPE_ID_OFFSET_NUM_BITS
+#define SHAPE_ID_HEAP_INDEX_OFFSET SHAPE_ID_FL_USHIFT
+
+// shape_id_t bits:
+// 0-18 SHAPE_ID_OFFSET_MASK
+//      Index into rb_shape_tree.shape_list. Allows access to the `rb_shape_t *`.
+// 19-21 SHAPE_ID_HEAP_INDEX_MASK
+//      Index into rb_shape_tree.capacities. Allows access to the slot size.
+//      Always 0 except for T_OBJECT.
+// 22 SHAPE_ID_FL_FROZEN
+//      Whether the object is frozen.
+// 23 SHAPE_ID_FL_HAS_OBJECT_ID
+//      Whether the object has a `SHAPE_OBJ_ID` transition.
+// 24 SHAPE_ID_FL_TOO_COMPLEX
+//      Whether the object is backed by an `st_table`.
+
+enum shape_id_fl_type {
+#define RBIMPL_SHAPE_ID_FL(n) (1<<(SHAPE_ID_FL_USHIFT+n))
+
+ SHAPE_ID_HEAP_INDEX_MASK = RBIMPL_SHAPE_ID_FL(0) | RBIMPL_SHAPE_ID_FL(1) | RBIMPL_SHAPE_ID_FL(2),
+
+ SHAPE_ID_FL_FROZEN = RBIMPL_SHAPE_ID_FL(3),
+ SHAPE_ID_FL_HAS_OBJECT_ID = RBIMPL_SHAPE_ID_FL(4),
+ SHAPE_ID_FL_TOO_COMPLEX = RBIMPL_SHAPE_ID_FL(5),
+
+ SHAPE_ID_FL_NON_CANONICAL_MASK = SHAPE_ID_FL_FROZEN | SHAPE_ID_FL_HAS_OBJECT_ID,
+ SHAPE_ID_FLAGS_MASK = SHAPE_ID_HEAP_INDEX_MASK | SHAPE_ID_FL_NON_CANONICAL_MASK | SHAPE_ID_FL_TOO_COMPLEX,
+
+#undef RBIMPL_SHAPE_ID_FL
+};
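
// Illustrative sketch (editor's note, not part of this patch): pulling the
// documented components back out of a shape_id with the masks above.
static inline void
example_decode_shape_id(shape_id_t shape_id)
{
    uint32_t offset = shape_id & SHAPE_ID_OFFSET_MASK;             // bits 0-18
    uint32_t heap_index = (shape_id & SHAPE_ID_HEAP_INDEX_MASK) >> SHAPE_ID_HEAP_INDEX_OFFSET; // bits 19-21
    bool frozen = !!(shape_id & SHAPE_ID_FL_FROZEN);               // bit 22
    bool has_object_id = !!(shape_id & SHAPE_ID_FL_HAS_OBJECT_ID); // bit 23
    bool too_complex = !!(shape_id & SHAPE_ID_FL_TOO_COMPLEX);     // bit 24
    (void)offset; (void)heap_index; (void)frozen; (void)has_object_id; (void)too_complex;
}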
+
+// This mask allows checking whether a shape_id contains any ivar.
+// It relies on ROOT_SHAPE_WITH_OBJ_ID == 1.
+enum shape_id_mask {
+ SHAPE_ID_HAS_IVAR_MASK = SHAPE_ID_FL_TOO_COMPLEX | (SHAPE_ID_OFFSET_MASK - 1),
+};
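
// Worked example (editor's note, not part of this patch): SHAPE_ID_OFFSET_MASK - 1
// is 0x7FFFE, the offset bits with bit 0 cleared. Masking with
// SHAPE_ID_HAS_IVAR_MASK therefore yields 0 exactly when the offset is 0
// (ROOT_SHAPE_ID) or 1 (ROOT_SHAPE_WITH_OBJ_ID) and SHAPE_ID_FL_TOO_COMPLEX is
// unset -- the shapes that hold no ivars. Frozen, object_id, and heap index
// bits are all ignored:
//
//   (ROOT_SHAPE_ID            & SHAPE_ID_HAS_IVAR_MASK) == 0  // no ivars
//   (ROOT_SHAPE_WITH_OBJ_ID   & SHAPE_ID_HAS_IVAR_MASK) == 0  // object_id only
//   ((2 | SHAPE_ID_FL_FROZEN) & SHAPE_ID_HAS_IVAR_MASK) != 0  // offset 2 has an ivar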
+
+// The interpreter doesn't care about frozen status or slot size when reading ivars,
+// so we normalize the shape_id by clearing these bits to improve cache hits.
+// JITs, however, might care about them.
+#define SHAPE_ID_READ_ONLY_MASK (~(SHAPE_ID_FL_FROZEN | SHAPE_ID_HEAP_INDEX_MASK))
-# define SHAPE_MASK (((uintptr_t)1 << SHAPE_ID_NUM_BITS) - 1)
-# define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_ID_NUM_BITS)
+typedef uint32_t redblack_id_t;
-# define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * 8) - SHAPE_ID_NUM_BITS)
+#define SHAPE_MAX_FIELDS (attr_index_t)(-1)
+#define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * CHAR_BIT) - SHAPE_ID_NUM_BITS)
+#define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_ID_NUM_BITS)
-# define SHAPE_BITMAP_SIZE 16384
+#define SHAPE_MAX_VARIATIONS 8
-# define SHAPE_MAX_VARIATIONS 8
+#define INVALID_SHAPE_ID ((shape_id_t)-1)
+#define ATTR_INDEX_NOT_SET ((attr_index_t)-1)
-# define MAX_SHAPE_ID (SHAPE_MASK - 1)
-# define INVALID_SHAPE_ID SHAPE_MASK
-# define ROOT_SHAPE_ID 0x0
+#define ROOT_SHAPE_ID 0x0
+#define ROOT_SHAPE_WITH_OBJ_ID 0x1
+#define ROOT_TOO_COMPLEX_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_TOO_COMPLEX)
+#define ROOT_TOO_COMPLEX_WITH_OBJ_ID (ROOT_SHAPE_WITH_OBJ_ID | SHAPE_ID_FL_TOO_COMPLEX | SHAPE_ID_FL_HAS_OBJECT_ID)
-# define SPECIAL_CONST_SHAPE_ID (SIZE_POOL_COUNT * 2)
-# define OBJ_TOO_COMPLEX_SHAPE_ID (SPECIAL_CONST_SHAPE_ID + 1)
+typedef struct redblack_node redblack_node_t;
struct rb_shape {
- struct rb_id_table * edges; // id_table from ID (ivar) to next shape
+ VALUE edges; // id_table from ID (ivar) to next shape
ID edge_name; // ID (ivar) for transition from parent to rb_shape
- attr_index_t next_iv_index;
- uint32_t capacity; // Total capacity of the object with this shape
- uint8_t type;
- uint8_t size_pool_index;
+ redblack_node_t *ancestor_index;
shape_id_t parent_id;
+ attr_index_t next_field_index; // Fields are either ivars or internal properties like `object_id`
+ attr_index_t capacity; // Total capacity of the object with this shape
+ uint8_t type;
};
typedef struct rb_shape rb_shape_t;
+struct redblack_node {
+ ID key;
+ rb_shape_t *value;
+ redblack_id_t l;
+ redblack_id_t r;
+};
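
// Illustrative sketch (editor's note, not part of this patch): `l` and `r`
// appear to be indexes into rb_shape_tree.shape_cache (declared below) rather
// than pointers. Assuming 0 is the "no child" sentinel and indexes are
// 1-based, a child lookup would resolve along these lines:
//
//   static inline redblack_node_t *
//   example_redblack_child(redblack_id_t id)
//   {
//       return id ? &rb_shape_tree.shape_cache[id - 1] : NULL;
//   }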
+
enum shape_type {
SHAPE_ROOT,
SHAPE_IVAR,
- SHAPE_FROZEN,
- SHAPE_CAPACITY_CHANGE,
- SHAPE_INITIAL_CAPACITY,
- SHAPE_T_OBJECT,
- SHAPE_OBJ_TOO_COMPLEX,
+ SHAPE_OBJ_ID,
+};
+
+enum shape_flags {
+ SHAPE_FL_FROZEN = 1 << 0,
+ SHAPE_FL_HAS_OBJECT_ID = 1 << 1,
+ SHAPE_FL_TOO_COMPLEX = 1 << 2,
+
+ SHAPE_FL_NON_CANONICAL_MASK = SHAPE_FL_FROZEN | SHAPE_FL_HAS_OBJECT_ID,
+};
+
+typedef struct {
+ /* object shapes */
+ rb_shape_t *shape_list;
+ rb_shape_t *root_shape;
+ const attr_index_t *capacities;
+ rb_atomic_t next_shape_id;
+
+ redblack_node_t *shape_cache;
+ unsigned int cache_size;
+} rb_shape_tree_t;
+
+RUBY_SYMBOL_EXPORT_BEGIN
+RUBY_EXTERN rb_shape_tree_t rb_shape_tree;
+RUBY_SYMBOL_EXPORT_END
+
+static inline shape_id_t
+rb_shapes_count(void)
+{
+ return (shape_id_t)RUBY_ATOMIC_LOAD(rb_shape_tree.next_shape_id);
+}
+
+union rb_attr_index_cache {
+ uint64_t pack;
+ struct {
+ shape_id_t shape_id;
+ attr_index_t index;
+ } unpack;
};
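
// Illustrative sketch (editor's note, not part of this patch): the union lets a
// (shape_id, attr_index) pair be read and written as a single 64-bit word,
// which is convenient for atomic inline-cache loads and stores.
static inline uint64_t
example_pack_attr_index_cache(shape_id_t shape_id, attr_index_t index)
{
    union rb_attr_index_cache cache = { .pack = 0 };
    cache.unpack.shape_id = shape_id;
    cache.unpack.index = index;
    return cache.pack;
}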
-#if SHAPE_IN_BASIC_FLAGS
static inline shape_id_t
RBASIC_SHAPE_ID(VALUE obj)
{
RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
- return (shape_id_t)(SHAPE_MASK & ((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT));
+ RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO) || IMEMO_TYPE_P(obj, imemo_fields));
+#if RBASIC_SHAPE_ID_FIELD
+ return (shape_id_t)((RBASIC(obj)->shape_id));
+#else
+ return (shape_id_t)((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT);
+#endif
}
+// Same as RBASIC_SHAPE_ID, but with flags that have no impact on reads
+// removed, e.g. SHAPE_ID_FL_FROZEN.
+static inline shape_id_t
+RBASIC_SHAPE_ID_FOR_READ(VALUE obj)
+{
+ return RBASIC_SHAPE_ID(obj) & SHAPE_ID_READ_ONLY_MASK;
+}
+
+#if RUBY_DEBUG
+bool rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id);
+#endif
+
static inline void
-RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+RBASIC_SET_SHAPE_ID_NO_CHECKS(VALUE obj, shape_id_t shape_id)
{
- // Ractors are occupying the upper 32 bits of flags, but only in debug mode
+#if RBASIC_SHAPE_ID_FIELD
+ RBASIC(obj)->shape_id = (VALUE)shape_id;
+#else
// Object shapes are occupying top bits
RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+#endif
}
-static inline shape_id_t
-ROBJECT_SHAPE_ID(VALUE obj)
+static inline void
+RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
{
- RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- return RBASIC_SHAPE_ID(obj);
+ RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
+ RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO) || IMEMO_TYPE_P(obj, imemo_fields));
+
+ RBASIC_SET_SHAPE_ID_NO_CHECKS(obj, shape_id);
+
+ RUBY_ASSERT(rb_shape_verify_consistency(obj, shape_id));
}
+void rb_set_boxed_class_shape_id(VALUE obj, shape_id_t shape_id);
+
static inline void
-ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+RB_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
{
- RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- RBASIC_SET_SHAPE_ID(obj, shape_id);
+ switch (BUILTIN_TYPE(obj)) {
+ case T_CLASS:
+ case T_MODULE:
+ rb_set_boxed_class_shape_id(obj, shape_id);
+ break;
+ default:
+ RBASIC_SET_SHAPE_ID(obj, shape_id);
+ break;
+ }
}
-static inline shape_id_t
-RCLASS_SHAPE_ID(VALUE obj)
+static inline rb_shape_t *
+RSHAPE(shape_id_t shape_id)
{
- RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
- return RBASIC_SHAPE_ID(obj);
+ uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
+ RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
+
+ return &rb_shape_tree.shape_list[offset];
}
-#else
+int32_t rb_shape_id_offset(void);
+
+RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);
+shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
+bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
+bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
+bool rb_shape_find_ivar(shape_id_t shape_id, ID id, shape_id_t *ivar_shape);
+
+typedef int rb_shape_foreach_transition_callback(shape_id_t shape_id, void *data);
+bool rb_shape_foreach_field(shape_id_t shape_id, rb_shape_foreach_transition_callback func, void *data);
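
// Illustrative usage sketch (editor's note, not part of this patch), assuming
// the callback follows the usual st.c convention of returning ST_CONTINUE or
// ST_STOP: counting the fields recorded along a shape's transition chain.
//
//   static int
//   example_count_field(shape_id_t field_shape_id, void *data)
//   {
//       *(size_t *)data += 1;
//       return ST_CONTINUE;
//   }
//
//   size_t count = 0;
//   rb_shape_foreach_field(shape_id, example_count_field, &count);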
+
+shape_id_t rb_shape_transition_frozen(VALUE obj);
+shape_id_t rb_shape_transition_complex(VALUE obj);
+shape_id_t rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id);
+shape_id_t rb_shape_transition_add_ivar(VALUE obj, ID id);
+shape_id_t rb_shape_transition_add_ivar_no_warnings(VALUE klass, shape_id_t original_shape_id, ID id);
+shape_id_t rb_shape_transition_object_id(VALUE obj);
+shape_id_t rb_shape_transition_heap(VALUE obj, size_t heap_index);
+shape_id_t rb_shape_object_id(shape_id_t original_shape_id);
+
+void rb_shape_free_all(void);
+
+shape_id_t rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_id);
+void rb_shape_copy_fields(VALUE dest, VALUE *dest_buf, shape_id_t dest_shape_id, VALUE *src_buf, shape_id_t src_shape_id);
+void rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_table *fields_table);
+
+static inline bool
+rb_shape_too_complex_p(shape_id_t shape_id)
+{
+ return shape_id & SHAPE_ID_FL_TOO_COMPLEX;
+}
+
+static inline bool
+rb_shape_obj_too_complex_p(VALUE obj)
+{
+ return !RB_SPECIAL_CONST_P(obj) && rb_shape_too_complex_p(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_shape_has_object_id(shape_id_t shape_id)
+{
+ return shape_id & SHAPE_ID_FL_HAS_OBJECT_ID;
+}
+
+static inline bool
+rb_shape_canonical_p(shape_id_t shape_id)
+{
+ return !(shape_id & SHAPE_ID_FL_NON_CANONICAL_MASK);
+}
+
+static inline uint8_t
+rb_shape_heap_index(shape_id_t shape_id)
+{
+ return (uint8_t)((shape_id & SHAPE_ID_HEAP_INDEX_MASK) >> SHAPE_ID_HEAP_INDEX_OFFSET);
+}
static inline shape_id_t
-ROBJECT_SHAPE_ID(VALUE obj)
+rb_shape_root(size_t heap_id)
{
- RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- return (shape_id_t)(SHAPE_MASK & (RBASIC(obj)->flags >> SHAPE_FLAG_SHIFT));
+ shape_id_t heap_index = (shape_id_t)(heap_id + 1);
+ shape_id_t heap_flags = heap_index << SHAPE_ID_HEAP_INDEX_OFFSET;
+
+ RUBY_ASSERT((heap_flags & SHAPE_ID_HEAP_INDEX_MASK) == heap_flags);
+ RUBY_ASSERT(rb_shape_heap_index(heap_flags) == heap_index);
+
+ return ROOT_SHAPE_ID | heap_flags;
}
-static inline void
-ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+static inline shape_id_t
+RSHAPE_PARENT_RAW_ID(shape_id_t shape_id)
{
- RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
- RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+ return RSHAPE(shape_id)->parent_id;
}
-MJIT_SYMBOL_EXPORT_BEGIN
-shape_id_t rb_rclass_shape_id(VALUE obj);
-MJIT_SYMBOL_EXPORT_END
+static inline bool
+RSHAPE_DIRECT_CHILD_P(shape_id_t parent_id, shape_id_t child_id)
+{
+ return (parent_id & SHAPE_ID_FLAGS_MASK) == (child_id & SHAPE_ID_FLAGS_MASK) &&
+ RSHAPE(child_id)->parent_id == (parent_id & SHAPE_ID_OFFSET_MASK);
+}
-static inline shape_id_t RCLASS_SHAPE_ID(VALUE obj)
+static inline enum shape_type
+RSHAPE_TYPE(shape_id_t shape_id)
{
- return rb_rclass_shape_id(obj);
+ return RSHAPE(shape_id)->type;
}
-#endif
+static inline bool
+RSHAPE_TYPE_P(shape_id_t shape_id, enum shape_type type)
+{
+ return RSHAPE_TYPE(shape_id) == type;
+}
-bool rb_shape_root_shape_p(rb_shape_t* shape);
-rb_shape_t * rb_shape_get_root_shape(void);
-uint8_t rb_shape_id_num_bits(void);
-int32_t rb_shape_id_offset(void);
+static inline attr_index_t
+RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
+{
+ uint8_t heap_index = rb_shape_heap_index(shape_id);
+ if (heap_index) {
+ return rb_shape_tree.capacities[heap_index - 1];
+ }
+ return 0;
+}
-rb_shape_t* rb_shape_get_shape_by_id_without_assertion(shape_id_t shape_id);
-rb_shape_t * rb_shape_get_parent(rb_shape_t * shape);
+static inline attr_index_t
+RSHAPE_CAPACITY(shape_id_t shape_id)
+{
+ attr_index_t embedded_capacity = RSHAPE_EMBEDDED_CAPACITY(shape_id);
-MJIT_SYMBOL_EXPORT_BEGIN
-rb_shape_t* rb_shape_get_shape_by_id(shape_id_t shape_id);
-shape_id_t rb_shape_get_shape_id(VALUE obj);
-rb_shape_t * rb_shape_get_next_iv_shape(rb_shape_t * shape, ID id);
-bool rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t * value);
-bool rb_shape_obj_too_complex(VALUE obj);
-MJIT_SYMBOL_EXPORT_END
+ if (embedded_capacity > RSHAPE(shape_id)->capacity) {
+ return embedded_capacity;
+ }
+ else {
+ return RSHAPE(shape_id)->capacity;
+ }
+}
-void rb_shape_set_shape(VALUE obj, rb_shape_t* shape);
-rb_shape_t* rb_shape_get_shape(VALUE obj);
-int rb_shape_frozen_shape_p(rb_shape_t* shape);
-void rb_shape_transition_shape_frozen(VALUE obj);
-void rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE * removed);
-rb_shape_t * rb_shape_transition_shape_capa(rb_shape_t * shape, uint32_t new_capacity);
-rb_shape_t* rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id);
+static inline attr_index_t
+RSHAPE_LEN(shape_id_t shape_id)
+{
+ return RSHAPE(shape_id)->next_field_index;
+}
-rb_shape_t * rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape);
+static inline attr_index_t
+RSHAPE_INDEX(shape_id_t shape_id)
+{
+ RUBY_ASSERT(RSHAPE_LEN(shape_id) > 0);
+ return RSHAPE_LEN(shape_id) - 1;
+}
+
+static inline ID
+RSHAPE_EDGE_NAME(shape_id_t shape_id)
+{
+ return RSHAPE(shape_id)->edge_name;
+}
static inline uint32_t
-ROBJECT_IV_CAPACITY(VALUE obj)
+ROBJECT_FIELDS_CAPACITY(VALUE obj)
{
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
// Asking for capacity doesn't make sense when the object is using
// a hash table for storing instance variables
- RUBY_ASSERT(ROBJECT_SHAPE_ID(obj) != OBJ_TOO_COMPLEX_SHAPE_ID);
- return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->capacity;
+ RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
+ return RSHAPE_CAPACITY(RBASIC_SHAPE_ID(obj));
}
-static inline struct rb_id_table *
-ROBJECT_IV_HASH(VALUE obj)
+static inline st_table *
+ROBJECT_FIELDS_HASH(VALUE obj)
{
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- RUBY_ASSERT(ROBJECT_SHAPE_ID(obj) == OBJ_TOO_COMPLEX_SHAPE_ID);
- return (struct rb_id_table *)ROBJECT(obj)->as.heap.ivptr;
+ RUBY_ASSERT(rb_shape_obj_too_complex_p(obj));
+ RUBY_ASSERT(FL_TEST_RAW(obj, ROBJECT_HEAP));
+
+ return (st_table *)ROBJECT(obj)->as.heap.fields;
}
static inline void
-ROBJECT_SET_IV_HASH(VALUE obj, const struct rb_id_table *tbl)
+ROBJECT_SET_FIELDS_HASH(VALUE obj, const st_table *tbl)
{
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- RUBY_ASSERT(ROBJECT_SHAPE_ID(obj) == OBJ_TOO_COMPLEX_SHAPE_ID);
- ROBJECT(obj)->as.heap.ivptr = (VALUE *)tbl;
+ RUBY_ASSERT(rb_shape_obj_too_complex_p(obj));
+ RUBY_ASSERT(FL_TEST_RAW(obj, ROBJECT_HEAP));
+
+ ROBJECT(obj)->as.heap.fields = (VALUE *)tbl;
}
-size_t rb_id_table_size(const struct rb_id_table *tbl);
+static inline uint32_t
+ROBJECT_FIELDS_COUNT_COMPLEX(VALUE obj)
+{
+ return (uint32_t)rb_st_table_size(ROBJECT_FIELDS_HASH(obj));
+}
static inline uint32_t
-ROBJECT_IV_COUNT(VALUE obj)
+ROBJECT_FIELDS_COUNT_NOT_COMPLEX(VALUE obj)
{
- if (ROBJECT_SHAPE_ID(obj) == OBJ_TOO_COMPLEX_SHAPE_ID) {
- return (uint32_t)rb_id_table_size(ROBJECT_IV_HASH(obj));
+ RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
+ RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
+ return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
+}
+
+static inline uint32_t
+ROBJECT_FIELDS_COUNT(VALUE obj)
+{
+ if (rb_shape_obj_too_complex_p(obj)) {
+ return ROBJECT_FIELDS_COUNT_COMPLEX(obj);
}
else {
- RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
- RUBY_ASSERT(ROBJECT_SHAPE_ID(obj) != OBJ_TOO_COMPLEX_SHAPE_ID);
- return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->next_iv_index;
+ return ROBJECT_FIELDS_COUNT_NOT_COMPLEX(obj);
}
}
static inline uint32_t
-RBASIC_IV_COUNT(VALUE obj)
+RBASIC_FIELDS_COUNT(VALUE obj)
{
- return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj))->next_iv_index;
+ return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
}
-static inline uint32_t
-RCLASS_IV_COUNT(VALUE obj)
+static inline bool
+rb_shape_obj_has_id(VALUE obj)
+{
+ return rb_shape_has_object_id(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_shape_has_ivars(shape_id_t shape_id)
{
- RUBY_ASSERT(RB_TYPE_P(obj, RUBY_T_CLASS) || RB_TYPE_P(obj, RUBY_T_MODULE));
- uint32_t ivc = rb_shape_get_shape_by_id(RCLASS_SHAPE_ID(obj))->next_iv_index;
- return ivc;
+ return shape_id & SHAPE_ID_HAS_IVAR_MASK;
}
-rb_shape_t * rb_shape_alloc(ID edge_name, rb_shape_t * parent);
-rb_shape_t * rb_shape_alloc_with_size_pool_index(ID edge_name, rb_shape_t * parent, uint8_t size_pool_index);
-rb_shape_t * rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id);
+static inline bool
+rb_shape_obj_has_ivars(VALUE obj)
+{
+ return rb_shape_has_ivars(RBASIC_SHAPE_ID(obj));
+}
-rb_shape_t *rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *orig_shape);
+static inline bool
+rb_shape_has_fields(shape_id_t shape_id)
+{
+ return shape_id & (SHAPE_ID_OFFSET_MASK | SHAPE_ID_FL_TOO_COMPLEX);
+}
-bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);
+static inline bool
+rb_shape_obj_has_fields(VALUE obj)
+{
+ return rb_shape_has_fields(RBASIC_SHAPE_ID(obj));
+}
-VALUE rb_obj_debug_shape(VALUE self, VALUE obj);
-void rb_shape_set_too_complex(VALUE obj);
+static inline bool
+rb_obj_gen_fields_p(VALUE obj)
+{
+ switch (TYPE(obj)) {
+ case T_NONE:
+ case T_OBJECT:
+ case T_CLASS:
+ case T_MODULE:
+ case T_IMEMO:
+ return false;
+ default:
+ break;
+ }
+ return rb_shape_obj_has_fields(obj);
+}
+
+static inline bool
+rb_obj_using_gen_fields_table_p(VALUE obj)
+{
+ switch (BUILTIN_TYPE(obj)) {
+ case T_DATA:
+ if (RTYPEDDATA_P(obj)) return false;
+ break;
+
+ case T_STRUCT:
+ if (!FL_TEST_RAW(obj, RSTRUCT_GEN_FIELDS)) return false;
+ break;
+
+ default:
+ break;
+ }
+
+ return rb_obj_gen_fields_p(obj);
+}
// For ext/objspace
RUBY_SYMBOL_EXPORT_BEGIN
-typedef void each_shape_callback(rb_shape_t * shape, void *data);
-void rb_shape_each_shape(each_shape_callback callback, void *data);
-size_t rb_shape_memsize(rb_shape_t *shape);
-size_t rb_shape_edges_count(rb_shape_t *shape);
-size_t rb_shape_depth(rb_shape_t *shape);
-shape_id_t rb_shape_id(rb_shape_t * shape);
+typedef void each_shape_callback(shape_id_t shape_id, void *data);
+void rb_shape_each_shape_id(each_shape_callback callback, void *data);
+size_t rb_shape_memsize(shape_id_t shape);
+size_t rb_shape_edges_count(shape_id_t shape_id);
+size_t rb_shape_depth(shape_id_t shape_id);
RUBY_SYMBOL_EXPORT_END
#endif