author      Aaron Patterson <tenderlove@ruby-lang.org>      2022-11-17 15:57:11 -0800
committer   Aaron Patterson <aaron.patterson@gmail.com>     2022-11-18 08:31:56 -0800
commit      10788166e7e568fdcd0b748e8d5dab442dcdc7ef (patch)
tree        ee195186a3fed8333bc1875f66ddab0c85c3d7db /shape.c
parent      f0ce1186620273a1182e6084559765143099eb88 (diff)
download    ruby-10788166e7e568fdcd0b748e8d5dab442dcdc7ef.tar.gz
Differentiate T_OBJECT shapes from other objects
We would like to differentiate types of objects via their shape. This commit adds a special T_OBJECT shape when we allocate an instance of T_OBJECT. This allows us to avoid testing whether an object is a T_OBJECT instance or not; we can just check the shape.
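As a rough illustration of the idea (a standalone sketch, not CRuby's actual shape API; the struct, enum values, and helper below are simplified stand-ins), the "is this a T_OBJECT?" question becomes a walk up the object's shape chain looking for a SHAPE_T_OBJECT transition, rather than a check on the object's type flag:

/* Standalone sketch only: simplified stand-ins for CRuby's shape tree. */
#include <stdbool.h>
#include <stdio.h>

enum shape_type { SHAPE_ROOT, SHAPE_T_OBJECT, SHAPE_IVAR, SHAPE_FROZEN };

struct shape {
    enum shape_type type;
    const struct shape *parent;   /* NULL only for the root shape */
};

/* Does this shape descend from a SHAPE_T_OBJECT transition? */
static bool shape_is_t_object(const struct shape *shape)
{
    for (; shape != NULL; shape = shape->parent) {
        if (shape->type == SHAPE_T_OBJECT) {
            return true;
        }
    }
    return false;
}

int main(void)
{
    struct shape root       = { SHAPE_ROOT,     NULL };
    struct shape t_object   = { SHAPE_T_OBJECT, &root };      /* set at T_OBJECT allocation */
    struct shape with_ivar  = { SHAPE_IVAR,     &t_object };  /* after an ivar is added */
    struct shape other_ivar = { SHAPE_IVAR,     &root };      /* ivar on a non-T_OBJECT chain */

    printf("%d %d\n", shape_is_t_object(&with_ivar), shape_is_t_object(&other_ivar));
    return 0;
}

In the real tree (see Init_default_shapes in the diff below), the T_OBJECT shape sits directly under each size pool's root shape, so the check reduces to matching against a small, fixed set of shapes.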
Diffstat (limited to 'shape.c')
-rw-r--r--   shape.c   23
1 file changed, 21 insertions, 2 deletions
diff --git a/shape.c b/shape.c
index bb51abecbf..d98e596eb1 100644
--- a/shape.c
+++ b/shape.c
@@ -8,6 +8,7 @@
#include <stdbool.h>
static ID id_frozen;
+static ID id_t_object;
static ID size_pool_edge_names[SIZE_POOL_COUNT];
/*
@@ -152,6 +153,7 @@ get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type)
case SHAPE_CAPACITY_CHANGE:
case SHAPE_IVAR_UNDEF:
case SHAPE_FROZEN:
+ case SHAPE_T_OBJECT:
new_shape->next_iv_index = shape->next_iv_index;
break;
case SHAPE_INITIAL_CAPACITY:
@@ -264,6 +266,7 @@ rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value)
case SHAPE_IVAR_UNDEF:
case SHAPE_ROOT:
case SHAPE_INITIAL_CAPACITY:
+ case SHAPE_T_OBJECT:
return false;
case SHAPE_FROZEN:
rb_bug("Ivar should not exist on transition\n");
@@ -333,14 +336,16 @@ rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
{
rb_shape_t * midway_shape;
- if (dest_shape->type != SHAPE_ROOT) {
+ RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
+
+ if (dest_shape->type != initial_shape->type) {
midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_parent(dest_shape));
}
else {
midway_shape = initial_shape;
}
- switch (dest_shape->type) {
+ switch ((enum shape_type)dest_shape->type) {
case SHAPE_IVAR:
if (midway_shape->capacity <= midway_shape->next_iv_index) {
// There isn't enough room to write this IV, so we need to increase the capacity
@@ -355,6 +360,8 @@ rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
case SHAPE_ROOT:
case SHAPE_FROZEN:
case SHAPE_CAPACITY_CHANGE:
+ case SHAPE_INITIAL_CAPACITY:
+ case SHAPE_T_OBJECT:
break;
}
@@ -592,6 +599,7 @@ void
Init_default_shapes(void)
{
id_frozen = rb_make_internal_id();
+ id_t_object = rb_make_internal_id();
// Shapes by size pool
for (int i = 0; i < SIZE_POOL_COUNT; i++) {
@@ -615,6 +623,16 @@ Init_default_shapes(void)
RUBY_ASSERT(rb_shape_id(new_shape) == (shape_id_t)i);
}
+ // Make shapes for T_OBJECT
+ for (int i = 0; i < SIZE_POOL_COUNT; i++) {
+ rb_shape_t * shape = rb_shape_get_shape_by_id(i);
+#if RUBY_DEBUG
+ rb_shape_t * t_object_shape =
+#endif
+ get_next_shape_internal(shape, id_t_object, SHAPE_T_OBJECT);
+ RUBY_ASSERT(rb_shape_id(t_object_shape) == (shape_id_t)(i + SIZE_POOL_COUNT));
+ }
+
// Special const shape
#if RUBY_DEBUG
rb_shape_t * special_const_shape =
@@ -644,6 +662,7 @@ Init_shape(void)
rb_define_method(rb_cShape, "capacity", rb_shape_capacity, 0);
rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
+ rb_define_const(rb_cShape, "SHAPE_T_OBJECT", INT2NUM(SHAPE_T_OBJECT));
rb_define_const(rb_cShape, "SHAPE_IVAR_UNDEF", INT2NUM(SHAPE_IVAR_UNDEF));
rb_define_const(rb_cShape, "SHAPE_FROZEN", INT2NUM(SHAPE_FROZEN));
rb_define_const(rb_cShape, "SHAPE_BITS", INT2NUM(SHAPE_BITS));