author    Jemma Issroff <jemmaissroff@gmail.com>  2022-12-08 17:16:52 -0500
committer Aaron Patterson <aaron.patterson@gmail.com>  2022-12-15 10:06:04 -0800
commit    c1ab6ddc9a6fa228caa5d26b118b54855051279c (patch)
tree      a3361c22480e38d798dfa975bdabf47a832a9fb0 /vm_insnhelper.c
parent    a3d552aedd190b0f21a4f6479f0ef1d2ce90189b (diff)
download  ruby-c1ab6ddc9a6fa228caa5d26b118b54855051279c.tar.gz
Transition complex objects to "too complex" shape
When an object becomes "too complex" (in other words, it has too many variations in the shape tree), we transition it to use a "too complex" shape and use a hash for storing instance variables.

Without this patch, there were rare cases where shape tree growth could "explode" and cause performance degradation on what would otherwise have been cached fast paths.

This patch puts a limit on shape tree growth, and gracefully degrades in the rare case where there could be factorial growth in the shape tree. For example:

```ruby
class NG; end

HUGE_NUMBER.times do
  NG.new.instance_variable_set(:"@unique_ivar_#{_1}", 1)
end
```

We consider objects to be "too complex" when the object's class has more than SHAPE_MAX_VARIATIONS (currently 8) leaf nodes in the shape tree and the object introduces a new variation (a new leaf node) associated with that class.

For example, new variations on instances of the following class would be considered "too complex" because those instances create more than 8 leaves in the shape tree:

```ruby
class Foo; end
9.times { Foo.new.instance_variable_set(:"@uniq_#{_1}", 1) }
```

However, the following class is *not* too complex because it only has one leaf in the shape tree:

```ruby
class Foo
  def initialize
    @a = @b = @c = @d = @e = @f = @g = @h = @i = nil
  end
end

9.times { Foo.new }
```

This case is rare, so we don't expect this change to impact the performance of most applications, but it needs to be handled.

Co-Authored-By: Aaron Patterson <tenderlove@ruby-lang.org>
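For illustration, here is where the factorial growth comes from (this sketch is not part of the patch; the `Point` class and ivar names are hypothetical): shape transitions record the order in which instance variables are first assigned, so the same set of ivars assigned in two different orders produces two distinct leaf nodes.

```ruby
class Point; end

a = Point.new
a.instance_variable_set(:@x, 1)
a.instance_variable_set(:@y, 2)  # transition path: root -> @x -> @y

b = Point.new
b.instance_variable_set(:@y, 2)
b.instance_variable_set(:@x, 1)  # different path: root -> @y -> @x

# a and b hold the same ivars but sit on two distinct leaf shapes; with
# n ivars assigned in arbitrary orders, up to n! leaves are possible.
```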
Diffstat (limited to 'vm_insnhelper.c')
-rw-r--r--  vm_insnhelper.c  43
1 file changed, 29 insertions(+), 14 deletions(-)
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 84fe32c1b7..68b8d89abb 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1213,6 +1213,8 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
     }
     if (LIKELY(cached_id == shape_id)) {
+        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
+
         if (index == ATTR_INDEX_NOT_SET) {
             return Qnil;
         }
@@ -1242,24 +1244,31 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
         rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
-        if (rb_shape_get_iv_index(shape, id, &index)) {
-            // This fills in the cache with the shared cache object.
-            // "ent" is the shared cache object
-            fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
-
-            // We fetched the ivar list above
-            val = ivar_list[index];
-            RUBY_ASSERT(!UNDEF_P(val));
+        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
+            if (!rb_id_table_lookup(ROBJECT_IV_HASH(obj), id, &val)) {
+                val = Qnil;
+            }
         }
         else {
-            if (is_attr) {
-                vm_cc_attr_index_initialize(cc, shape_id);
+            if (rb_shape_get_iv_index(shape, id, &index)) {
+                // This fills in the cache with the shared cache object.
+                // "ent" is the shared cache object
+                fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
+
+                // We fetched the ivar list above
+                val = ivar_list[index];
+                RUBY_ASSERT(!UNDEF_P(val));
             }
             else {
-                vm_ic_attr_index_initialize(ic, shape_id);
-            }
+                if (is_attr) {
+                    vm_cc_attr_index_initialize(cc, shape_id);
+                }
+                else {
+                    vm_ic_attr_index_initialize(ic, shape_id);
+                }
-            val = Qnil;
+                val = Qnil;
+            }
         }
     }
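The new branch above means that reads on a too-complex object bypass the indexed ivar array entirely: the value comes from an `rb_id_table_lookup` on the object's ivar hash, and a lookup miss yields `Qnil`. A minimal Ruby sketch of the observable behavior (the `Grab` class and ivar names are hypothetical, following the commit message's pattern):

```ruby
class Grab; end

# Push Grab past SHAPE_MAX_VARIATIONS (8), as in the commit message example.
9.times { |i| Grab.new.instance_variable_set(:"@uniq_#{i}", 0) }

obj = Grab.new
obj.instance_variable_set(:@w, 42)    # new variation -> too-complex shape

obj.instance_variable_get(:@w)        # => 42  (rb_id_table lookup hit)
obj.instance_variable_get(:@missing)  # => nil (lookup miss, val = Qnil)
```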
@@ -1283,6 +1292,8 @@ general_path:
 static void
 populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
 {
+    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
+
     // Cache population code
     if (is_attr) {
         vm_cc_attr_index_set(cc, index, next_shape_id);
@@ -1309,7 +1320,9 @@ vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic,
             shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
-            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+            if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
+                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+            }
             RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
             return val;
@@ -1413,6 +1426,7 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
             VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
             shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
+            RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
             if (LIKELY(shape_id == dest_shape_id)) {
                 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
@@ -1440,6 +1454,7 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
             VALUE *ptr = ROBJECT_IVPTR(obj);
+            RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
             RB_OBJ_WRITE(obj, &ptr[index], val);
             RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
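Taken together, the setter-side changes mean a degraded object never re-enters the cached fast path: `populate_cache` asserts it is never given `OBJ_TOO_COMPLEX_SHAPE_ID`, its caller skips cache population for such objects, and `vm_setivar` asserts its fast path never sees them, so every ivar write on a too-complex object goes through the slow path. A rough, hypothetical way to observe the cost difference (class names and the iteration count are arbitrary; absolute numbers will vary by machine and build):

```ruby
require "benchmark"

class Fast
  def bump
    @a = (@a || 0) + 1   # setinstancevariable, served by the inline cache
  end
end

class Slow
  def bump
    @a = (@a || 0) + 1   # same bytecode, but on a too-complex receiver
  end
end

# Exhaust Slow's variation budget: 9 unique ivar names create more than
# SHAPE_MAX_VARIATIONS (8) leaf shapes under Slow.
9.times { |i| Slow.new.instance_variable_set(:"@v#{i}", 0) }

fast = Fast.new
slow = Slow.new
fast.bump  # later calls hit the inline cache
slow.bump  # @a is yet another variation -> too-complex, hash-backed ivars

n = 1_000_000
puts Benchmark.measure { n.times { fast.bump } }  # cached get/set fast path
puts Benchmark.measure { n.times { slow.bump } }  # slow path on every write
```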