From 1f0888ab3e699a1083cddad84b0d8cb28e15ad8e Mon Sep 17 00:00:00 2001
From: Peter Zhu
Date: Thu, 17 Nov 2022 09:47:18 -0500
Subject: Speed up shape transitions

This commit significantly speeds up shape transitions as it changes
get_next_shape_internal to not perform a lookup (and instead require
the caller to perform the lookup). This avoids double lookups during
shape transitions.

There is a significant (~2x) speedup in the following micro-benchmark:

    puts(Benchmark.measure do
      o = Object.new

      100_000.times do |i|
        o.instance_variable_set(:"@a#{i}", 0)
      end
    end)

Before:

    22.393194   0.201639  22.594833 ( 22.684237)

After:

    11.323086   0.022284  11.345370 ( 11.389346)
---
 variable.c | 25 +++++++++++++++----------
 1 file changed, 15 insertions(+), 10 deletions(-)

(limited to 'variable.c')

diff --git a/variable.c b/variable.c
index 635e613c77..926719e82b 100644
--- a/variable.c
+++ b/variable.c
@@ -1276,28 +1276,33 @@ static void
 generic_ivar_set(VALUE obj, ID id, VALUE val)
 {
     struct ivar_update ivup;
+
+    attr_index_t index;
     // The returned shape will have `id` in its iv_table
-    rb_shape_t * shape = rb_shape_get_next(rb_shape_get_shape(obj), obj, id);
+    rb_shape_t *shape = rb_shape_get_shape(obj);
+    bool found = rb_shape_get_iv_index(shape, id, &index);
+    if (!found) {
+        index = shape->next_iv_index;
+        shape = rb_shape_get_next(shape, obj, id);
+        RUBY_ASSERT(index == (shape->next_iv_index - 1));
+    }
+
     ivup.shape = shape;
 
     RB_VM_LOCK_ENTER();
     {
-        attr_index_t ent_data;
-        if (rb_shape_get_iv_index(shape, id, &ent_data)) {
-            ivup.iv_index = (uint32_t) ent_data;
-        }
-        else {
-            rb_bug("unreachable. Shape was not found for id: %s", rb_id2name(id));
-        }
+        ivup.iv_index = (uint32_t)index;
 
         st_update(generic_ivtbl(obj, id, false), (st_data_t)obj, generic_ivar_update,
                   (st_data_t)&ivup);
     }
     RB_VM_LOCK_LEAVE();
 
     ivup.ivtbl->ivptr[ivup.iv_index] = val;
-
-    rb_shape_set_shape(obj, shape);
     RB_OBJ_WRITTEN(obj, Qundef, val);
+
+    if (!found) {
+        rb_shape_set_shape(obj, shape);
+    }
 }
 
 static VALUE *
-- 
cgit v1.2.1