author    Koichi Sasada <ko1@atdot.net>  2023-03-10 01:30:30 +0900
committer Koichi Sasada <ko1@atdot.net>  2023-03-23 14:03:12 +0900
commit    c9fd81b860b5ec193ba57c73c740955937452497 (patch)
tree      37d7f0ac40f0122c222b1dc952ef0f0907440b0a /iseq.c
parent    e5a5e43c36443e06756aba6de95c94b41b910a82 (diff)
download  ruby-c9fd81b860b5ec193ba57c73c740955937452497.tar.gz
`vm_call_single_noarg_inline_builtin`
If the iseq contains only the `opt_invokebuiltin_delegate_leave` insn and the builtin function (bf) is inlinable, the caller doesn't need to build a method frame. `vm_call_single_noarg_inline_builtin` is a fast path for such cases.
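To illustrate the shape of that fast path, here is a minimal, self-contained sketch; it is not the actual VM code (`VALUE`, the structs, and `double_self` below are simplified stand-ins for the real `rb_callcache`/builtin machinery). The idea is that a no-arg builtin's C function is invoked directly on the receiver, with no method frame pushed:

    /* Sketch only: stand-in types, not ruby's real internals. */
    #include <stdio.h>

    typedef unsigned long VALUE;

    struct builtin_function {        /* stand-in for the bf descriptor  */
        VALUE (*func)(VALUE self);   /* no-arg builtin: receives only self */
    };

    struct call_cache {              /* stand-in for rb_callcache       */
        const struct builtin_function *bf;  /* set when the callee is a
                                               lone inlinable builtin   */
    };

    /* The fast path: no frame setup, no argument shuffling -- just call
     * the builtin's C implementation with the receiver. */
    static VALUE
    call_single_noarg_inline_builtin(const struct call_cache *cc, VALUE recv)
    {
        return cc->bf->func(recv);
    }

    static VALUE
    double_self(VALUE self) { return self * 2; }   /* toy "builtin" */

    int
    main(void)
    {
        const struct builtin_function bf = { double_self };
        const struct call_cache cc = { &bf };
        printf("%lu\n", call_single_noarg_inline_builtin(&cc, 21)); /* 42 */
        return 0;
    }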
Diffstat (limited to 'iseq.c')
-rw-r--r--  iseq.c  55
1 file changed, 47 insertions(+), 8 deletions(-)
diff --git a/iseq.c b/iseq.c
index 2e072f9ac2..1e9a3f7497 100644
--- a/iseq.c
+++ b/iseq.c
@@ -3550,9 +3550,32 @@ rb_iseq_trace_set(const rb_iseq_t *iseq, rb_event_flag_t turnon_events)
}
}
-bool rb_vm_call_ivar_attrset_p(const vm_call_handler ch);
void rb_vm_cc_general(const struct rb_callcache *cc);
+static bool
+clear_attr_cc(VALUE v)
+{
+ if (imemo_type_p(v, imemo_callcache) && vm_cc_ivar_p((const struct rb_callcache *)v)) {
+ rb_vm_cc_general((struct rb_callcache *)v);
+ return true;
+ }
+ else {
+ return false;
+ }
+}
+
+static bool
+clear_bf_cc(VALUE v)
+{
+ if (imemo_type_p(v, imemo_callcache) && vm_cc_bf_p((const struct rb_callcache *)v)) {
+ rb_vm_cc_general((struct rb_callcache *)v);
+ return true;
+ }
+ else {
+ return false;
+ }
+}
+
static int
clear_attr_ccs_i(void *vstart, void *vend, size_t stride, void *data)
{
@@ -3560,11 +3583,7 @@ clear_attr_ccs_i(void *vstart, void *vend, size_t stride, void *data)
for (; v != (VALUE)vend; v += stride) {
void *ptr = asan_poisoned_object_p(v);
asan_unpoison_object(v, false);
-
- if (imemo_type_p(v, imemo_callcache) && rb_vm_call_ivar_attrset_p(((const struct rb_callcache *)v)->call_)) {
- rb_vm_cc_general((struct rb_callcache *)v);
- }
-
+ clear_attr_cc(v);
asan_poison_object_if(ptr, v);
}
return 0;
@@ -3577,6 +3596,25 @@ rb_clear_attr_ccs(void)
}
static int
+clear_bf_ccs_i(void *vstart, void *vend, size_t stride, void *data)
+{
+ VALUE v = (VALUE)vstart;
+ for (; v != (VALUE)vend; v += stride) {
+ void *ptr = asan_poisoned_object_p(v);
+ asan_unpoison_object(v, false);
+ clear_bf_cc(v);
+ asan_poison_object_if(ptr, v);
+ }
+ return 0;
+}
+
+void
+rb_clear_bf_ccs(void)
+{
+ rb_objspace_each_objects(clear_bf_ccs_i, NULL);
+}
+
+static int
trace_set_i(void *vstart, void *vend, size_t stride, void *data)
{
rb_event_flag_t turnon_events = *(rb_event_flag_t *)data;
@@ -3589,8 +3627,9 @@ trace_set_i(void *vstart, void *vend, size_t stride, void *data)
if (rb_obj_is_iseq(v)) {
rb_iseq_trace_set(rb_iseq_check((rb_iseq_t *)v), turnon_events);
}
- else if (imemo_type_p(v, imemo_callcache) && rb_vm_call_ivar_attrset_p(((const struct rb_callcache *)v)->call_)) {
- rb_vm_cc_general((struct rb_callcache *)v);
+ else if (clear_attr_cc(v)) {
+ }
+ else if (clear_bf_cc(v)) {
}
asan_poison_object_if(ptr, v);
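For context on the `trace_set_i` hunk: the specialized attr and builtin-function call caches skip the usual method frame, so enabling trace events presumably has to downgrade them to the general handler for the corresponding events to fire. A minimal, self-contained sketch of the boolean-returning reset idiom used there, with stand-in types rather than the real `rb_callcache`:

    /* Sketch of the reset idiom in trace_set_i: each helper both detects
     * and downgrades one kind of specialized call cache, and reports
     * whether it handled the object, so the chain stops at the first match. */
    #include <stdbool.h>
    #include <stdio.h>

    enum cc_kind { CC_GENERAL, CC_ATTR, CC_BF };  /* stand-in cache kinds */

    struct cache { enum cc_kind kind; };

    /* Downgrade a cache of the given kind to the general (frame-pushing)
     * handler; return true only if this call handled it. */
    static bool
    reset_if(struct cache *cc, enum cc_kind kind)
    {
        if (cc->kind != kind) return false;
        cc->kind = CC_GENERAL;
        return true;
    }

    /* Mirrors the else-if chain above: the empty bodies in the diff exist
     * only to consume the helpers' boolean results. */
    static void
    on_trace_enabled(struct cache *cc)
    {
        if (reset_if(cc, CC_ATTR)) { /* attr cache downgraded    */ }
        else if (reset_if(cc, CC_BF)) { /* builtin cache downgraded */ }
    }

    int
    main(void)
    {
        struct cache cc = { CC_BF };
        on_trace_enabled(&cc);
        printf("kind after trace enable: %d\n", cc.kind); /* 0 == CC_GENERAL */
        return 0;
    }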