-rw-r--r--  lib/ruby_vm/mjit/assembler.rb      |  10
-rw-r--r--  lib/ruby_vm/mjit/compiler.rb       |  14
-rw-r--r--  lib/ruby_vm/mjit/exit_compiler.rb  |   2
-rw-r--r--  lib/ruby_vm/mjit/hooks.rb          |  32
-rw-r--r--  lib/ruby_vm/mjit/insn_compiler.rb  | 241
-rw-r--r--  lib/ruby_vm/mjit/invariants.rb     |  28
-rw-r--r--  mjit.c                             |  33
-rw-r--r--  mjit.rb                            |   1
-rw-r--r--  mjit_c.c                           |   2
-rw-r--r--  mjit_c.rb                          |  10
-rwxr-xr-x  tool/mjit/bindgen.rb               |   1
-rw-r--r--  vm_method.c                        |   5
12 files changed, 303 insertions, 76 deletions
diff --git a/lib/ruby_vm/mjit/assembler.rb b/lib/ruby_vm/mjit/assembler.rb
index c9f179c3b9..6fd5d12bd8 100644
--- a/lib/ruby_vm/mjit/assembler.rb
+++ b/lib/ruby_vm/mjit/assembler.rb
@@ -387,6 +387,16 @@ module RubyVM::MJIT
def sub(dst, src)
case [dst, src]
+ # SUB r/m64, imm8
+ in [Symbol => dst_reg, Integer => src_imm] if r64?(dst_reg) && imm8?(src_imm)
+ # REX.W + 83 /5 ib
+ # MI: Operand 1: ModRM:r/m (r, w), Operand 2: imm8/16/32
+ insn(
+ prefix: REX_W,
+ opcode: 0x83,
+ mod_rm: ModRM[mod: Mod11, reg: 5, rm: dst_reg],
+ imm: imm8(src_imm),
+ )
# SUB r/m64, r64 (Mod 11: reg)
in [Symbol => dst_reg, Symbol => src_reg] if r64?(dst_reg) && r64?(src_reg)
# REX.W + 29 /r
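
The new branch emits the standard REX.W + 83 /5 ib form. A quick hand-check of the bytes it should produce for `sub rsp, 8` (illustrative, not part of the patch):

    # Assemble `sub rsp, 8` by hand from the fields the insn() call fills in.
    # RSP is register number 4; the /5 opcode extension goes in ModRM.reg.
    rex_w  = 0x48
    opcode = 0x83
    mod_rm = (0b11 << 6) | (5 << 3) | 4   # Mod11, reg=5, rm=4 => 0xEC
    imm8   = 0x08
    puts [rex_w, opcode, mod_rm, imm8].map { |b| format('%02x', b) }.join(' ')
    # => "48 83 ec 08" -- the same bytes a disassembler shows for `sub rsp, 8`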
diff --git a/lib/ruby_vm/mjit/compiler.rb b/lib/ruby_vm/mjit/compiler.rb
index 3fc56dac5e..f855c8a6a5 100644
--- a/lib/ruby_vm/mjit/compiler.rb
+++ b/lib/ruby_vm/mjit/compiler.rb
@@ -43,7 +43,7 @@ module RubyVM::MJIT
@cb = CodeBlock.new(mem_block: mem_block, mem_size: mem_size / 2)
@ocb = CodeBlock.new(mem_block: mem_block + mem_size / 2, mem_size: mem_size / 2, outlined: true)
@exit_compiler = ExitCompiler.new
- @insn_compiler = InsnCompiler.new(@ocb, @exit_compiler)
+ @insn_compiler = InsnCompiler.new(@cb, @ocb, @exit_compiler)
@leave_exit = Assembler.new.then do |asm|
@exit_compiler.compile_leave_exit(asm)
@@ -58,11 +58,15 @@ module RubyVM::MJIT
# TODO: Support has_opt
return if iseq.body.param.flags.has_opt
+ jit = JITState.new(iseq:, cfp:)
asm = Assembler.new
asm.comment("Block: #{iseq.body.location.label}@#{C.rb_iseq_path(iseq)}:#{iseq.body.location.first_lineno}")
compile_prologue(asm)
- compile_block(asm, jit: JITState.new(iseq:, cfp:))
- iseq.body.jit_func = @cb.write(asm)
+ compile_block(asm, jit:)
+ @cb.write(asm).tap do |addr|
+ jit.block.start_addr = addr
+ iseq.body.jit_func = addr
+ end
rescue Exception => e
$stderr.puts e.full_message # TODO: check verbose
end
@@ -76,8 +80,8 @@ module RubyVM::MJIT
cfp.pc = block_stub.pc
# Prepare the jump target
+ jit = JITState.new(iseq: block_stub.iseq, cfp:)
new_asm = Assembler.new.tap do |asm|
- jit = JITState.new(iseq: block_stub.iseq, cfp:)
compile_block(asm, jit:, pc: block_stub.pc, ctx: block_stub.ctx)
end
@@ -95,6 +99,8 @@ module RubyVM::MJIT
@cb.write(asm)
end
new_addr
+ end.tap do |addr|
+ jit.block.start_addr = addr
end
end
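
Both call sites now record the block's start address. The point becomes clear in invariants.rb below: `@cb.write(asm)` returns the address the code was written at, and keeping it on the block is what later lets an invalidation overwrite the block in place. An illustrative restatement of the first `.tap` chain (names from the patch, comments added here):

    addr = @cb.write(asm)        # write the machine code and get its start address
    jit.block.start_addr = addr  # remembered so Invariants can patch this block later
    iseq.body.jit_func = addr    # the VM enters here whenever the ISEQ is called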
diff --git a/lib/ruby_vm/mjit/exit_compiler.rb b/lib/ruby_vm/mjit/exit_compiler.rb
index d49a293399..f21ccced85 100644
--- a/lib/ruby_vm/mjit/exit_compiler.rb
+++ b/lib/ruby_vm/mjit/exit_compiler.rb
@@ -26,6 +26,8 @@ module RubyVM::MJIT
# @param ocb [CodeBlock]
def compile_leave_exit(asm)
+ asm.comment('default cfp->jit_return')
+
# Restore callee-saved registers
asm.pop(SP)
asm.pop(EC)
diff --git a/lib/ruby_vm/mjit/hooks.rb b/lib/ruby_vm/mjit/hooks.rb
new file mode 100644
index 0000000000..6aef14b56a
--- /dev/null
+++ b/lib/ruby_vm/mjit/hooks.rb
@@ -0,0 +1,32 @@
+module RubyVM::MJIT::Hooks # :nodoc: all
+ C = RubyVM::MJIT.const_get(:C, false)
+
+ def self.on_bop_redefined(_redefined_flag, _bop)
+ # C.mjit_cancel_all("BOP is redefined")
+ end
+
+ def self.on_cme_invalidate(_cme)
+ # to be used later
+ end
+
+ def self.on_ractor_spawn
+ # C.mjit_cancel_all("Ractor is spawned")
+ end
+
+ def self.on_constant_state_changed(_id)
+ # to be used later
+ end
+
+ def self.on_constant_ic_update(_iseq, _ic, _insn_idx)
+ # to be used later
+ end
+
+ def self.on_tracing_invalidate_all(new_iseq_events)
+ # # Stop calling all JIT-ed code. We can't rewrite existing JIT-ed code to trace_ insns for now.
+ # # :class events are triggered only in ISEQ_TYPE_CLASS, but mjit_target_iseq_p ignores such iseqs.
+ # # Thus we don't need to cancel JIT-ed code for :class events.
+ # if new_iseq_events != C.RUBY_EVENT_CLASS
+ # C.mjit_cancel_all("TracePoint is enabled")
+ # end
+ end
+end
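
These hooks are the Ruby-side targets of the rb_funcall calls added to mjit.c below. Listeners attach behavior by prepending a module onto the Hooks singleton class instead of redefining the methods; a condensed sketch of the pattern Invariants uses later in this patch (`tracker` is a hypothetical stand-in for the listening object):

    tracker = SomeInvalidationTracker.new   # hypothetical; Invariants passes itself
    hooks = Module.new
    hooks.define_method(:on_cme_invalidate) do |cme|
      tracker.on_cme_invalidate(cme)
    end
    RubyVM::MJIT::Hooks.singleton_class.prepend(hooks)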
diff --git a/lib/ruby_vm/mjit/insn_compiler.rb b/lib/ruby_vm/mjit/insn_compiler.rb
index ccfca5eaa1..e5a8cb58d1 100644
--- a/lib/ruby_vm/mjit/insn_compiler.rb
+++ b/lib/ruby_vm/mjit/insn_compiler.rb
@@ -2,10 +2,10 @@ module RubyVM::MJIT
class InsnCompiler
# @param ocb [CodeBlock]
# @param exit_compiler [RubyVM::MJIT::ExitCompiler]
- def initialize(ocb, exit_compiler)
+ def initialize(cb, ocb, exit_compiler)
@ocb = ocb
@exit_compiler = exit_compiler
- @invariants = Invariants.new(ocb, exit_compiler)
+ @invariants = Invariants.new(cb, ocb, exit_compiler)
# freeze # workaround a binding.irb issue. TODO: resurrect this
end
@@ -151,7 +151,7 @@ module RubyVM::MJIT
# @param ctx [RubyVM::MJIT::Context]
# @param asm [RubyVM::MJIT::Assembler]
def putnil(jit, ctx, asm)
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
asm.mov([SP, C.VALUE.size * ctx.stack_size], Qnil)
ctx.stack_push(1)
KeepCompiling
@@ -161,7 +161,7 @@ module RubyVM::MJIT
# @param ctx [RubyVM::MJIT::Context]
# @param asm [RubyVM::MJIT::Assembler]
def putself(jit, ctx, asm)
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
asm.mov([SP, C.VALUE.size * ctx.stack_size], :rax)
ctx.stack_push(1)
@@ -174,7 +174,7 @@ module RubyVM::MJIT
def putobject(jit, ctx, asm, val: jit.operand(0))
# Push it to the stack
# TODO: GC offsets
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
if asm.imm32?(val)
asm.mov([SP, C.VALUE.size * ctx.stack_size], val)
else # 64-bit immediates can't be directly written to memory
@@ -224,7 +224,7 @@ module RubyVM::MJIT
# @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
def opt_send_without_block(jit, ctx, asm)
cd = C.rb_call_data.new(jit.operand(0))
- compile_send_general(jit, ctx, asm, cd)
+ jit_call_method(jit, ctx, asm, cd)
end
# objtostring
@@ -240,7 +240,7 @@ module RubyVM::MJIT
# @param ctx [RubyVM::MJIT::Context]
# @param asm [RubyVM::MJIT::Assembler]
def leave(jit, ctx, asm)
- assert_eq!(ctx.stack_size, 1)
+ assert_equal(ctx.stack_size, 1)
compile_check_ints(jit, ctx, asm)
@@ -250,10 +250,11 @@ module RubyVM::MJIT
asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)
# Return a value (for compile_leave_exit)
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
asm.mov(:rax, [SP])
# Set caller's SP and push a value to its stack (for JIT)
- asm.mov(SP, [CFP, C.rb_control_frame_t.offsetof(:sp)])
+ asm.mov(SP, [CFP, C.rb_control_frame_t.offsetof(:sp)]) # Note: SP is in the position after popping a receiver and arguments
asm.mov([SP], :rax)
# Jump to cfp->jit_return
@@ -342,7 +343,7 @@ module RubyVM::MJIT
return CantCompile
end
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
recv_index = ctx.stack_size - 2
obj_index = ctx.stack_size - 1
@@ -391,7 +392,7 @@ module RubyVM::MJIT
return CantCompile
end
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
recv_index = ctx.stack_size - 2
obj_index = ctx.stack_size - 1
@@ -489,138 +490,180 @@ module RubyVM::MJIT
asm.jnz(side_exit(jit, ctx))
end
+ # vm_call_method (vm_sendish -> vm_call_general -> vm_call_method)
# @param jit [RubyVM::MJIT::JITState]
# @param ctx [RubyVM::MJIT::Context]
# @param asm [RubyVM::MJIT::Assembler]
# @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
- def compile_send_general(jit, ctx, asm, cd)
+ def jit_call_method(jit, ctx, asm, cd)
ci = cd.ci
argc = C.vm_ci_argc(ci)
mid = C.vm_ci_mid(ci)
flags = C.vm_ci_flag(ci)
- if flags & C.VM_CALL_KW_SPLAT != 0
- return CantCompile
- end
-
unless jit.at_current_insn?
defer_compilation(jit, ctx, asm)
return EndBlock
end
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
+ if flags & C.VM_CALL_KW_SPLAT != 0
+ # recv_index calculation may not work for this
+ return CantCompile
+ end
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
recv_depth = argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1)
recv_index = ctx.stack_size - 1 - recv_depth
comptime_recv = jit.peek_at_stack(recv_depth)
comptime_recv_klass = C.rb_class_of(comptime_recv)
- # Guard known class
+ # Guard the receiver class (part of vm_search_method_fastpath)
if comptime_recv_klass.singleton_class?
asm.comment('guard known object with singleton class')
asm.mov(:rax, C.to_value(comptime_recv))
asm.cmp([SP, C.VALUE.size * recv_index], :rax)
asm.jne(side_exit(jit, ctx))
else
+ # TODO: support more classes
return CantCompile
end
- # Do method lookup
+ # Do method lookup (vm_cc_cme(cc) != NULL)
cme = C.rb_callable_method_entry(comptime_recv_klass, mid)
if cme.nil?
- return CantCompile
+ return CantCompile # We don't support vm_call_method_name
end
+ # The main check of vm_call_method before vm_call_method_each_type
case C.METHOD_ENTRY_VISI(cme)
when C.METHOD_VISI_PUBLIC
# You can always call public methods
when C.METHOD_VISI_PRIVATE
+ # Allow only callsites without a receiver
if flags & C.VM_CALL_FCALL == 0
- # VM_CALL_FCALL: Callsites without a receiver of an explicit `self` receiver
return CantCompile
end
when C.METHOD_VISI_PROTECTED
return CantCompile # TODO: support this
else
- raise 'cmes should always have a visibility'
+ raise 'unreachable'
end
- # TODO: assume_method_lookup_stable
+ # Invalidate on redefinition (part of vm_search_method_fastpath)
+ @invariants.assume_method_lookup_stable(jit, cme)
- if flags & C.VM_CALL_ARGS_SPLAT != 0 && cme.def.type != C.VM_METHOD_TYPE_ISEQ
- return CantCompile
- end
+ jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme)
+ end
+ # vm_call_method_each_type
+ # @param jit [RubyVM::MJIT::JITState]
+ # @param ctx [RubyVM::MJIT::Context]
+ # @param asm [RubyVM::MJIT::Assembler]
+ def jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme)
case cme.def.type
when C.VM_METHOD_TYPE_ISEQ
- iseq = def_iseq_ptr(cme.def)
- frame_type = C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL
- compile_send_iseq(jit, ctx, asm, iseq, ci, frame_type, cme, flags, argc)
+ jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
else
return CantCompile
end
end
- def compile_send_iseq(jit, ctx, asm, iseq, ci, frame_type, cme, flags, argc)
- # TODO: check a bunch of CantCompile cases
-
- compile_check_ints(jit, ctx, asm)
-
- # TODO: stack overflow check
+ # vm_call_iseq_setup
+ # @param jit [RubyVM::MJIT::JITState]
+ # @param ctx [RubyVM::MJIT::Context]
+ # @param asm [RubyVM::MJIT::Assembler]
+ def jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
+ iseq = def_iseq_ptr(cme.def)
+ opt_pc = jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
+ if opt_pc == CantCompile
+ # We hit some unsupported path of vm_callee_setup_arg
+ return CantCompile
+ end
- # TODO: more flag checks
+ if flags & C.VM_CALL_TAILCALL != 0
+ # We don't support vm_call_iseq_setup_tailcall
+ return CantCompile
+ end
+ jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
+ end
- # Pop arguments and a receiver for the current caller frame
- raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
- sp_index = ctx.stack_size - argc - 1 # arguments and receiver
+ # vm_call_iseq_setup_normal (vm_call_iseq_setup_2 -> vm_call_iseq_setup_normal)
+ def jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
+ # Save caller SP and PC before pushing a callee frame for backtrace and side exits
asm.comment('save SP to caller CFP')
- asm.lea(:rax, [SP, sp_index])
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
+ sp_index = ctx.stack_size - 1 - argc - ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1) # Pop receiver and arguments for side exits
+ asm.lea(:rax, [SP, C.VALUE.size * sp_index])
asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], :rax)
- # TODO: do something about ctx.sp_index
- asm.comment('save PC to CFP')
- next_pc = jit.pc + jit.insn.len * C.VALUE.size
+ asm.comment('save PC to caller CFP')
+ next_pc = jit.pc + jit.insn.len * C.VALUE.size # Use the next one for backtrace and side exits
asm.mov(:rax, next_pc)
- asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax) # cfp->pc = rax
-
- # TODO: push cme, specval, frame type
- # TODO: push callee control frame
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax)
- asm.comment('switch to new CFP')
- asm.lea(:rax, [CFP, -C.rb_control_frame_t.size])
- asm.mov(CFP, :rax);
- asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)
+ frame_type = C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL
+ jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, iseq, frame_type, next_pc)
+ end
- asm.comment('save SP to callee CFP')
- num_locals = 0 # TODO
- sp_offset = C.VALUE.size * (3 + num_locals + ctx.stack_size)
- asm.add(SP, sp_offset)
- asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
+ # vm_push_frame
+ #
+ # Frame structure:
+ # | args | locals | cme/cref | block_handler/prev EP | frame type (EP here) | stack bottom (SP here)
+ def jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, iseq, frame_type, next_pc)
+ # TODO: stack overflow check
- asm.comment('save ISEQ to callee CFP')
- asm.mov(:rax, iseq.to_i)
- asm.mov([CFP, C.rb_control_frame_t.offsetof(:iseq)], :rax)
+ local_size = iseq.body.local_table_size
+ if local_size > 0
+ # TODO: support local variables
+ return CantCompile
+ end
- asm.comment('save EP to callee CFP')
- asm.lea(:rax, [SP, -C.VALUE.size])
- asm.mov([CFP, C.rb_control_frame_t.offsetof(:ep)], :rax)
+ asm.comment('move SP register to callee SP')
+ assert_equal(ctx.sp_offset, ctx.stack_size) # TODO: support SP motion
+ sp_offset = ctx.stack_size + local_size + 3
+ asm.add(SP, C.VALUE.size * sp_offset)
- asm.comment('set frame type')
- asm.mov([SP, C.VALUE.size * -1], C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL)
+ asm.comment('set cme')
+ asm.mov(:rax, cme.to_i)
+ asm.mov([SP, C.VALUE.size * -3], :rax)
asm.comment('set specval')
asm.mov([SP, C.VALUE.size * -2], C.VM_BLOCK_HANDLER_NONE)
- # Stub the return destination from the callee
- # TODO: set up return ctx correctly
- jit_return_stub = BlockStub.new(iseq: jit.iseq, pc: next_pc, ctx: ctx.dup)
+ asm.comment('set frame type')
+ asm.mov([SP, C.VALUE.size * -1], frame_type)
+
+ asm.comment('move CFP register to callee CFP')
+ asm.sub(CFP, C.rb_control_frame_t.size);
+
+ # Not setting PC since JIT code will do that as needed
+ asm.comment('set SP to callee CFP')
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
+ asm.comment('set ISEQ to callee CFP')
+ asm.mov(:rax, iseq.to_i)
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:iseq)], :rax)
+ asm.comment('set self to callee CFP')
+ self_index = -(1 + argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1) + local_size + 3)
+ asm.mov(:rax, [SP, C.VALUE.size * self_index])
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:self)], :rax)
+ asm.comment('set EP to callee CFP')
+ asm.lea(:rax, [SP, C.VALUE.size * -1])
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:ep)], :rax)
+ asm.comment('set block_code to callee CFP')
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:block_code)], 0)
+ asm.comment('set BP to callee CFP')
+ asm.mov([CFP, C.rb_control_frame_t.offsetof(:__bp__)], SP) # TODO: get rid of this!!
+
+ # Stub cfp->jit_return
+ return_ctx = ctx.dup
+ return_ctx.sp_offset = 1 # SP is in the position after popping a receiver and arguments
+ jit_return_stub = BlockStub.new(iseq: jit.iseq, pc: next_pc, ctx: return_ctx)
jit_return = Assembler.new.then do |ocb_asm|
@exit_compiler.compile_block_stub(ctx, ocb_asm, jit_return_stub)
@ocb.write(ocb_asm)
end
-
jit_return_stub.change_block = proc do |jump_asm, new_addr|
- jump_asm.comment('update cfp->jit_return')
+ jump_asm.comment('set jit_return to callee CFP')
jump_asm.stub(jit_return_stub) do
jump_asm.mov(:rax, new_addr)
jump_asm.mov([CFP, C.rb_control_frame_t.offsetof(:jit_return)], :rax)
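
To make the index arithmetic in jit_push_frame concrete, a worked example (illustrative, assuming a call like `recv.foo(42)`: argc = 1, no block argument, an ISEQ with no extra locals, 8-byte VALUEs):

    stack_size = 2                    # the caller pushed the receiver and one argument
    argc, blockarg, local_size = 1, 0, 0

    sp_offset  = stack_size + local_size + 3              # => 5, so SP advances by 5 * 8 bytes
    self_index = -(1 + argc + blockarg + local_size + 3)  # => -5

    # Layout relative to the moved SP register:
    #   [SP - 5*8]  receiver  (self_index)
    #   [SP - 4*8]  argument
    #   [SP - 3*8]  cme
    #   [SP - 2*8]  specval (VM_BLOCK_HANDLER_NONE)
    #   [SP - 1*8]  frame type -- the callee EP points here
    #   [SP      ]  callee stack bottom -- stored into the callee cfp->sp and __bp__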
@@ -628,13 +671,73 @@ module RubyVM::MJIT
end
jit_return_stub.change_block.call(asm, jit_return)
+ asm.comment('set callee CFP to ec->cfp')
+ asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], CFP)
+
+ # Jump to a stub for the callee ISEQ
callee_ctx = Context.new
compile_block_stub(iseq, iseq.body.iseq_encoded.to_i, callee_ctx, asm)
EndBlock
end
- def assert_eq!(left, right)
+ # vm_callee_setup_arg: Set up args and return opt_pc (or CantCompile)
+ # @param jit [RubyVM::MJIT::JITState]
+ # @param ctx [RubyVM::MJIT::Context]
+ # @param asm [RubyVM::MJIT::Assembler]
+ def jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
+ if flags & C.VM_CALL_KW_SPLAT == 0
+ if C.rb_simple_iseq_p(iseq)
+ if jit_caller_setup_arg(jit, ctx, asm, flags) == CantCompile
+ return CantCompile
+ end
+ if jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags) == CantCompile
+ return CantCompile
+ end
+
+ if C.vm_ci_argc(ci) != iseq.body.param.lead_num
+ # argument_arity_error
+ return CantCompile
+ end
+
+ return 0
+ else
+ # We don't support the remaining `else if`s yet.
+ return CantCompile
+ end
+ end
+
+ # We don't support setup_parameters_complex
+ return CantCompile
+ end
+
+ # CALLER_SETUP_ARG: Return CantCompile if not supported
+ # @param jit [RubyVM::MJIT::JITState]
+ # @param ctx [RubyVM::MJIT::Context]
+ # @param asm [RubyVM::MJIT::Assembler]
+ def jit_caller_setup_arg(jit, ctx, asm, flags)
+ if flags & C.VM_CALL_ARGS_SPLAT != 0
+ # We don't support vm_caller_setup_arg_splat
+ return CantCompile
+ end
+ if flags & (C.VM_CALL_KWARG | C.VM_CALL_KW_SPLAT) != 0
+ # We don't support keyword args either
+ return CantCompile
+ end
+ end
+
+ # CALLER_REMOVE_EMPTY_KW_SPLAT: Return CantCompile if not supported
+ # @param jit [RubyVM::MJIT::JITState]
+ # @param ctx [RubyVM::MJIT::Context]
+ # @param asm [RubyVM::MJIT::Assembler]
+ def jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags)
+ if (flags & C.VM_CALL_KW_SPLAT) > 0
+ # We don't support removing the last Hash argument
+ return CantCompile
+ end
+ end
+
+ def assert_equal(left, right)
if left != right
raise "'#{left.inspect}' was not '#{right.inspect}'"
end
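
In terms of call-site shapes, the flag checks in jit_callee_setup_arg and jit_caller_setup_arg currently mean roughly the following (illustrative summary; the flag names are the VM_CALL_* constants bound via tool/mjit/bindgen.rb):

    # obj.foo(1, 2)    plain call, simple ISEQ, argc == lead_num  -> compiled, opt_pc = 0
    # obj.foo(*args)   VM_CALL_ARGS_SPLAT                         -> CantCompile
    # obj.foo(k: 1)    VM_CALL_KWARG                              -> CantCompile
    # obj.foo(**opts)  VM_CALL_KW_SPLAT                           -> CantCompile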
diff --git a/lib/ruby_vm/mjit/invariants.rb b/lib/ruby_vm/mjit/invariants.rb
index c9d9b9fb6a..fdd39ba466 100644
--- a/lib/ruby_vm/mjit/invariants.rb
+++ b/lib/ruby_vm/mjit/invariants.rb
@@ -2,12 +2,22 @@ require 'set'
module RubyVM::MJIT
class Invariants
+ # @param cb [CodeBlock]
# @param ocb [CodeBlock]
# @param exit_compiler [RubyVM::MJIT::ExitCompiler]
- def initialize(ocb, exit_compiler)
+ def initialize(cb, ocb, exit_compiler)
+ @cb = cb
@ocb = ocb
@exit_compiler = exit_compiler
@bop_blocks = Set.new # TODO: actually invalidate this
+ @cme_blocks = Hash.new { |h, k| h[k] = Set.new }
+
+ invariants = self
+ hooks = Module.new
+ hooks.define_method(:on_cme_invalidate) do |cme|
+ invariants.on_cme_invalidate(cme)
+ end
+ Hooks.singleton_class.prepend(hooks)
end
# @param jit [RubyVM::MJIT::JITState]
@@ -21,6 +31,22 @@ module RubyVM::MJIT
true
end
+ # @param jit [RubyVM::MJIT::JITState]
+ def assume_method_lookup_stable(jit, cme)
+ ensure_block_entry_exit(jit.block, cause: 'assume_method_lookup_stable')
+ @cme_blocks[cme.to_i] << jit.block
+ end
+
+ def on_cme_invalidate(cme)
+ @cme_blocks.fetch(cme.to_i, []).each do |block|
+ @cb.with_write_addr(block.start_addr) do
+ asm = Assembler.new
+ asm.jmp(block.entry_exit)
+ @cb.write(asm)
+ end
+ end
+ end
+
private
# @param block [RubyVM::MJIT::Block]
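
Read together with insn_compiler.rb above, the invalidation flow is: assume_method_lookup_stable registers the current block under the cme's address, and on_cme_invalidate later rewrites that block's entry so it jumps straight to its side exit. A compressed restatement (names from the patch, no new behavior):

    # 1. While compiling a send, the insn compiler records the assumption:
    #      @invariants.assume_method_lookup_stable(jit, cme)    # cme.to_i => jit.block
    # 2. When vm_method.c invalidates that cme, Hooks forwards it here and every
    #    dependent block gets its entry overwritten in place:
    @cb.with_write_addr(block.start_addr) do
      asm = Assembler.new
      asm.jmp(block.entry_exit)   # future entries take the side exit / interpreter
      @cb.write(asm)
    end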
diff --git a/mjit.c b/mjit.c
index a63e09d385..76e249de62 100644
--- a/mjit.c
+++ b/mjit.c
@@ -67,6 +67,8 @@ bool mjit_stats_enabled = false;
// true if JIT-ed code should be called. When `ruby_vm_event_enabled_global_flags & ISEQ_TRACE_EVENTS`
// and `mjit_call_p == false`, any JIT-ed code execution is cancelled as soon as possible.
bool mjit_call_p = false;
+// A flag to communicate that mjit_call_p should be disabled while it's temporarily false.
+bool mjit_cancel_p = false;
#include "mjit_config.h"
@@ -101,7 +103,14 @@ mjit_capture_cc_entries(const struct rb_iseq_constant_body *compiled_iseq, const
void
mjit_cancel_all(const char *reason)
{
- // TODO: remove this
+ if (!mjit_enabled)
+ return;
+
+ mjit_call_p = false;
+ mjit_cancel_p = true;
+ if (mjit_opts.warnings || mjit_opts.verbose) {
+ fprintf(stderr, "JIT cancel: Disabled JIT-ed code because %s\n", reason);
+ }
}
void
@@ -132,6 +141,8 @@ static VALUE rb_MJITCompiler = 0;
static VALUE rb_cMJITIseqPtr = 0;
// RubyVM::MJIT::CPointer::Struct_rb_control_frame_t
static VALUE rb_cMJITCfpPtr = 0;
+// RubyVM::MJIT::Hooks
+static VALUE rb_mMJITHooks = 0;
void
rb_mjit_add_iseq_to_process(const rb_iseq_t *iseq)
@@ -305,6 +316,14 @@ rb_mjit_collect_vm_usage_insn(int insn)
#endif // YJIT_STATS
+#define WITH_MJIT_DISABLED(stmt) do { \
+ bool original_call_p = mjit_call_p; \
+ mjit_call_p = false; \
+ stmt; \
+ mjit_call_p = original_call_p; \
+ if (mjit_cancel_p) mjit_call_p = false; \
+} while (0);
+
void
rb_mjit_bop_redefined(int redefined_flag, enum ruby_basic_operators bop)
{
@@ -313,6 +332,17 @@ rb_mjit_bop_redefined(int redefined_flag, enum ruby_basic_operators bop)
}
void
+rb_mjit_cme_invalidate(rb_callable_method_entry_t *cme)
+{
+ if (!mjit_enabled || !mjit_call_p || !rb_mMJITHooks) return;
+ WITH_MJIT_DISABLED({
+ VALUE cme_klass = rb_funcall(rb_mMJITC, rb_intern("rb_callable_method_entry_struct"), 0);
+ VALUE cme_ptr = rb_funcall(cme_klass, rb_intern("new"), 1, SIZET2NUM((size_t)cme));
+ rb_funcall(rb_mMJITHooks, rb_intern("on_cme_invalidate"), 1, cme_ptr);
+ });
+}
+
+void
rb_mjit_before_ractor_spawn(void)
{
if (!mjit_call_p) return;
@@ -434,6 +464,7 @@ mjit_init(const struct mjit_options *opts)
SIZET2NUM((size_t)rb_mjit_mem_block), UINT2NUM(MJIT_CODE_SIZE));
rb_cMJITIseqPtr = rb_funcall(rb_mMJITC, rb_intern("rb_iseq_t"), 0);
rb_cMJITCfpPtr = rb_funcall(rb_mMJITC, rb_intern("rb_control_frame_t"), 0);
+ rb_mMJITHooks = rb_const_get(rb_mMJIT, rb_intern("Hooks"));
mjit_call_p = true;
mjit_stats_p = mjit_opts.stats;
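
For reference, the rb_funcall chain in rb_mjit_cme_invalidate is the C spelling of roughly the following Ruby, executed with mjit_call_p temporarily cleared by WITH_MJIT_DISABLED so no JIT-ed code runs while the hook does (illustrative; `cme_addr` stands in for the raw pointer passed from C):

    cme_ptr = RubyVM::MJIT::C.rb_callable_method_entry_struct.new(cme_addr)
    RubyVM::MJIT::Hooks.on_cme_invalidate(cme_ptr)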
diff --git a/mjit.rb b/mjit.rb
index 712b508ace..695c56fad1 100644
--- a/mjit.rb
+++ b/mjit.rb
@@ -29,5 +29,6 @@ if RubyVM::MJIT.enabled?
require 'ruby_vm/mjit/c_type'
require 'ruby_vm/mjit/compiler'
+ require 'ruby_vm/mjit/hooks'
require 'ruby_vm/mjit/stats'
end
diff --git a/mjit_c.c b/mjit_c.c
index 596e1eb544..89b0b7a1d0 100644
--- a/mjit_c.c
+++ b/mjit_c.c
@@ -86,6 +86,8 @@ mjit_enabled_p(rb_execution_context_t *ec, VALUE self)
return RBOOL(mjit_enabled);
}
+extern bool rb_simple_iseq_p(const rb_iseq_t *iseq);
+
#include "mjit_c.rbinc"
#endif // USE_MJIT
diff --git a/mjit_c.rb b/mjit_c.rb
index 0282abbbda..c08c888196 100644
--- a/mjit_c.rb
+++ b/mjit_c.rb
@@ -96,6 +96,11 @@ module RubyVM::MJIT # :nodoc: all
Primitive.cexpr! 'UINT2NUM(METHOD_ENTRY_VISI((const rb_callable_method_entry_t *)NUM2SIZET(_cme_addr)))'
end
+ def rb_simple_iseq_p(iseq)
+ _iseq_addr = iseq.to_i
+ Primitive.cexpr! 'RBOOL(rb_simple_iseq_p((rb_iseq_t *)NUM2SIZET(_iseq_addr)))'
+ end
+
#========================================================================================
#
# Old stuff
@@ -177,7 +182,6 @@ module RubyVM::MJIT # :nodoc: all
_cc_addr = cc_ptr.to_i
_iseq_addr = iseq_ptr.to_i
Primitive.cstmt! %q{
- extern bool rb_simple_iseq_p(const rb_iseq_t *iseq);
CALL_INFO ci = (CALL_INFO)NUM2PTR(_ci_addr);
CALL_CACHE cc = (CALL_CACHE)NUM2PTR(_cc_addr);
const rb_iseq_t *iseq = (rb_iseq_t *)NUM2PTR(_iseq_addr);
@@ -325,6 +329,10 @@ module RubyVM::MJIT # :nodoc: all
Primitive.cexpr! %q{ UINT2NUM(VM_CALL_FCALL) }
end
+ def C.VM_CALL_KWARG
+ Primitive.cexpr! %q{ UINT2NUM(VM_CALL_KWARG) }
+ end
+
def C.VM_CALL_KW_SPLAT
Primitive.cexpr! %q{ UINT2NUM(VM_CALL_KW_SPLAT) }
end
diff --git a/tool/mjit/bindgen.rb b/tool/mjit/bindgen.rb
index 4e1a29cb89..109ad15761 100755
--- a/tool/mjit/bindgen.rb
+++ b/tool/mjit/bindgen.rb
@@ -365,6 +365,7 @@ generator = BindingGenerator.new(
VM_CALL_ARGS_BLOCKARG
VM_CALL_ARGS_SPLAT
VM_CALL_FCALL
+ VM_CALL_KWARG
VM_CALL_KW_SPLAT
VM_CALL_KW_SPLAT_bit
VM_CALL_TAILCALL
diff --git a/vm_method.c b/vm_method.c
index 82c29624c1..4cb96cc7fd 100644
--- a/vm_method.c
+++ b/vm_method.c
@@ -124,6 +124,7 @@ vm_cme_invalidate(rb_callable_method_entry_t *cme)
RB_DEBUG_COUNTER_INC(cc_cme_invalidate);
rb_yjit_cme_invalidate(cme);
+ rb_mjit_cme_invalidate(cme);
}
static int
@@ -188,6 +189,7 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
if (cc_tbl && rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
+ rb_mjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
rb_vm_ccs_free(ccs);
rb_id_table_delete(cc_tbl, mid);
@@ -201,6 +203,9 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
if (rb_yjit_enabled_p() && rb_id_table_lookup(cm_tbl, mid, &cme)) {
rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
}
+ if (mjit_enabled && rb_id_table_lookup(cm_tbl, mid, &cme)) {
+ rb_mjit_cme_invalidate((rb_callable_method_entry_t *)cme);
+ }
rb_id_table_delete(cm_tbl, mid);
RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
}