Diffstat (limited to 'deps/v8/src/builtins')
-rw-r--r--  deps/v8/src/builtins/arm/builtins-arm.cc       | 13
-rw-r--r--  deps/v8/src/builtins/arm64/builtins-arm64.cc   | 13
-rw-r--r--  deps/v8/src/builtins/ia32/builtins-ia32.cc     | 15
-rw-r--r--  deps/v8/src/builtins/mips/builtins-mips.cc     | 13
-rw-r--r--  deps/v8/src/builtins/mips64/builtins-mips64.cc | 13
-rw-r--r--  deps/v8/src/builtins/ppc/builtins-ppc.cc       | 14
-rw-r--r--  deps/v8/src/builtins/s390/builtins-s390.cc     | 13
-rw-r--r--  deps/v8/src/builtins/x64/builtins-x64.cc       | 13
8 files changed, 24 insertions, 83 deletions
diff --git a/deps/v8/src/builtins/arm/builtins-arm.cc b/deps/v8/src/builtins/arm/builtins-arm.cc
index bf359d69e9..e8fa690660 100644
--- a/deps/v8/src/builtins/arm/builtins-arm.cc
+++ b/deps/v8/src/builtins/arm/builtins-arm.cc
@@ -782,22 +782,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ cmp(
             optimized_code_entry,
             Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
         __ Assert(eq, kExpectedOptimizationSentinel);
       }
-      // Checking whether the queued function is ready for install is
-      // optional, since we come across interrupts and stack checks elsewhere.
-      // However, not checking may delay installing ready functions, and
-      // always checking would be quite expensive. A good compromise is to
-      // first check against stack limit as a cue for an interrupt signal.
-      __ LoadRoot(scratch2, Heap::kStackLimitRootIndex);
-      __ cmp(sp, Operand(scratch2));
-      __ b(hs, &fallthrough);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ jmp(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/arm64/builtins-arm64.cc b/deps/v8/src/builtins/arm64/builtins-arm64.cc
index b1d5d32b9a..7aaa2d0003 100644
--- a/deps/v8/src/builtins/arm64/builtins-arm64.cc
+++ b/deps/v8/src/builtins/arm64/builtins-arm64.cc
@@ -788,22 +788,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ Cmp(
             optimized_code_entry,
             Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
         __ Assert(eq, kExpectedOptimizationSentinel);
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
-      __ B(hs, &fallthrough);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ B(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc
index ee15025520..a689c3131d 100644
--- a/deps/v8/src/builtins/ia32/builtins-ia32.cc
+++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -698,24 +698,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ cmp(
             optimized_code_entry,
             Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
         __ Assert(equal, kExpectedOptimizationSentinel);
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      ExternalReference stack_limit =
-          ExternalReference::address_of_stack_limit(masm->isolate());
-      __ cmp(esp, Operand::StaticVariable(stack_limit));
-      __ j(above_equal, &fallthrough);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ jmp(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/mips/builtins-mips.cc b/deps/v8/src/builtins/mips/builtins-mips.cc
index e8f846c10a..4835fb0b1b 100644
--- a/deps/v8/src/builtins/mips/builtins-mips.cc
+++ b/deps/v8/src/builtins/mips/builtins-mips.cc
@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ Assert(
             eq, kExpectedOptimizationSentinel, optimized_code_entry,
             Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ LoadRoot(at, Heap::kStackLimitRootIndex);
-      __ Branch(&fallthrough, hs, sp, Operand(at));
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ jmp(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/mips64/builtins-mips64.cc b/deps/v8/src/builtins/mips64/builtins-mips64.cc
index f62750b061..2584444f1f 100644
--- a/deps/v8/src/builtins/mips64/builtins-mips64.cc
+++ b/deps/v8/src/builtins/mips64/builtins-mips64.cc
@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ Assert(
             eq, kExpectedOptimizationSentinel, optimized_code_entry,
             Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
-      __ Branch(&fallthrough, hs, sp, Operand(t0));
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ jmp(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/ppc/builtins-ppc.cc b/deps/v8/src/builtins/ppc/builtins-ppc.cc
index 3ed3eb686d..c242be5cf8 100644
--- a/deps/v8/src/builtins/ppc/builtins-ppc.cc
+++ b/deps/v8/src/builtins/ppc/builtins-ppc.cc
@@ -780,23 +780,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ CmpSmiLiteral(
             optimized_code_entry,
             Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
         __ Assert(eq, kExpectedOptimizationSentinel);
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-      __ cmpl(sp, ip);
-      __ bge(&fallthrough);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ b(&fallthrough);
     }
   }
 
diff --git a/deps/v8/src/builtins/s390/builtins-s390.cc b/deps/v8/src/builtins/s390/builtins-s390.cc
index e9ef390c69..aa9e62f217 100644
--- a/deps/v8/src/builtins/s390/builtins-s390.cc
+++ b/deps/v8/src/builtins/s390/builtins-s390.cc
@@ -783,22 +783,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ CmpSmiLiteral(
             optimized_code_entry,
             Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
         __ Assert(eq, kExpectedOptimizationSentinel);
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
-      __ bge(&fallthrough, Label::kNear);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ b(&fallthrough, Label::kNear);
     }
   }
 
diff --git a/deps/v8/src/builtins/x64/builtins-x64.cc b/deps/v8/src/builtins/x64/builtins-x64.cc
index 713475cd34..81c92681d5 100644
--- a/deps/v8/src/builtins/x64/builtins-x64.cc
+++ b/deps/v8/src/builtins/x64/builtins-x64.cc
@@ -781,21 +781,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                    Runtime::kCompileOptimized_Concurrent);
 
     {
-      // Otherwise, the marker is InOptimizationQueue.
+      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+      // that an interrupt will eventually update the slot with optimized code.
       if (FLAG_debug_code) {
         __ SmiCompare(optimized_code_entry,
                       Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
         __ Assert(equal, kExpectedOptimizationSentinel);
       }
-
-      // Checking whether the queued function is ready for install is optional,
-      // since we come across interrupts and stack checks elsewhere. However,
-      // not checking may delay installing ready functions, and always checking
-      // would be quite expensive. A good compromise is to first check against
-      // stack limit as a cue for an interrupt signal.
-      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-      __ j(above_equal, &fallthrough);
-      GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+      __ jmp(&fallthrough);
     }
   }
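All eight per-architecture hunks make the same edit to MaybeTailCallOptimizedCodeSlot: when the optimization marker is InOptimizationQueue, the builtin no longer compares the stack pointer against the stack limit and tail-calls Runtime::kTryInstallOptimizedCode; it just asserts the marker in debug builds and jumps to the fallthrough label. The following is a hypothetical, simplified C++ sketch of that branch's logic, written only to illustrate the before/after behavior described in the diff; the enum, function name, and flag parameter are assumptions and do not reflect the actual V8 macro-assembler code.

#include <cassert>

enum class OptimizationMarker { kNone, kCompileOptimized, kInOptimizationQueue };

// Hypothetical sketch of the InOptimizationQueue arm after this change.
void HandleInOptimizationQueue(OptimizationMarker marker, bool debug_code) {
  // Markers that request compilation are handled earlier and tail-call the
  // runtime (Runtime::kCompileOptimized_*). The only marker left here is
  // kInOptimizationQueue.
  if (debug_code) {
    assert(marker == OptimizationMarker::kInOptimizationQueue);
  }
  // Before this change: compare sp against the stack limit and, if no
  // interrupt was pending, tail-call Runtime::kTryInstallOptimizedCode to
  // install the queued code eagerly.
  // After this change: simply fall through to the unoptimized entry and let
  // a later interrupt or stack check install the optimized code.
}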