Diffstat (limited to 'chromium/v8')
-rw-r--r--  chromium/v8/build/toolchain.gypi             |  8
-rw-r--r--  chromium/v8/src/arm/lithium-arm.cc           | 44
-rw-r--r--  chromium/v8/src/arm/lithium-arm.h            |  6
-rw-r--r--  chromium/v8/src/arm/lithium-codegen-arm.cc   |  8
-rw-r--r--  chromium/v8/src/arm/lithium-codegen-arm.h    |  1
-rw-r--r--  chromium/v8/src/arraybuffer.js               | 11
-rw-r--r--  chromium/v8/src/cpu.cc                       |  4
-rw-r--r--  chromium/v8/src/deoptimizer.cc               | 24
-rw-r--r--  chromium/v8/src/heap.cc                      |  3
-rw-r--r--  chromium/v8/src/hydrogen-instructions.cc     | 16
-rw-r--r--  chromium/v8/src/hydrogen-instructions.h      | 59
-rw-r--r--  chromium/v8/src/hydrogen.cc                  |  3
-rw-r--r--  chromium/v8/src/ia32/lithium-codegen-ia32.cc |  4
-rw-r--r--  chromium/v8/src/ia32/lithium-ia32.cc         | 45
-rw-r--r--  chromium/v8/src/ia32/lithium-ia32.h          |  6
-rw-r--r--  chromium/v8/src/ic.cc                        |  5
-rw-r--r--  chromium/v8/src/mips/lithium-codegen-mips.cc |  7
-rw-r--r--  chromium/v8/src/mips/lithium-codegen-mips.h  |  1
-rw-r--r--  chromium/v8/src/mips/lithium-mips.cc         | 44
-rw-r--r--  chromium/v8/src/mips/lithium-mips.h          |  6
-rw-r--r--  chromium/v8/src/runtime.cc                   |  4
-rw-r--r--  chromium/v8/src/safepoint-table.h            |  3
-rw-r--r--  chromium/v8/src/serialize.h                  |  2
-rw-r--r--  chromium/v8/src/typedarray.js                |  4
-rw-r--r--  chromium/v8/src/version.cc                   |  2
-rw-r--r--  chromium/v8/src/x64/lithium-codegen-x64.cc   | 58
-rw-r--r--  chromium/v8/src/x64/lithium-codegen-x64.h    |  1
-rw-r--r--  chromium/v8/src/x64/lithium-x64.cc           | 49
-rw-r--r--  chromium/v8/src/x64/lithium-x64.h            | 12
29 files changed, 243 insertions(+), 197 deletions(-)
diff --git a/chromium/v8/build/toolchain.gypi b/chromium/v8/build/toolchain.gypi
index 2dc3c04e1f1..99f357a965a 100644
--- a/chromium/v8/build/toolchain.gypi
+++ b/chromium/v8/build/toolchain.gypi
@@ -444,7 +444,7 @@
['v8_optimized_debug==0', {
'Optimization': '0',
'conditions': [
- ['component=="shared_library" or use_qt==1', {
+ ['component=="shared_library"', {
'RuntimeLibrary': '3', # /MDd
}, {
'RuntimeLibrary': '1', # /MTd
@@ -459,7 +459,7 @@
'StringPooling': 'true',
'BasicRuntimeChecks': '0',
'conditions': [
- ['component=="shared_library" or use_qt==1', {
+ ['component=="shared_library"', {
'RuntimeLibrary': '3', # /MDd
}, {
'RuntimeLibrary': '1', # /MTd
@@ -474,7 +474,7 @@
'StringPooling': 'true',
'BasicRuntimeChecks': '0',
'conditions': [
- ['component=="shared_library" or use_qt==1', {
+ ['component=="shared_library"', {
'RuntimeLibrary': '3', #/MDd
}, {
'RuntimeLibrary': '1', #/MTd
@@ -666,7 +666,7 @@
'FavorSizeOrSpeed': '0',
'StringPooling': 'true',
'conditions': [
- ['component=="shared_library" or use_qt==1', {
+ ['component=="shared_library"', {
'RuntimeLibrary': '2', #/MD
}, {
'RuntimeLibrary': '0', #/MT
diff --git a/chromium/v8/src/arm/lithium-arm.cc b/chromium/v8/src/arm/lithium-arm.cc
index 6119b248810..0b7a02eb664 100644
--- a/chromium/v8/src/arm/lithium-arm.cc
+++ b/chromium/v8/src/arm/lithium-arm.cc
@@ -631,15 +631,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
- if (hinstr->HasObservableSideEffects()) {
- ASSERT(hinstr->next()->IsSimulate());
- HSimulate* sim = HSimulate::cast(hinstr->next());
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_.IsNone());
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = sim->ast_id();
- }
-
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -921,6 +912,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
+
+ if (instr->IsCall()) {
+ HValue* hydrogen_value_for_lazy_bailout = current;
+ LInstruction* instruction_needing_environment = NULL;
+ if (current->HasObservableSideEffects()) {
+ HSimulate* sim = HSimulate::cast(current->next());
+ instruction_needing_environment = instr;
+ sim->ReplayEnvironment(current_block_->last_environment());
+ hydrogen_value_for_lazy_bailout = sim;
+ }
+ LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
+ bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
+ chunk_->AddInstruction(bailout, current_block_);
+ if (instruction_needing_environment != NULL) {
+ // Store the lazy deopt environment with the instruction if needed.
+ // Right now it is only used for LInstanceOfKnownGlobal.
+ instruction_needing_environment->
+ SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
+ }
+ }
}
current_instruction_ = old_current;
}
@@ -2579,21 +2590,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
-
- // If there is an instruction pending deoptimization environment create a
- // lazy bailout instruction to capture the environment.
- if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new(zone()) LLazyBailout;
- result = AssignEnvironment(result);
- // Store the lazy deopt environment with the instruction if needed. Right
- // now it is only used for LInstanceOfKnownGlobal.
- instruction_pending_deoptimization_environment_->
- SetDeferredLazyDeoptimizationEnvironment(result->environment());
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = BailoutId::None();
- return result;
- }
-
return NULL;
}
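
The hunks above move lazy-bailout creation out of MarkAsCall/DoSimulate and into VisitInstruction, so every call instruction is immediately followed by its own LLazyBailout; the same pattern is repeated for ia32, mips and x64 further down. A condensed, commented restatement of that logic (not new code, only names that appear in the hunk above):

    // Emitted right after chunk_->AddInstruction(instr, current_block_):
    if (instr->IsCall()) {
      // By default the bailout maps back to the call's own hydrogen value.
      HValue* hydrogen_value_for_lazy_bailout = current;
      LInstruction* instruction_needing_environment = NULL;
      if (current->HasObservableSideEffects()) {
        // The HSimulate that follows the call describes the state *after*
        // the call; replay it now so the bailout captures that environment.
        HSimulate* sim = HSimulate::cast(current->next());
        instruction_needing_environment = instr;
        sim->ReplayEnvironment(current_block_->last_environment());
        hydrogen_value_for_lazy_bailout = sim;
      }
      // Every call now gets an explicit lazy deopt point at its return
      // address, which the DEBUG check added in deoptimizer.cc below verifies.
      LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
      bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
      chunk_->AddInstruction(bailout, current_block_);
      if (instruction_needing_environment != NULL) {
        // Only LInstanceOfKnownGlobal consumes this deferred environment.
        instruction_needing_environment->
            SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
      }
    }
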
diff --git a/chromium/v8/src/arm/lithium-arm.h b/chromium/v8/src/arm/lithium-arm.h
index cfafc0645a0..4f420a3d262 100644
--- a/chromium/v8/src/arm/lithium-arm.h
+++ b/chromium/v8/src/arm/lithium-arm.h
@@ -2753,9 +2753,7 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
next_block_(NULL),
argument_count_(0),
allocator_(allocator),
- position_(RelocInfo::kNoPosition),
- instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(BailoutId::None()) { }
+ position_(RelocInfo::kNoPosition) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2908,8 +2906,6 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
int argument_count_;
LAllocator* allocator_;
int position_;
- LInstruction* instruction_pending_deoptimization_environment_;
- BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/chromium/v8/src/arm/lithium-codegen-arm.cc b/chromium/v8/src/arm/lithium-codegen-arm.cc
index 56990ca2284..0a3f043bc76 100644
--- a/chromium/v8/src/arm/lithium-codegen-arm.cc
+++ b/chromium/v8/src/arm/lithium-codegen-arm.cc
@@ -259,6 +259,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
+void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+ if (!instr->IsLazyBailout() && !instr->IsGap()) {
+ safepoints_.BumpLastLazySafepointIndex();
+ }
+}
+
+
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -2190,7 +2197,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
// is in the correct position.
Assembler::BlockConstPoolScope block_const_pool(masm());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
- __ nop(); // Signals no inlined code.
}
diff --git a/chromium/v8/src/arm/lithium-codegen-arm.h b/chromium/v8/src/arm/lithium-codegen-arm.h
index 3f2ba35899a..de27a36fdcb 100644
--- a/chromium/v8/src/arm/lithium-codegen-arm.h
+++ b/chromium/v8/src/arm/lithium-codegen-arm.h
@@ -191,6 +191,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
+ void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateDeoptJumpTable();
diff --git a/chromium/v8/src/arraybuffer.js b/chromium/v8/src/arraybuffer.js
index 6125f0f61cb..cfaa8d7efca 100644
--- a/chromium/v8/src/arraybuffer.js
+++ b/chromium/v8/src/arraybuffer.js
@@ -57,17 +57,18 @@ function ArrayBufferSlice(start, end) {
var relativeStart = TO_INTEGER(start);
var first;
+ var byte_length = %ArrayBufferGetByteLength(this);
if (relativeStart < 0) {
- first = MathMax(this.byteLength + relativeStart, 0);
+ first = MathMax(byte_length + relativeStart, 0);
} else {
- first = MathMin(relativeStart, this.byteLength);
+ first = MathMin(relativeStart, byte_length);
}
- var relativeEnd = IS_UNDEFINED(end) ? this.byteLength : TO_INTEGER(end);
+ var relativeEnd = IS_UNDEFINED(end) ? byte_length : TO_INTEGER(end);
var fin;
if (relativeEnd < 0) {
- fin = MathMax(this.byteLength + relativeEnd, 0);
+ fin = MathMax(byte_length + relativeEnd, 0);
} else {
- fin = MathMin(relativeEnd, this.byteLength);
+ fin = MathMin(relativeEnd, byte_length);
}
if (fin < first) {
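
For reference, the start/end clamping that ArrayBufferSlice applies above (against the cached byte_length) can be written as one standalone helper. This is an illustrative sketch only; the helper name and C++ packaging are not part of V8:

    #include <algorithm>
    #include <cstdint>

    // Clamp a relative slice index against the cached byte length: a negative
    // index counts back from the end, and the result is always in
    // [0, byte_length].
    static int64_t ClampSliceIndex(int64_t relative_index, int64_t byte_length) {
      if (relative_index < 0) {
        return std::max<int64_t>(byte_length + relative_index, 0);
      }
      return std::min(relative_index, byte_length);
    }

    // first = ClampSliceIndex(TO_INTEGER(start), byte_length)
    // fin   = ClampSliceIndex(IS_UNDEFINED(end) ? byte_length : TO_INTEGER(end),
    //                         byte_length)
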
diff --git a/chromium/v8/src/cpu.cc b/chromium/v8/src/cpu.cc
index e67b397f917..2bf51a7f6c0 100644
--- a/chromium/v8/src/cpu.cc
+++ b/chromium/v8/src/cpu.cc
@@ -380,12 +380,10 @@ CPU::CPU() : stepping_(0),
// ARMv6 device that reports architecture 7.
if (architecture_ == 7) {
char* processor = cpu_info.ExtractField("Processor");
- char* model_name = cpu_info.ExtractField("model name");
- if (HasListItem(processor, "(v6l)") || HasListItem(model_name, "(v6l)")) {
+ if (HasListItem(processor, "(v6l)")) {
architecture_ = 6;
}
delete[] processor;
- delete[] model_name;
}
}
diff --git a/chromium/v8/src/deoptimizer.cc b/chromium/v8/src/deoptimizer.cc
index 76f2fa9bd76..6c3100a6381 100644
--- a/chromium/v8/src/deoptimizer.cc
+++ b/chromium/v8/src/deoptimizer.cc
@@ -393,9 +393,33 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
element = next;
}
+#ifdef DEBUG
+ // Make sure all activations of optimized code can deopt at their current PC.
+ for (StackFrameIterator it(isolate, isolate->thread_local_top());
+ !it.done(); it.Advance()) {
+ StackFrame::Type type = it.frame()->type();
+ if (type == StackFrame::OPTIMIZED) {
+ Code* code = it.frame()->LookupCode();
+ if (FLAG_trace_deopt) {
+ JSFunction* function =
+ static_cast<OptimizedFrame*>(it.frame())->function();
+ CodeTracer::Scope scope(isolate->GetCodeTracer());
+ PrintF(scope.file(), "[deoptimizer patches for lazy deopt: ");
+ function->PrintName(scope.file());
+ PrintF(scope.file(),
+ " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
+ }
+ SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
+ int deopt_index = safepoint.deoptimization_index();
+ CHECK(deopt_index != Safepoint::kNoDeoptimizationIndex);
+ }
+ }
+#endif
+
// TODO(titzer): we need a handle scope only because of the macro assembler,
// which is only used in EnsureCodeForDeoptimizationEntry.
HandleScope scope(isolate);
+
// Now patch all the codes for deoptimization.
for (int i = 0; i < codes.length(); i++) {
// It is finally time to die, code object.
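
The #ifdef DEBUG block above enforces a single predicate over each optimized frame. A fragment sketch of that predicate, reusing only the calls visible in the hunk (this helper does not exist in V8 and is not self-contained):

    // True iff the optimized code can lazily deoptimize at this pc, i.e. the
    // safepoint covering the pc carries a deoptimization index.
    static bool CanLazilyDeoptAt(Code* code, Address pc) {
      SafepointEntry safepoint = code->GetSafepointEntry(pc);
      return safepoint.deoptimization_index() !=
             Safepoint::kNoDeoptimizationIndex;
    }
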
diff --git a/chromium/v8/src/heap.cc b/chromium/v8/src/heap.cc
index 1e9091b30d8..d9dc8c19b07 100644
--- a/chromium/v8/src/heap.cc
+++ b/chromium/v8/src/heap.cc
@@ -561,6 +561,9 @@ void Heap::GarbageCollectionEpilogue() {
if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
if (FLAG_deopt_every_n_garbage_collections > 0) {
+ // TODO(jkummerow/ulan/jarin): This is not safe! We can't assume that
+ // the topmost optimized frame can be deoptimized safely, because it
+ // might not have a lazy bailout point right after its current PC.
if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
Deoptimizer::DeoptimizeAll(isolate());
gcs_since_last_deopt_ = 0;
diff --git a/chromium/v8/src/hydrogen-instructions.cc b/chromium/v8/src/hydrogen-instructions.cc
index d418954aad2..6bf662a638f 100644
--- a/chromium/v8/src/hydrogen-instructions.cc
+++ b/chromium/v8/src/hydrogen-instructions.cc
@@ -2436,6 +2436,7 @@ void HSimulate::PrintDataTo(StringStream* stream) {
void HSimulate::ReplayEnvironment(HEnvironment* env) {
+ if (done_with_replay_) return;
ASSERT(env != NULL);
env->set_ast_id(ast_id());
env->Drop(pop_count());
@@ -2447,6 +2448,7 @@ void HSimulate::ReplayEnvironment(HEnvironment* env) {
env->Push(value);
}
}
+ done_with_replay_ = true;
}
@@ -2572,7 +2574,11 @@ HConstant::HConstant(int32_t integer_value,
boolean_value_(integer_value != 0),
int32_value_(integer_value),
double_value_(FastI2D(integer_value)) {
- set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
+ // It's possible to create a constant with a value in Smi-range but stored
+ // in a (pre-existing) HeapNumber. See crbug.com/349878.
+ bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
+ bool is_smi = has_smi_value_ && !could_be_heapobject;
+ set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
Initialize(r);
}
@@ -2592,7 +2598,11 @@ HConstant::HConstant(double double_value,
int32_value_(DoubleToInt32(double_value)),
double_value_(double_value) {
has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
- set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
+ // It's possible to create a constant with a value in Smi-range but stored
+ // in a (pre-existing) HeapNumber. See crbug.com/349878.
+ bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
+ bool is_smi = has_smi_value_ && !could_be_heapobject;
+ set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
Initialize(r);
}
@@ -2991,7 +3001,7 @@ void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
if (left()->IsConstant() && right()->IsConstant()) {
bool comparison_result =
- HConstant::cast(left())->Equals(HConstant::cast(right()));
+ HConstant::cast(left())->DataEquals(HConstant::cast(right()));
*block = comparison_result
? FirstSuccessor()
: SecondSuccessor();
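
The two HConstant hunks above apply the same type decision; restated as a standalone predicate for readability (the helper below is hypothetical, not V8 code):

    // A constant whose numeric value fits in Smi range may still be backed by
    // a pre-existing HeapNumber (crbug.com/349878); in that case its HType
    // must stay TaggedNumber rather than Smi.
    static bool ConstantTypeIsSmi(bool has_smi_value,
                                  bool representation_is_tagged,
                                  bool has_object_handle) {
      bool could_be_heapobject = representation_is_tagged && has_object_handle;
      return has_smi_value && !could_be_heapobject;
    }
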
diff --git a/chromium/v8/src/hydrogen-instructions.h b/chromium/v8/src/hydrogen-instructions.h
index cf83928529c..158e4c103ad 100644
--- a/chromium/v8/src/hydrogen-instructions.h
+++ b/chromium/v8/src/hydrogen-instructions.h
@@ -1802,7 +1802,8 @@ class HSimulate V8_FINAL : public HInstruction {
values_(2, zone),
assigned_indexes_(2, zone),
zone_(zone),
- removable_(removable) {}
+ removable_(removable),
+ done_with_replay_(false) {}
~HSimulate() {}
virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;
@@ -1885,7 +1886,8 @@ class HSimulate V8_FINAL : public HInstruction {
ZoneList<HValue*> values_;
ZoneList<int> assigned_indexes_;
Zone* zone_;
- RemovableSimulate removable_;
+ RemovableSimulate removable_ : 2;
+ bool done_with_replay_ : 1;
#ifdef DEBUG
Handle<JSFunction> closure_;
@@ -3657,15 +3659,6 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> {
return object_;
}
-#ifdef DEBUG
- virtual void Verify() V8_OVERRIDE { }
-#endif
-
- DECLARE_CONCRETE_INSTRUCTION(Constant)
-
- protected:
- virtual Range* InferRange(Zone* zone) V8_OVERRIDE;
-
virtual bool DataEquals(HValue* other) V8_OVERRIDE {
HConstant* other_constant = HConstant::cast(other);
if (has_int32_value_) {
@@ -3690,6 +3683,15 @@ class HConstant V8_FINAL : public HTemplateInstruction<0> {
}
}
+#ifdef DEBUG
+ virtual void Verify() V8_OVERRIDE { }
+#endif
+
+ DECLARE_CONCRETE_INSTRUCTION(Constant)
+
+ protected:
+ virtual Range* InferRange(Zone* zone) V8_OVERRIDE;
+
private:
friend class HGraph;
HConstant(Handle<Object> handle, Representation r = Representation::None());
@@ -4335,24 +4337,6 @@ class HCompareMinusZeroAndBranch V8_FINAL : public HUnaryControlInstruction {
class HCompareObjectEqAndBranch : public HTemplateControlInstruction<2, 2> {
public:
- HCompareObjectEqAndBranch(HValue* left,
- HValue* right,
- HBasicBlock* true_target = NULL,
- HBasicBlock* false_target = NULL) {
- // TODO(danno): make this private when the IfBuilder properly constructs
- // control flow instructions.
- ASSERT(!left->IsConstant() ||
- (!HConstant::cast(left)->HasInteger32Value() ||
- HConstant::cast(left)->HasSmiValue()));
- ASSERT(!right->IsConstant() ||
- (!HConstant::cast(right)->HasInteger32Value() ||
- HConstant::cast(right)->HasSmiValue()));
- SetOperandAt(0, left);
- SetOperandAt(1, right);
- SetSuccessorAt(0, true_target);
- SetSuccessorAt(1, false_target);
- }
-
DECLARE_INSTRUCTION_FACTORY_P2(HCompareObjectEqAndBranch, HValue*, HValue*);
DECLARE_INSTRUCTION_FACTORY_P4(HCompareObjectEqAndBranch, HValue*, HValue*,
HBasicBlock*, HBasicBlock*);
@@ -4373,6 +4357,23 @@ class HCompareObjectEqAndBranch : public HTemplateControlInstruction<2, 2> {
}
DECLARE_CONCRETE_INSTRUCTION(CompareObjectEqAndBranch)
+
+ private:
+ HCompareObjectEqAndBranch(HValue* left,
+ HValue* right,
+ HBasicBlock* true_target = NULL,
+ HBasicBlock* false_target = NULL) {
+ ASSERT(!left->IsConstant() ||
+ (!HConstant::cast(left)->HasInteger32Value() ||
+ HConstant::cast(left)->HasSmiValue()));
+ ASSERT(!right->IsConstant() ||
+ (!HConstant::cast(right)->HasInteger32Value() ||
+ HConstant::cast(right)->HasSmiValue()));
+ SetOperandAt(0, left);
+ SetOperandAt(1, right);
+ SetSuccessorAt(0, true_target);
+ SetSuccessorAt(1, false_target);
+ }
};
diff --git a/chromium/v8/src/hydrogen.cc b/chromium/v8/src/hydrogen.cc
index cdf69e7c72c..c40d2e77ffc 100644
--- a/chromium/v8/src/hydrogen.cc
+++ b/chromium/v8/src/hydrogen.cc
@@ -7553,11 +7553,12 @@ bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
HValue* function = Top();
AddCheckConstantFunction(expr->holder(), function, function_map);
- Drop(1);
CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
HValue* receiver = Pop();
+ Drop(1); // Pop the function.
+
if (function_state()->outer() == NULL) {
HInstruction* elements = Add<HArgumentsElements>(false);
HInstruction* length = Add<HArgumentsLength>(elements);
diff --git a/chromium/v8/src/ia32/lithium-codegen-ia32.cc b/chromium/v8/src/ia32/lithium-codegen-ia32.cc
index df2d4c5294d..80369516dc8 100644
--- a/chromium/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/chromium/v8/src/ia32/lithium-codegen-ia32.cc
@@ -384,6 +384,9 @@ void LCodeGen::GenerateOsrPrologue() {
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+ if (!instr->IsLazyBailout() && !instr->IsGap()) {
+ safepoints_.BumpLastLazySafepointIndex();
+ }
if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
}
@@ -2358,7 +2361,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
- __ nop(); // Signals no inlined code.
}
diff --git a/chromium/v8/src/ia32/lithium-ia32.cc b/chromium/v8/src/ia32/lithium-ia32.cc
index aa35e9d6b40..5c92580c354 100644
--- a/chromium/v8/src/ia32/lithium-ia32.cc
+++ b/chromium/v8/src/ia32/lithium-ia32.cc
@@ -696,15 +696,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
- if (hinstr->HasObservableSideEffects()) {
- ASSERT(hinstr->next()->IsSimulate());
- HSimulate* sim = HSimulate::cast(hinstr->next());
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_.IsNone());
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = sim->ast_id();
- }
-
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -995,6 +986,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
chunk_->AddInstruction(clobber, current_block_);
}
chunk_->AddInstruction(instr, current_block_);
+
+ if (instr->IsCall()) {
+ HValue* hydrogen_value_for_lazy_bailout = current;
+ LInstruction* instruction_needing_environment = NULL;
+ if (current->HasObservableSideEffects()) {
+ HSimulate* sim = HSimulate::cast(current->next());
+ instruction_needing_environment = instr;
+ sim->ReplayEnvironment(current_block_->last_environment());
+ hydrogen_value_for_lazy_bailout = sim;
+ }
+ LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
+ bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
+ chunk_->AddInstruction(bailout, current_block_);
+ if (instruction_needing_environment != NULL) {
+ // Store the lazy deopt environment with the instruction if needed.
+ // Right now it is only used for LInstanceOfKnownGlobal.
+ instruction_needing_environment->
+ SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
+ }
+ }
}
current_instruction_ = old_current;
}
@@ -2692,22 +2703,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
-
- // If there is an instruction pending deoptimization environment create a
- // lazy bailout instruction to capture the environment.
- if (!pending_deoptimization_ast_id_.IsNone()) {
- ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
- LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
- LInstruction* result = AssignEnvironment(lazy_bailout);
- // Store the lazy deopt environment with the instruction if needed. Right
- // now it is only used for LInstanceOfKnownGlobal.
- instruction_pending_deoptimization_environment_->
- SetDeferredLazyDeoptimizationEnvironment(result->environment());
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = BailoutId::None();
- return result;
- }
-
return NULL;
}
diff --git a/chromium/v8/src/ia32/lithium-ia32.h b/chromium/v8/src/ia32/lithium-ia32.h
index ea4fef8a710..c865d8d0e9a 100644
--- a/chromium/v8/src/ia32/lithium-ia32.h
+++ b/chromium/v8/src/ia32/lithium-ia32.h
@@ -2767,9 +2767,7 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
current_block_(NULL),
next_block_(NULL),
argument_count_(0),
- allocator_(allocator),
- instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(BailoutId::None()) { }
+ allocator_(allocator) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2931,8 +2929,6 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
HBasicBlock* next_block_;
int argument_count_;
LAllocator* allocator_;
- LInstruction* instruction_pending_deoptimization_environment_;
- BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/chromium/v8/src/ic.cc b/chromium/v8/src/ic.cc
index fc1ca53290e..cd508707e7f 100644
--- a/chromium/v8/src/ic.cc
+++ b/chromium/v8/src/ic.cc
@@ -1746,14 +1746,15 @@ Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver,
transitioned_receiver_map =
ComputeTransitionedMap(receiver, store_mode);
}
- if (IsTransitionOfMonomorphicTarget(MapToType(transitioned_receiver_map))) {
+ if (receiver_map.is_identical_to(previous_receiver_map) ||
+ IsTransitionOfMonomorphicTarget(MapToType(transitioned_receiver_map))) {
// Element family is the same, use the "worst" case map.
store_mode = GetNonTransitioningStoreMode(store_mode);
return isolate()->stub_cache()->ComputeKeyedStoreElement(
transitioned_receiver_map, strict_mode(), store_mode);
} else if (*previous_receiver_map == receiver->map() &&
old_store_mode == STANDARD_STORE &&
- (IsGrowStoreMode(store_mode) ||
+ (store_mode == STORE_AND_GROW_NO_TRANSITION ||
store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS ||
store_mode == STORE_NO_TRANSITION_HANDLE_COW)) {
// A "normal" IC that handles stores can switch to a version that can
diff --git a/chromium/v8/src/mips/lithium-codegen-mips.cc b/chromium/v8/src/mips/lithium-codegen-mips.cc
index 3bf0d130829..423ff9f5058 100644
--- a/chromium/v8/src/mips/lithium-codegen-mips.cc
+++ b/chromium/v8/src/mips/lithium-codegen-mips.cc
@@ -256,6 +256,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
+void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+ if (!instr->IsLazyBailout() && !instr->IsGap()) {
+ safepoints_.BumpLastLazySafepointIndex();
+ }
+}
+
+
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
diff --git a/chromium/v8/src/mips/lithium-codegen-mips.h b/chromium/v8/src/mips/lithium-codegen-mips.h
index 9fbd336b1e6..71cc34fb8b4 100644
--- a/chromium/v8/src/mips/lithium-codegen-mips.h
+++ b/chromium/v8/src/mips/lithium-codegen-mips.h
@@ -191,6 +191,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
+ void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateDeoptJumpTable();
diff --git a/chromium/v8/src/mips/lithium-mips.cc b/chromium/v8/src/mips/lithium-mips.cc
index a441ba515a9..0358feeef55 100644
--- a/chromium/v8/src/mips/lithium-mips.cc
+++ b/chromium/v8/src/mips/lithium-mips.cc
@@ -636,15 +636,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
- if (hinstr->HasObservableSideEffects()) {
- ASSERT(hinstr->next()->IsSimulate());
- HSimulate* sim = HSimulate::cast(hinstr->next());
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_.IsNone());
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = sim->ast_id();
- }
-
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -929,6 +920,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
+
+ if (instr->IsCall()) {
+ HValue* hydrogen_value_for_lazy_bailout = current;
+ LInstruction* instruction_needing_environment = NULL;
+ if (current->HasObservableSideEffects()) {
+ HSimulate* sim = HSimulate::cast(current->next());
+ instruction_needing_environment = instr;
+ sim->ReplayEnvironment(current_block_->last_environment());
+ hydrogen_value_for_lazy_bailout = sim;
+ }
+ LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
+ bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
+ chunk_->AddInstruction(bailout, current_block_);
+ if (instruction_needing_environment != NULL) {
+ // Store the lazy deopt environment with the instruction if needed.
+ // Right now it is only used for LInstanceOfKnownGlobal.
+ instruction_needing_environment->
+ SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
+ }
+ }
}
current_instruction_ = old_current;
}
@@ -2505,21 +2516,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
-
- // If there is an instruction pending deoptimization environment create a
- // lazy bailout instruction to capture the environment.
- if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new(zone()) LLazyBailout;
- result = AssignEnvironment(result);
- // Store the lazy deopt environment with the instruction if needed. Right
- // now it is only used for LInstanceOfKnownGlobal.
- instruction_pending_deoptimization_environment_->
- SetDeferredLazyDeoptimizationEnvironment(result->environment());
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = BailoutId::None();
- return result;
- }
-
return NULL;
}
diff --git a/chromium/v8/src/mips/lithium-mips.h b/chromium/v8/src/mips/lithium-mips.h
index dbb78ea0846..8d34399057b 100644
--- a/chromium/v8/src/mips/lithium-mips.h
+++ b/chromium/v8/src/mips/lithium-mips.h
@@ -2729,9 +2729,7 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
next_block_(NULL),
argument_count_(0),
allocator_(allocator),
- position_(RelocInfo::kNoPosition),
- instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(BailoutId::None()) { }
+ position_(RelocInfo::kNoPosition) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2883,8 +2881,6 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
int argument_count_;
LAllocator* allocator_;
int position_;
- LInstruction* instruction_pending_deoptimization_environment_;
- BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/chromium/v8/src/runtime.cc b/chromium/v8/src/runtime.cc
index c909f34db17..8333380e83b 100644
--- a/chromium/v8/src/runtime.cc
+++ b/chromium/v8/src/runtime.cc
@@ -980,6 +980,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) {
Runtime::ArrayIdToTypeAndSize(arrayId, &array_type, &element_size);
Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
+ if (source->IsJSTypedArray() &&
+ JSTypedArray::cast(*source)->type() == array_type) {
+ length_obj = Handle<Object>(JSTypedArray::cast(*source)->length(), isolate);
+ }
size_t length = NumberToSize(isolate, *length_obj);
if ((length > static_cast<unsigned>(Smi::kMaxValue)) ||
diff --git a/chromium/v8/src/safepoint-table.h b/chromium/v8/src/safepoint-table.h
index ea35253ff84..cd094c55bf2 100644
--- a/chromium/v8/src/safepoint-table.h
+++ b/chromium/v8/src/safepoint-table.h
@@ -219,6 +219,9 @@ class SafepointTableBuilder BASE_EMBEDDED {
// Record deoptimization index for lazy deoptimization for the last
// outstanding safepoints.
void RecordLazyDeoptimizationIndex(int index);
+ void BumpLastLazySafepointIndex() {
+ last_lazy_safepoint_ = deopt_index_list_.length();
+ }
// Emit the safepoint table after the body. The number of bits per
// entry must be enough to hold all the pointer indexes.
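
BumpLastLazySafepointIndex is what the new GenerateBodyInstructionPre hooks call before every non-gap, non-lazy-bailout instruction in the codegen changes above. A self-contained toy model of the intended semantics (this is not the real SafepointTableBuilder):

    #include <cstddef>
    #include <vector>

    class ToySafepointTableBuilder {
     public:
      static constexpr int kNoDeoptIndex = -1;

      // One entry per recorded safepoint, in emission order.
      void DefineSafepoint() { deopt_index_list_.push_back(kNoDeoptIndex); }

      // Forget everything recorded so far: a later lazy deoptimization index
      // will only be attached to safepoints recorded after this point.
      void BumpLastLazySafepointIndex() {
        last_lazy_safepoint_ = deopt_index_list_.size();
      }

      // Attach |index| to all outstanding safepoints since the last bump,
      // i.e. to the safepoint of the call that immediately precedes the
      // lazy bailout being generated.
      void RecordLazyDeoptimizationIndex(int index) {
        while (last_lazy_safepoint_ < deopt_index_list_.size()) {
          deopt_index_list_[last_lazy_safepoint_++] = index;
        }
      }

     private:
      std::vector<int> deopt_index_list_;
      size_t last_lazy_safepoint_ = 0;
    };
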
diff --git a/chromium/v8/src/serialize.h b/chromium/v8/src/serialize.h
index ee9df39ad86..9229bad4061 100644
--- a/chromium/v8/src/serialize.h
+++ b/chromium/v8/src/serialize.h
@@ -60,7 +60,7 @@ const int kReferenceTypeShift = kReferenceIdBits;
const int kDebugRegisterBits = 4;
const int kDebugIdShift = kDebugRegisterBits;
-const int kDeoptTableSerializeEntryCount = 8;
+const int kDeoptTableSerializeEntryCount = 12;
// ExternalReferenceTable is a helper class that defines the relationship
// between external references and their encodings. It is used to build
diff --git a/chromium/v8/src/typedarray.js b/chromium/v8/src/typedarray.js
index 21dd9c82d14..0a06ebbdd42 100644
--- a/chromium/v8/src/typedarray.js
+++ b/chromium/v8/src/typedarray.js
@@ -49,7 +49,7 @@ endmacro
macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE)
function NAMEConstructByArrayBuffer(obj, buffer, byteOffset, length) {
- var bufferByteLength = buffer.byteLength;
+ var bufferByteLength = %ArrayBufferGetByteLength(buffer);
var offset;
if (IS_UNDEFINED(byteOffset)) {
offset = 0;
@@ -313,7 +313,7 @@ function DataViewConstructor(buffer, byteOffset, byteLength) { // length = 3
if (!IS_ARRAYBUFFER(buffer)) {
throw MakeTypeError('data_view_not_array_buffer', []);
}
- var bufferByteLength = buffer.byteLength;
+ var bufferByteLength = %ArrayBufferGetByteLength(buffer);
var offset = IS_UNDEFINED(byteOffset) ?
0 : ToPositiveInteger(byteOffset, 'invalid_data_view_offset');
if (offset > bufferByteLength) {
diff --git a/chromium/v8/src/version.cc b/chromium/v8/src/version.cc
index 17793d535d1..2d89ca8bd50 100644
--- a/chromium/v8/src/version.cc
+++ b/chromium/v8/src/version.cc
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 23
#define BUILD_NUMBER 17
-#define PATCH_LEVEL 22
+#define PATCH_LEVEL 28
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/chromium/v8/src/x64/lithium-codegen-x64.cc b/chromium/v8/src/x64/lithium-codegen-x64.cc
index ff6f1e6ef3d..80024e78e17 100644
--- a/chromium/v8/src/x64/lithium-codegen-x64.cc
+++ b/chromium/v8/src/x64/lithium-codegen-x64.cc
@@ -268,6 +268,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
+void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+ if (!instr->IsLazyBailout() && !instr->IsGap()) {
+ safepoints_.BumpLastLazySafepointIndex();
+ }
+}
+
+
bool LCodeGen::GenerateJumpTable() {
Label needs_frame;
if (jump_table_.length() > 0) {
@@ -1953,7 +1960,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
- __ nop(); // Signals no inlined code.
}
@@ -3601,10 +3607,11 @@ void LCodeGen::DoMathRound(LMathRound* instr) {
const XMMRegister xmm_scratch = double_scratch0();
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->value());
+ XMMRegister input_temp = ToDoubleRegister(instr->temp());
static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5
static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5
- Label done, round_to_zero, below_one_half, do_not_compensate, restore;
+ Label done, round_to_zero, below_one_half;
Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
__ movq(kScratchRegister, one_half);
__ movq(xmm_scratch, kScratchRegister);
@@ -3628,21 +3635,19 @@ void LCodeGen::DoMathRound(LMathRound* instr) {
// CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
// compare and compensate.
- __ movq(kScratchRegister, input_reg); // Back up input_reg.
- __ subsd(input_reg, xmm_scratch);
- __ cvttsd2si(output_reg, input_reg);
+ __ movq(input_temp, input_reg); // Do not alter input_reg.
+ __ subsd(input_temp, xmm_scratch);
+ __ cvttsd2si(output_reg, input_temp);
// Catch minint due to overflow, and to prevent overflow when compensating.
__ cmpl(output_reg, Immediate(0x80000000));
__ RecordComment("D2I conversion overflow");
DeoptimizeIf(equal, instr->environment());
__ Cvtlsi2sd(xmm_scratch, output_reg);
- __ ucomisd(input_reg, xmm_scratch);
- __ j(equal, &restore, Label::kNear);
+ __ ucomisd(xmm_scratch, input_temp);
+ __ j(equal, &done, dist);
__ subl(output_reg, Immediate(1));
// No overflow because we already ruled out minint.
- __ bind(&restore);
- __ movq(input_reg, kScratchRegister); // Restore input_reg.
__ jmp(&done, dist);
__ bind(&round_to_zero);
@@ -4124,44 +4129,51 @@ void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- if (instr->hydrogen()->skip_check()) return;
+ HBoundsCheck* hinstr = instr->hydrogen();
+ if (hinstr->skip_check()) return;
+
+ Representation representation = hinstr->length()->representation();
+ ASSERT(representation.Equals(hinstr->index()->representation()));
+ ASSERT(representation.IsSmiOrInteger32());
if (instr->length()->IsRegister()) {
Register reg = ToRegister(instr->length());
- if (!instr->hydrogen()->length()->representation().IsSmi()) {
- __ AssertZeroExtended(reg);
- }
+
if (instr->index()->IsConstantOperand()) {
int32_t constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
- if (instr->hydrogen()->length()->representation().IsSmi()) {
+ if (representation.IsSmi()) {
__ Cmp(reg, Smi::FromInt(constant_index));
} else {
- __ cmpq(reg, Immediate(constant_index));
+ __ cmpl(reg, Immediate(constant_index));
}
} else {
Register reg2 = ToRegister(instr->index());
- if (!instr->hydrogen()->index()->representation().IsSmi()) {
- __ AssertZeroExtended(reg2);
+ if (representation.IsSmi()) {
+ __ cmpq(reg, reg2);
+ } else {
+ __ cmpl(reg, reg2);
}
- __ cmpq(reg, reg2);
}
} else {
Operand length = ToOperand(instr->length());
if (instr->index()->IsConstantOperand()) {
int32_t constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
- if (instr->hydrogen()->length()->representation().IsSmi()) {
+ if (representation.IsSmi()) {
__ Cmp(length, Smi::FromInt(constant_index));
} else {
- __ cmpq(length, Immediate(constant_index));
+ __ cmpl(length, Immediate(constant_index));
}
} else {
- __ cmpq(length, ToRegister(instr->index()));
+ if (representation.IsSmi()) {
+ __ cmpq(length, ToRegister(instr->index()));
+ } else {
+ __ cmpl(length, ToRegister(instr->index()));
+ }
}
}
- Condition condition =
- instr->hydrogen()->allow_equality() ? below : below_equal;
+ Condition condition = hinstr->allow_equality() ? below : below_equal;
ApplyCheckIf(condition, instr);
}
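
One reading of the DoBoundsCheck hunk above: when length and index are untagged int32 values held in 64-bit registers, only their low 32 bits are significant, so the check now uses cmpl instead of cmpq (and drops AssertZeroExtended), while smi operands keep the 64-bit cmpq. A self-contained sketch of the comparison being performed (hypothetical helper, not V8 code):

    #include <cstdint>

    // Unsigned 32-bit bounds check matching the below/below_equal conditions:
    // the check passes when index < length, or index <= length when equality
    // is allowed.
    static bool BoundsCheckPasses(uint64_t index_reg, uint64_t length_reg,
                                  bool allow_equality) {
      uint32_t index = static_cast<uint32_t>(index_reg);    // cmpl: low 32 bits
      uint32_t length = static_cast<uint32_t>(length_reg);
      return allow_equality ? index <= length : index < length;
    }
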
diff --git a/chromium/v8/src/x64/lithium-codegen-x64.h b/chromium/v8/src/x64/lithium-codegen-x64.h
index 53d26460b32..63bfe187f14 100644
--- a/chromium/v8/src/x64/lithium-codegen-x64.h
+++ b/chromium/v8/src/x64/lithium-codegen-x64.h
@@ -159,6 +159,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
+ void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateJumpTable();
diff --git a/chromium/v8/src/x64/lithium-x64.cc b/chromium/v8/src/x64/lithium-x64.cc
index 473e93dde6f..449eb2b6a11 100644
--- a/chromium/v8/src/x64/lithium-x64.cc
+++ b/chromium/v8/src/x64/lithium-x64.cc
@@ -640,15 +640,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
- if (hinstr->HasObservableSideEffects()) {
- ASSERT(hinstr->next()->IsSimulate());
- HSimulate* sim = HSimulate::cast(hinstr->next());
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_.IsNone());
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = sim->ast_id();
- }
-
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -924,6 +915,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
+
+ if (instr->IsCall()) {
+ HValue* hydrogen_value_for_lazy_bailout = current;
+ LInstruction* instruction_needing_environment = NULL;
+ if (current->HasObservableSideEffects()) {
+ HSimulate* sim = HSimulate::cast(current->next());
+ instruction_needing_environment = instr;
+ sim->ReplayEnvironment(current_block_->last_environment());
+ hydrogen_value_for_lazy_bailout = sim;
+ }
+ LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
+ bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
+ chunk_->AddInstruction(bailout, current_block_);
+ if (instruction_needing_environment != NULL) {
+ // Store the lazy deopt environment with the instruction if needed.
+ // Right now it is only used for LInstanceOfKnownGlobal.
+ instruction_needing_environment->
+ SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
+ }
+ }
}
current_instruction_ = old_current;
}
@@ -1208,8 +1219,9 @@ LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
- LOperand* input = UseRegisterAtStart(instr->value());
- LMathRound* result = new(zone()) LMathRound(input);
+ LOperand* input = UseRegister(instr->value());
+ LOperand* temp = FixedTemp(xmm4);
+ LMathRound* result = new(zone()) LMathRound(input, temp);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -2528,21 +2540,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
-
- // If there is an instruction pending deoptimization environment create a
- // lazy bailout instruction to capture the environment.
- if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
- LInstruction* result = AssignEnvironment(lazy_bailout);
- // Store the lazy deopt environment with the instruction if needed. Right
- // now it is only used for LInstanceOfKnownGlobal.
- instruction_pending_deoptimization_environment_->
- SetDeferredLazyDeoptimizationEnvironment(result->environment());
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = BailoutId::None();
- return result;
- }
-
return NULL;
}
diff --git a/chromium/v8/src/x64/lithium-x64.h b/chromium/v8/src/x64/lithium-x64.h
index 44bd992f7dd..dc15c97c44c 100644
--- a/chromium/v8/src/x64/lithium-x64.h
+++ b/chromium/v8/src/x64/lithium-x64.h
@@ -730,13 +730,15 @@ class LMathFloor V8_FINAL : public LTemplateInstruction<1, 1, 0> {
};
-class LMathRound V8_FINAL : public LTemplateInstruction<1, 1, 0> {
+class LMathRound V8_FINAL : public LTemplateInstruction<1, 1, 1> {
public:
- explicit LMathRound(LOperand* value) {
+ explicit LMathRound(LOperand* value, LOperand* temp) {
inputs_[0] = value;
+ temps_[0] = temp;
}
LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(MathRound, "math-round")
DECLARE_HYDROGEN_ACCESSOR(UnaryMathOperation)
@@ -2685,9 +2687,7 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
current_block_(NULL),
next_block_(NULL),
argument_count_(0),
- allocator_(allocator),
- instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(BailoutId::None()) { }
+ allocator_(allocator) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2838,8 +2838,6 @@ class LChunkBuilder V8_FINAL BASE_EMBEDDED {
HBasicBlock* next_block_;
int argument_count_;
LAllocator* allocator_;
- LInstruction* instruction_pending_deoptimization_environment_;
- BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};