Diffstat (limited to 'deps/v8/src/ia32/full-codegen-ia32.cc')
-rw-r--r--  deps/v8/src/ia32/full-codegen-ia32.cc | 361
1 file changed, 214 insertions(+), 147 deletions(-)
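Note: the dominant change below replaces the monolithic RecordWrite helper with
RecordWriteContextSlot/RecordWriteField variants that take an explicit
kDontSaveFPRegs argument plus remembered-set/smi-check flags. As a minimal,
self-contained sketch (toy code, not V8's implementation; all names here are
illustrative) of what such a write barrier accomplishes for a generational GC:

    #include <unordered_set>

    struct HeapObject {
      bool in_new_space;  // toy stand-in for V8's page-based space check
    };

    struct Heap {
      // Old-space slots known to point into new space; a minor GC scans only
      // these instead of the whole old generation.
      std::unordered_set<HeapObject**> remembered_set;

      // Barrier to run after "*slot = value" for a slot in old space.
      void RecordWrite(HeapObject** slot, HeapObject* value) {
        if (value != nullptr && value->in_new_space) {
          remembered_set.insert(slot);
        }
      }
    };

    int main() {
      Heap heap;
      HeapObject young{true};
      HeapObject* field = nullptr;       // imagine this slot lives in old space
      field = &young;                    // the store...
      heap.RecordWrite(&field, &young);  // ...followed by the barrier
      return heap.remembered_set.size() == 1 ? 0 : 1;
    }

The kDontSaveFPRegs argument visible throughout the diff tells the barrier's
slow path that it need not preserve XMM registers at these call sites.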
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index 81c9ccb128..33d5cabad7 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -138,7 +138,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   // function calls.
   if (info->is_strict_mode() || info->is_native()) {
     Label ok;
-    __ test(ecx, Operand(ecx));
+    __ test(ecx, ecx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
     int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
@@ -147,6 +147,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     __ bind(&ok);
   }
 
+  // Open a frame scope to indicate that there is a frame on the stack.  The
+  // MANUAL indicates that the scope shouldn't actually generate code to set up
+  // the frame (that is done below).
+  FrameScope frame_scope(masm_, StackFrame::MANUAL);
+
   __ push(ebp);  // Caller's frame pointer.
   __ mov(ebp, esp);
   __ push(esi);  // Callee's context.
@@ -200,11 +205,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
         // Store it in the context.
         int context_offset = Context::SlotOffset(var->index());
         __ mov(Operand(esi, context_offset), eax);
-        // Update the write barrier. This clobbers all involved
-        // registers, so we have use a third register to avoid
-        // clobbering esi.
-        __ mov(ecx, esi);
-        __ RecordWrite(ecx, context_offset, eax, ebx);
+        // Update the write barrier. This clobbers eax and ebx.
+        __ RecordWriteContextSlot(esi,
+                                  context_offset,
+                                  eax,
+                                  ebx,
+                                  kDontSaveFPRegs);
       }
     }
   }
@@ -260,7 +266,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     // constant.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
       int ignored = 0;
-      EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
+      EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
     }
     VisitDeclarations(scope()->declarations());
   }
@@ -365,10 +371,10 @@ void FullCodeGenerator::EmitReturnSequence() {
 
 void FullCodeGenerator::verify_stack_height() {
   ASSERT(FLAG_verify_stack_height);
-  __ sub(Operand(ebp), Immediate(kPointerSize * stack_height()));
-  __ cmp(ebp, Operand(esp));
+  __ sub(ebp, Immediate(kPointerSize * stack_height()));
+  __ cmp(ebp, esp);
   __ Assert(equal, "Full codegen stack height not as expected.");
-  __ add(Operand(ebp), Immediate(kPointerSize * stack_height()));
+  __ add(ebp, Immediate(kPointerSize * stack_height()));
 }
 
@@ -597,7 +603,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
     ToBooleanStub stub(result_register());
     __ push(result_register());
     __ CallStub(&stub, condition->test_id());
-    __ test(result_register(), Operand(result_register()));
+    __ test(result_register(), result_register());
     // The stub returns nonzero for true.
    Split(not_zero, if_true, if_false, fall_through);
   }
@@ -661,11 +667,12 @@ void FullCodeGenerator::SetVar(Variable* var,
   ASSERT(!scratch1.is(src));
   MemOperand location = VarOperand(var, scratch0);
   __ mov(location, src);
+  // Emit the write barrier code if the location is in the heap.
   if (var->IsContextSlot()) {
     int offset = Context::SlotOffset(var->index());
     ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
-    __ RecordWrite(scratch0, offset, src, scratch1);
+    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
   }
 }
 
@@ -697,7 +704,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
 
 
 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
-                                        Variable::Mode mode,
+                                        VariableMode mode,
                                         FunctionLiteral* function,
                                         int* global_count) {
   // If it was not possible to allocate the variable at compile time, we
@@ -715,7 +722,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ mov(StackOperand(variable), result_register());
-      } else if (mode == Variable::CONST || mode == Variable::LET) {
+      } else if (mode == CONST || mode == LET) {
         Comment cmnt(masm_, "[ Declaration");
         __ mov(StackOperand(variable),
                Immediate(isolate()->factory()->the_hole_value()));
@@ -738,11 +745,16 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ mov(ContextOperand(esi, variable->index()), result_register());
-        int offset = Context::SlotOffset(variable->index());
-        __ mov(ebx, esi);
-        __ RecordWrite(ebx, offset, result_register(), ecx);
+        // We know that we have written a function, which is not a smi.
+        __ RecordWriteContextSlot(esi,
+                                  Context::SlotOffset(variable->index()),
+                                  result_register(),
+                                  ecx,
+                                  kDontSaveFPRegs,
+                                  EMIT_REMEMBERED_SET,
+                                  OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == Variable::CONST || mode == Variable::LET) {
+      } else if (mode == CONST || mode == LET) {
         Comment cmnt(masm_, "[ Declaration");
         __ mov(ContextOperand(esi, variable->index()),
                Immediate(isolate()->factory()->the_hole_value()));
@@ -756,10 +768,8 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       __ push(esi);
       __ push(Immediate(variable->name()));
       // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == Variable::VAR ||
-             mode == Variable::CONST ||
-             mode == Variable::LET);
-      PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+      ASSERT(mode == VAR || mode == CONST || mode == LET);
+      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
       __ push(Immediate(Smi::FromInt(attr)));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
@@ -768,7 +778,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       increment_stack_height(3);
       if (function != NULL) {
         VisitForStackValue(function);
-      } else if (mode == Variable::CONST || mode == Variable::LET) {
+      } else if (mode == CONST || mode == LET) {
         __ push(Immediate(isolate()->factory()->the_hole_value()));
         increment_stack_height();
       } else {
@@ -835,10 +845,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     if (inline_smi_code) {
       Label slow_case;
       __ mov(ecx, edx);
-      __ or_(ecx, Operand(eax));
+      __ or_(ecx, eax);
       patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
 
-      __ cmp(edx, Operand(eax));
+      __ cmp(edx, eax);
       __ j(not_equal, &next_test);
       __ Drop(1);  // Switch value is no longer needed.
       __ jmp(clause->body_target());
@@ -850,7 +860,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
     __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
     patch_site.EmitPatchInfo();
-    __ test(eax, Operand(eax));
+    __ test(eax, eax);
     __ j(not_equal, &next_test);
     __ Drop(1);  // Switch value is no longer needed.
     __ jmp(clause->body_target());
@@ -939,7 +949,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
 
   // For all objects but the receiver, check that the cache is empty.
   Label check_prototype;
-  __ cmp(ecx, Operand(eax));
+  __ cmp(ecx, eax);
   __ j(equal, &check_prototype, Label::kNear);
   __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
   __ cmp(edx, isolate()->factory()->empty_fixed_array());
@@ -1021,9 +1031,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ push(ecx);  // Enumerable.
   __ push(ebx);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
-  __ test(eax, Operand(eax));
+  __ test(eax, eax);
   __ j(equal, loop_statement.continue_label());
-  __ mov(ebx, Operand(eax));
+  __ mov(ebx, eax);
 
   // Update the 'each' property or variable from the possibly filtered
   // entry in register ebx.
@@ -1047,7 +1057,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
 
   // Remove the pointers stored on the stack.
   __ bind(loop_statement.break_label());
-  __ add(Operand(esp), Immediate(5 * kPointerSize));
+  __ add(esp, Immediate(5 * kPointerSize));
   decrement_stack_height(ForIn::kElementCount);
 
   // Exit and decrement the loop depth.
@@ -1189,16 +1199,22 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
   // introducing variables. In those cases, we do not want to
   // perform a runtime call for all variables in the scope
   // containing the eval.
-  if (var->mode() == Variable::DYNAMIC_GLOBAL) {
+  if (var->mode() == DYNAMIC_GLOBAL) {
     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
     __ jmp(done);
-  } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
+  } else if (var->mode() == DYNAMIC_LOCAL) {
     Variable* local = var->local_if_not_shadowed();
     __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
-    if (local->mode() == Variable::CONST) {
+    if (local->mode() == CONST ||
+        local->mode() == LET) {
       __ cmp(eax, isolate()->factory()->the_hole_value());
       __ j(not_equal, done);
-      __ mov(eax, isolate()->factory()->undefined_value());
+      if (local->mode() == CONST) {
+        __ mov(eax, isolate()->factory()->undefined_value());
+      } else {  // LET
+        __ push(Immediate(var->name()));
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+      }
     }
     __ jmp(done);
   }
@@ -1231,7 +1247,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
       Comment cmnt(masm_, var->IsContextSlot()
                               ? "Context variable"
                               : "Stack variable");
-      if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
+      if (var->mode() != LET && var->mode() != CONST) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
@@ -1239,10 +1255,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
         GetVar(eax, var);
         __ cmp(eax, isolate()->factory()->the_hole_value());
         __ j(not_equal, &done, Label::kNear);
-        if (var->mode() == Variable::LET) {
+        if (var->mode() == LET) {
           __ push(Immediate(var->name()));
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
-        } else {  // Variable::CONST
+        } else {  // CONST
           __ mov(eax, isolate()->factory()->undefined_value());
         }
         __ bind(&done);
@@ -1480,8 +1496,18 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
     __ mov(FieldOperand(ebx, offset), result_register());
 
+    Label no_map_change;
+    __ JumpIfSmi(result_register(), &no_map_change);
     // Update the write barrier for the array store.
-    __ RecordWrite(ebx, offset, result_register(), ecx);
+    __ RecordWriteField(ebx, offset, result_register(), ecx,
+                        kDontSaveFPRegs,
+                        EMIT_REMEMBERED_SET,
+                        OMIT_SMI_CHECK);
+    __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
+    __ CheckFastSmiOnlyElements(edi, &no_map_change, Label::kNear);
+    __ push(Operand(esp, 0));
+    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
+    __ bind(&no_map_change);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
@@ -1641,7 +1667,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   __ pop(edx);
   decrement_stack_height();
   __ mov(ecx, eax);
-  __ or_(eax, Operand(edx));
+  __ or_(eax, edx);
   JumpPatchSite patch_site(masm_);
   patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
 
@@ -1691,32 +1717,32 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
       break;
     }
     case Token::ADD:
-      __ add(eax, Operand(ecx));
+      __ add(eax, ecx);
       __ j(overflow, &stub_call);
       break;
     case Token::SUB:
-      __ sub(eax, Operand(ecx));
+      __ sub(eax, ecx);
       __ j(overflow, &stub_call);
       break;
     case Token::MUL: {
      __ SmiUntag(eax);
-      __ imul(eax, Operand(ecx));
+      __ imul(eax, ecx);
       __ j(overflow, &stub_call);
-      __ test(eax, Operand(eax));
+      __ test(eax, eax);
       __ j(not_zero, &done, Label::kNear);
       __ mov(ebx, edx);
-      __ or_(ebx, Operand(ecx));
+      __ or_(ebx, ecx);
       __ j(negative, &stub_call);
       break;
     }
     case Token::BIT_OR:
-      __ or_(eax, Operand(ecx));
+      __ or_(eax, ecx);
       break;
     case Token::BIT_AND:
-      __ and_(eax, Operand(ecx));
+      __ and_(eax, ecx);
       break;
     case Token::BIT_XOR:
-      __ xor_(eax, Operand(ecx));
+      __ xor_(eax, ecx);
       break;
     default:
       UNREACHABLE();
@@ -1838,7 +1864,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
     }
 
-  } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+  } else if (var->mode() == LET && op != Token::INIT_LET) {
     // Non-initializing assignment to let variable needs a write barrier.
     if (var->IsLookupSlot()) {
       __ push(eax);  // Value.
@@ -1859,11 +1885,12 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       __ mov(location, eax);
       if (var->IsContextSlot()) {
         __ mov(edx, eax);
-        __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
       }
     }
 
-  } else if (var->mode() != Variable::CONST) {
+  } else if (var->mode() != CONST) {
     // Assignment to var or initializing assignment to let.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, ecx);
@@ -1877,7 +1904,8 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       __ mov(location, eax);
       if (var->IsContextSlot()) {
         __ mov(edx, eax);
-        __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
       }
     } else {
       ASSERT(var->IsLookupSlot());
@@ -2069,8 +2097,29 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
   }
   // Record source position for debugger.
   SetSourcePosition(expr->position());
+
+  // Record call targets in unoptimized code, but not in the snapshot.
+  bool record_call_target = !Serializer::enabled();
+  if (record_call_target) {
+    flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+  }
   CallFunctionStub stub(arg_count, flags);
   __ CallStub(&stub);
+  if (record_call_target) {
+    // There is a one element cache in the instruction stream.
+#ifdef DEBUG
+    int return_site_offset = masm()->pc_offset();
+#endif
+    Handle<Object> uninitialized =
+        CallFunctionStub::UninitializedSentinel(isolate());
+    Handle<JSGlobalPropertyCell> cell =
+        isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+    __ test(eax, Immediate(cell));
+    // Patching code in the stub assumes the opcode is 1 byte and there is
+    // word for a pointer in the operand.
+    ASSERT(masm()->pc_offset() - return_site_offset >= 1 + kPointerSize);
+  }
+
   RecordJSReturnSite(expr);
   // Restore context register.
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2094,10 +2143,8 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
 
   // Push the strict mode flag. In harmony mode every eval call
   // is a strict mode eval call.
-  StrictModeFlag strict_mode = strict_mode_flag();
-  if (FLAG_harmony_block_scoping) {
-    strict_mode = kStrictMode;
-  }
+  StrictModeFlag strict_mode =
+      FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
   __ push(Immediate(Smi::FromInt(strict_mode)));
 
   __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
@@ -2140,7 +2187,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     // context lookup in the runtime system.
     Label done;
     Variable* var = proxy->var();
-    if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
+    if (!var->IsUnallocated() && var->mode() == DYNAMIC_GLOBAL) {
      Label slow;
      EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
      // Push the function and resolve eval.
@@ -2438,9 +2485,9 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   STATIC_ASSERT(kPointerSize == 4);
   __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
   // Calculate location of the first key name.
-  __ add(Operand(ebx),
-         Immediate(FixedArray::kHeaderSize +
-                   DescriptorArray::kFirstIndex * kPointerSize));
+  __ add(ebx,
+         Immediate(FixedArray::kHeaderSize +
+                   DescriptorArray::kFirstIndex * kPointerSize));
   // Loop through all the keys in the descriptor array. If one of these is the
   // symbol valueOf the result is false.
   Label entry, loop;
@@ -2449,9 +2496,9 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   __ mov(edx, FieldOperand(ebx, 0));
   __ cmp(edx, FACTORY->value_of_symbol());
   __ j(equal, if_false);
-  __ add(Operand(ebx), Immediate(kPointerSize));
+  __ add(ebx, Immediate(kPointerSize));
   __ bind(&entry);
-  __ cmp(ebx, Operand(ecx));
+  __ cmp(ebx, ecx);
   __ j(not_equal, &loop);
 
   // Reload map as register ebx was used as temporary above.
@@ -2591,7 +2638,7 @@ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
 
   __ pop(ebx);
   decrement_stack_height();
-  __ cmp(eax, Operand(ebx));
+  __ cmp(eax, ebx);
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
@@ -2647,20 +2694,24 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
 
   // Check that the object is a JS object but take special care of JS
   // functions to make sure they have 'Function' as their class.
+  // Assume that there are only two callable types, and one of them is at
+  // either end of the type range for JS object types. Saves extra comparisons.
+  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
   // Map is now in eax.
   __ j(below, &null);
-
-  // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
-  // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
-  // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
-  STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
-  STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
-                LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
-  __ CmpInstanceType(eax, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
-  __ j(above_equal, &function);
-
-  // Check if the constructor in the map is a function.
+  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
+                FIRST_SPEC_OBJECT_TYPE + 1);
+  __ j(equal, &function);
+
+  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
+  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
+                LAST_SPEC_OBJECT_TYPE - 1);
+  __ j(equal, &function);
+  // Assume that there is no larger type.
+  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
+
+  // Check if the constructor in the map is a JS function.
   __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
   __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
   __ j(not_equal, &non_function_constructor);
@@ -2741,8 +2792,8 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
   if (CpuFeatures::IsSupported(SSE2)) {
     CpuFeatures::Scope fscope(SSE2);
     __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
-    __ movd(xmm1, Operand(ebx));
-    __ movd(xmm0, Operand(eax));
+    __ movd(xmm1, ebx);
+    __ movd(xmm0, eax);
     __ cvtss2sd(xmm1, xmm1);
     __ xorps(xmm0, xmm1);
     __ subsd(xmm0, xmm1);
@@ -2843,10 +2894,11 @@ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
 
   // Store the value.
   __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
+
   // Update the write barrier.  Save the value as it will be
   // overwritten by the write barrier code and is needed afterward.
   __ mov(edx, eax);
-  __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx);
+  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
 
   __ bind(&done);
   context()->Plug(eax);
@@ -3119,14 +3171,14 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
   __ mov(index_1, Operand(esp, 1 * kPointerSize));
   __ mov(index_2, Operand(esp, 0));
   __ mov(temp, index_1);
-  __ or_(temp, Operand(index_2));
+  __ or_(temp, index_2);
   __ JumpIfNotSmi(temp, &slow_case);
 
   // Check that both indices are valid.
   __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
-  __ cmp(temp, Operand(index_1));
+  __ cmp(temp, index_1);
   __ j(below_equal, &slow_case);
-  __ cmp(temp, Operand(index_2));
+  __ cmp(temp, index_2);
   __ j(below_equal, &slow_case);
 
   // Bring addresses into index1 and index2.
@@ -3139,16 +3191,35 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
   __ mov(Operand(index_2, 0), object);
   __ mov(Operand(index_1, 0), temp);
 
-  Label new_space;
-  __ InNewSpace(elements, temp, equal, &new_space);
+  Label no_remembered_set;
+  __ CheckPageFlag(elements,
+                   temp,
+                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                   not_zero,
+                   &no_remembered_set,
+                   Label::kNear);
+  // Possible optimization: do a check that both values are Smis
+  // (or them and test against Smi mask.)
+
+  // We are swapping two objects in an array and the incremental marker never
+  // pauses in the middle of scanning a single object.  Therefore the
+  // incremental marker is not disturbed, so we don't need to call the
+  // RecordWrite stub that notifies the incremental marker.
+  __ RememberedSetHelper(elements,
+                         index_1,
+                         temp,
+                         kDontSaveFPRegs,
+                         MacroAssembler::kFallThroughAtEnd);
+  __ RememberedSetHelper(elements,
+                         index_2,
+                         temp,
+                         kDontSaveFPRegs,
+                         MacroAssembler::kFallThroughAtEnd);
+
+  __ bind(&no_remembered_set);
 
-  __ mov(object, elements);
-  __ RecordWriteHelper(object, index_1, temp);
-  __ RecordWriteHelper(elements, index_2, temp);
-
-  __ bind(&new_space);
   // We are done. Drop elements from the stack, and return undefined.
-  __ add(Operand(esp), Immediate(3 * kPointerSize));
+  __ add(esp, Immediate(3 * kPointerSize));
   __ mov(eax, isolate()->factory()->undefined_value());
   __ jmp(&done);
 
@@ -3221,11 +3292,11 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
   __ pop(left);
 
   Label done, fail, ok;
-  __ cmp(left, Operand(right));
+  __ cmp(left, right);
   __ j(equal, &ok);
   // Fail if either is a non-HeapObject.
   __ mov(tmp, left);
-  __ and_(Operand(tmp), right);
+  __ and_(tmp, right);
   __ JumpIfSmi(tmp, &fail);
   __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
   __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
@@ -3316,7 +3387,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   Operand separator_operand = Operand(esp, 2 * kPointerSize);
   Operand result_operand = Operand(esp, 1 * kPointerSize);
   Operand array_length_operand = Operand(esp, 0);
-  __ sub(Operand(esp), Immediate(2 * kPointerSize));
+  __ sub(esp, Immediate(2 * kPointerSize));
   __ cld();
   // Check that the array is a JSArray
   __ JumpIfSmi(array, &bailout);
@@ -3352,7 +3423,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   // Live loop registers: index, array_length, string,
   //                      scratch, string_length, elements.
   if (FLAG_debug_code) {
-    __ cmp(index, Operand(array_length));
+    __ cmp(index, array_length);
     __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
   }
   __ bind(&loop);
@@ -3370,8 +3441,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ add(string_length,
          FieldOperand(string, SeqAsciiString::kLengthOffset));
   __ j(overflow, &bailout);
-  __ add(Operand(index), Immediate(1));
-  __ cmp(index, Operand(array_length));
+  __ add(index, Immediate(1));
+  __ cmp(index, array_length);
   __ j(less, &loop);
 
   // If array_length is 1, return elements[0], a string.
@@ -3405,10 +3476,10 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   // to string_length.
   __ mov(scratch, separator_operand);
   __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
-  __ sub(string_length, Operand(scratch));  // May be negative, temporarily.
+  __ sub(string_length, scratch);  // May be negative, temporarily.
   __ imul(scratch, array_length_operand);
   __ j(overflow, &bailout);
-  __ add(string_length, Operand(scratch));
+  __ add(string_length, scratch);
   __ j(overflow, &bailout);
 
   __ shr(string_length, 1);
@@ -3449,7 +3520,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ lea(string,
          FieldOperand(string, SeqAsciiString::kHeaderSize));
   __ CopyBytes(string, result_pos, string_length, scratch);
-  __ add(Operand(index), Immediate(1));
+  __ add(index, Immediate(1));
   __ bind(&loop_1_condition);
   __ cmp(index, array_length_operand);
   __ j(less, &loop_1);  // End while (index < length).
@@ -3490,7 +3561,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ lea(string,
          FieldOperand(string, SeqAsciiString::kHeaderSize));
   __ CopyBytes(string, result_pos, string_length, scratch);
-  __ add(Operand(index), Immediate(1));
+  __ add(index, Immediate(1));
   __ cmp(index, array_length_operand);
   __ j(less, &loop_2);  // End while (index < length).
 
@@ -3531,7 +3602,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ lea(string,
          FieldOperand(string, SeqAsciiString::kHeaderSize));
   __ CopyBytes(string, result_pos, string_length, scratch);
-  __ add(Operand(index), Immediate(1));
+  __ add(index, Immediate(1));
   __ cmp(index, array_length_operand);
   __ j(less, &loop_3);  // End while (index < length).
 
@@ -3543,7 +3614,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ bind(&done);
   __ mov(eax, result_operand);
   // Drop temp values from the stack, and restore context register.
-  __ add(Operand(esp), Immediate(3 * kPointerSize));
+  __ add(esp, Immediate(3 * kPointerSize));
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   decrement_stack_height();
 
@@ -3823,9 +3894,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
 
   if (ShouldInlineSmiCase(expr->op())) {
     if (expr->op() == Token::INC) {
-      __ add(Operand(eax), Immediate(Smi::FromInt(1)));
+      __ add(eax, Immediate(Smi::FromInt(1)));
     } else {
-      __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
+      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
     __ j(overflow, &stub_call, Label::kNear);
     // We could eliminate this smi check if we split the code at
@@ -3835,9 +3906,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ bind(&stub_call);
     // Call stub. Undo operation first.
     if (expr->op() == Token::INC) {
-      __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
+      __ sub(eax, Immediate(Smi::FromInt(1)));
     } else {
-      __ add(Operand(eax), Immediate(Smi::FromInt(1)));
+      __ add(eax, Immediate(Smi::FromInt(1)));
     }
   }
 
@@ -3956,10 +4027,14 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
 
 
 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
-                                                 Handle<String> check,
-                                                 Label* if_true,
-                                                 Label* if_false,
-                                                 Label* fall_through) {
+                                                 Handle<String> check) {
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  Label* fall_through = NULL;
+  context()->PrepareTest(&materialize_true, &materialize_false,
+                         &if_true, &if_false, &fall_through);
+
   { AccumulatorValueContext context(this);
     VisitForTypeofValue(expr);
   }
@@ -3998,8 +4073,11 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
     Split(not_zero, if_true, if_false, fall_through);
   } else if (check->Equals(isolate()->heap()->function_symbol())) {
     __ JumpIfSmi(eax, if_false);
-    __ CmpObjectType(eax, FIRST_CALLABLE_SPEC_OBJECT_TYPE, edx);
-    Split(above_equal, if_true, if_false, fall_through);
+    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
+    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
+    __ j(equal, if_true);
+    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
+    Split(equal, if_true, if_false, fall_through);
   } else if (check->Equals(isolate()->heap()->object_symbol())) {
     __ JumpIfSmi(eax, if_false);
     if (!FLAG_harmony_typeof) {
@@ -4017,18 +4095,7 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   } else {
     if (if_false != fall_through) __ jmp(if_false);
   }
-}
-
-
-void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
-                                                    Label* if_true,
-                                                    Label* if_false,
-                                                    Label* fall_through) {
-  VisitForAccumulatorValue(expr);
-  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-
-  __ cmp(eax, isolate()->factory()->undefined_value());
-  Split(equal, if_true, if_false, fall_through);
+  context()->Plug(if_true, if_false);
 }
 
 
@@ -4036,9 +4103,12 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   Comment cmnt(masm_, "[ CompareOperation");
   SetSourcePosition(expr->position());
 
+  // First we try a fast inlined version of the compare when one of
+  // the operands is a literal.
+  if (TryLiteralCompare(expr)) return;
+
   // Always perform the comparison for its control flow. Pack the result
   // into the expression's context after the comparison is performed.
-
   Label materialize_true, materialize_false;
   Label* if_true = NULL;
   Label* if_false = NULL;
@@ -4046,16 +4116,9 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  // First we try a fast inlined version of the compare when one of
-  // the operands is a literal.
-  if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
-    context()->Plug(if_true, if_false);
-    return;
-  }
-
   Token::Value op = expr->op();
   VisitForStackValue(expr->left());
-  switch (expr->op()) {
+  switch (op) {
     case Token::IN:
       VisitForStackValue(expr->right());
       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
@@ -4071,7 +4134,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       __ CallStub(&stub);
      decrement_stack_height(2);
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-      __ test(eax, Operand(eax));
+      __ test(eax, eax);
       // The stub returns 0 for true.
       Split(zero, if_true, if_false, fall_through);
       break;
 
@@ -4080,11 +4143,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
     default: {
       VisitForAccumulatorValue(expr->right());
       Condition cc = no_condition;
-      bool strict = false;
       switch (op) {
         case Token::EQ_STRICT:
-          strict = true;
-          // Fall through
         case Token::EQ:
           cc = equal;
           __ pop(edx);
@@ -4120,10 +4180,10 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
      JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
         Label slow_case;
-        __ mov(ecx, Operand(edx));
-        __ or_(ecx, Operand(eax));
+        __ mov(ecx, edx);
+        __ or_(ecx, eax);
         patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
-        __ cmp(edx, Operand(eax));
+        __ cmp(edx, eax);
         Split(cc, if_true, if_false, NULL);
         __ bind(&slow_case);
       }
@@ -4135,7 +4195,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       patch_site.EmitPatchInfo();
 
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-      __ test(eax, Operand(eax));
+      __ test(eax, eax);
       Split(cc, if_true, if_false, fall_through);
     }
   }
@@ -4146,7 +4206,9 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 }
 
 
-void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
+void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
+                                              Expression* sub_expr,
+                                              NilValue nil) {
   Label materialize_true, materialize_false;
   Label* if_true = NULL;
   Label* if_false = NULL;
@@ -4154,15 +4216,20 @@
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  VisitForAccumulatorValue(expr->expression());
+  VisitForAccumulatorValue(sub_expr);
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-
-  __ cmp(eax, isolate()->factory()->null_value());
-  if (expr->is_strict()) {
+  Handle<Object> nil_value = nil == kNullValue ?
+      isolate()->factory()->null_value() :
+      isolate()->factory()->undefined_value();
+  __ cmp(eax, nil_value);
+  if (expr->op() == Token::EQ_STRICT) {
     Split(equal, if_true, if_false, fall_through);
   } else {
+    Handle<Object> other_nil_value = nil == kNullValue ?
+        isolate()->factory()->undefined_value() :
+        isolate()->factory()->null_value();
     __ j(equal, if_true);
-    __ cmp(eax, isolate()->factory()->undefined_value());
+    __ cmp(eax, other_nil_value);
     __ j(equal, if_true);
     __ JumpIfSmi(eax, if_false);
     // It can be an undetectable object.
@@ -4229,7 +4296,7 @@ void FullCodeGenerator::EnterFinallyBlock() {
   // Cook return address on top of stack (smi encoded Code* delta)
   ASSERT(!result_register().is(edx));
   __ pop(edx);
-  __ sub(Operand(edx), Immediate(masm_->CodeObject()));
+  __ sub(edx, Immediate(masm_->CodeObject()));
   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
   STATIC_ASSERT(kSmiTag == 0);
   __ SmiTag(edx);
@@ -4245,8 +4312,8 @@ void FullCodeGenerator::ExitFinallyBlock() {
   // Uncook return address.
   __ pop(edx);
   __ SmiUntag(edx);
-  __ add(Operand(edx), Immediate(masm_->CodeObject()));
-  __ jmp(Operand(edx));
+  __ add(edx, Immediate(masm_->CodeObject()));
+  __ jmp(edx);
 }
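Note: the many mechanical rewrites above of the form "__ op(reg, Operand(reg))"
to "__ op(reg, reg)" track register-register overloads on the ia32 assembler.
The sketch below (an assumed, simplified API, not V8's actual Assembler class)
shows how such an overload lets both spellings coexist while new call sites
drop the Operand() wrapper:

    #include <cstdio>

    struct Register { const char* name; };

    struct Operand {
      explicit Operand(Register r) : reg(r) {}
      Register reg;
    };

    struct Assembler {
      // Older form: the second operand is always wrapped in an Operand.
      void test(Register dst, const Operand& src) {
        std::printf("test %s, %s  ; via Operand\n", dst.name, src.reg.name);
      }
      // Newer overload: plain register-register form.
      void test(Register dst, Register src) {
        std::printf("test %s, %s  ; reg-reg\n", dst.name, src.name);
      }
    };

    int main() {
      Assembler masm;
      Register ecx{"ecx"};
      masm.test(ecx, Operand(ecx));  // old call sites still compile
      masm.test(ecx, ecx);           // the shorter form this patch adopts
    }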
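Note: the EmitClassOf and EmitLiteralCompareTypeof hunks above rely on the
STATIC_ASSERTs that exactly two callable instance types exist, one at each end
of the spec-object type range. A sketch of that trick (assumed enum layout
mirroring the asserts, not the real instance-type list):

    enum InstanceType {
      FIRST_SPEC_OBJECT_TYPE,              // callable type at the low end
      FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,  // == FIRST_SPEC_OBJECT_TYPE + 1
      OTHER_NONCALLABLE_TYPE,              // ... more non-callable types ...
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE,   // == LAST_SPEC_OBJECT_TYPE - 1
      LAST_SPEC_OBJECT_TYPE                // callable type at the high end
    };

    // Two equality checks replace a range check plus extra comparisons,
    // which is what the paired j(equal, &function) instructions above do.
    static bool IsCallable(InstanceType t) {
      return t == FIRST_SPEC_OBJECT_TYPE || t == LAST_SPEC_OBJECT_TYPE;
    }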