summaryrefslogtreecommitdiff
path: root/deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h')
-rw-r--r-- deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h | 113
1 file changed, 43 insertions, 70 deletions
diff --git a/deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h b/deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h
index 2c63dac2b6..f96f2e00a5 100644
--- a/deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h
+++ b/deps/v8/src/baseline/ia32/baseline-assembler-ia32-inl.h
@@ -44,32 +44,6 @@ class BaselineAssembler::ScratchRegisterScope {
int registers_used_;
};
-// TODO(v8:11461): Unify condition names in the MacroAssembler.
-enum class Condition : uint32_t {
- kEqual = equal,
- kNotEqual = not_equal,
-
- kLessThan = less,
- kGreaterThan = greater,
- kLessThanEqual = less_equal,
- kGreaterThanEqual = greater_equal,
-
- kUnsignedLessThan = below,
- kUnsignedGreaterThan = above,
- kUnsignedLessThanEqual = below_equal,
- kUnsignedGreaterThanEqual = above_equal,
-
- kOverflow = overflow,
- kNoOverflow = no_overflow,
-
- kZero = zero,
- kNotZero = not_zero,
-};
-
-inline internal::Condition AsMasmCondition(Condition cond) {
- return static_cast<internal::Condition>(cond);
-}
-
namespace detail {
#define __ masm_->
@@ -124,7 +98,7 @@ void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
Label* target,
Label::Distance distance) {
__ cmp(left, Immediate(right));
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
@@ -139,21 +113,31 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
} else {
__ test(value, Immediate(mask));
}
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance distance) {
__ cmp(lhs, rhs);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
+}
+
+void BaselineAssembler::JumpIfObjectTypeFast(Condition cc, Register object,
+ InstanceType instance_type,
+ Label* target,
+ Label::Distance distance) {
+ ScratchRegisterScope temps(this);
+ Register scratch = temps.AcquireScratch();
+ JumpIfObjectType(cc, object, instance_type, scratch, target, distance);
}
+
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
Register map, Label* target,
Label::Distance distance) {
__ AssertNotSmi(object);
__ CmpObjectType(object, instance_type, map);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@@ -167,7 +151,7 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ movd(eax, xmm0);
}
__ CmpInstanceType(map, instance_type);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
MemOperand operand, Label* target,
@@ -181,31 +165,31 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
} else {
__ cmp(value, Immediate(smi));
}
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance distance) {
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ cmp(lhs, rhs);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
Label::Distance distance) {
__ cmp(operand, value);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
Label::Distance distance) {
__ cmp(operand, value);
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance distance) {
__ cmpb(value, Immediate(byte));
- __ j(AsMasmCondition(cc), target, distance);
+ __ j(cc, target, distance);
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {
return __ mov(RegisterFrameOperand(output), source);
@@ -319,8 +303,8 @@ void BaselineAssembler::Pop(T... registers) {
(__ Pop(registers), ...);
}
-void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
- int offset) {
+void BaselineAssembler::LoadTaggedField(Register output, Register source,
+ int offset) {
__ mov(output, FieldOperand(source, offset));
}
@@ -336,11 +320,6 @@ void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
SmiUntag(output);
}
-void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
- int offset) {
- __ mov(output, FieldOperand(source, offset));
-}
-
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
Register source, int offset) {
__ movzx_w(output, FieldOperand(source, offset));
@@ -380,16 +359,14 @@ void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
Label* on_result,
Label::Distance distance) {
Label fallthrough;
- LoadTaggedPointerField(scratch_and_result, feedback_vector,
- FeedbackVector::OffsetOfElementAt(slot.ToInt()));
+ LoadTaggedField(scratch_and_result, feedback_vector,
+ FeedbackVector::OffsetOfElementAt(slot.ToInt()));
__ LoadWeakValue(scratch_and_result, &fallthrough);
// Is it marked_for_deoptimization? If yes, clear the slot.
{
ScratchRegisterScope temps(this);
- Register scratch2 = temps.AcquireScratch();
- DCHECK(!AreAliased(scratch_and_result, scratch2));
- __ TestCodeTIsMarkedForDeoptimization(scratch_and_result, scratch2);
+ __ TestCodeIsMarkedForDeoptimization(scratch_and_result);
__ j(equal, on_result, distance);
__ mov(FieldOperand(feedback_vector,
FeedbackVector::OffsetOfElementAt(slot.ToInt())),
@@ -406,8 +383,8 @@ void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
ScratchRegisterScope scratch_scope(this);
Register feedback_cell = scratch_scope.AcquireScratch();
LoadFunction(feedback_cell);
- LoadTaggedPointerField(feedback_cell, feedback_cell,
- JSFunction::kFeedbackCellOffset);
+ LoadTaggedField(feedback_cell, feedback_cell,
+ JSFunction::kFeedbackCellOffset);
__ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
Immediate(weight));
if (skip_interrupt_label) {
@@ -423,8 +400,8 @@ void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
Register feedback_cell = scratch_scope.AcquireScratch();
DCHECK(!AreAliased(feedback_cell, weight));
LoadFunction(feedback_cell);
- LoadTaggedPointerField(feedback_cell, feedback_cell,
- JSFunction::kFeedbackCellOffset);
+ LoadTaggedField(feedback_cell, feedback_cell,
+ JSFunction::kFeedbackCellOffset);
__ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
weight);
if (skip_interrupt_label) __ j(greater_equal, skip_interrupt_label);
@@ -433,16 +410,16 @@ void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
uint32_t depth) {
for (; depth > 0; --depth) {
- LoadTaggedPointerField(context, context, Context::kPreviousOffset);
+ LoadTaggedField(context, context, Context::kPreviousOffset);
}
- LoadTaggedAnyField(kInterpreterAccumulatorRegister, context,
- Context::OffsetOfElementAt(index));
+ LoadTaggedField(kInterpreterAccumulatorRegister, context,
+ Context::OffsetOfElementAt(index));
}
void BaselineAssembler::StaContextSlot(Register context, Register value,
uint32_t index, uint32_t depth) {
for (; depth > 0; --depth) {
- LoadTaggedPointerField(context, context, Context::kPreviousOffset);
+ LoadTaggedField(context, context, Context::kPreviousOffset);
}
StoreTaggedFieldWithWriteBarrier(context, Context::OffsetOfElementAt(index),
value);
@@ -451,33 +428,29 @@ void BaselineAssembler::StaContextSlot(Register context, Register value,
void BaselineAssembler::LdaModuleVariable(Register context, int cell_index,
uint32_t depth) {
for (; depth > 0; --depth) {
- LoadTaggedPointerField(context, context, Context::kPreviousOffset);
+ LoadTaggedField(context, context, Context::kPreviousOffset);
}
- LoadTaggedPointerField(context, context, Context::kExtensionOffset);
+ LoadTaggedField(context, context, Context::kExtensionOffset);
if (cell_index > 0) {
- LoadTaggedPointerField(context, context,
- SourceTextModule::kRegularExportsOffset);
+ LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
// The actual array index is (cell_index - 1).
cell_index -= 1;
} else {
- LoadTaggedPointerField(context, context,
- SourceTextModule::kRegularImportsOffset);
+ LoadTaggedField(context, context, SourceTextModule::kRegularImportsOffset);
// The actual array index is (-cell_index - 1).
cell_index = -cell_index - 1;
}
LoadFixedArrayElement(context, context, cell_index);
- LoadTaggedAnyField(kInterpreterAccumulatorRegister, context,
- Cell::kValueOffset);
+ LoadTaggedField(kInterpreterAccumulatorRegister, context, Cell::kValueOffset);
}
void BaselineAssembler::StaModuleVariable(Register context, Register value,
int cell_index, uint32_t depth) {
for (; depth > 0; --depth) {
- LoadTaggedPointerField(context, context, Context::kPreviousOffset);
+ LoadTaggedField(context, context, Context::kPreviousOffset);
}
- LoadTaggedPointerField(context, context, Context::kExtensionOffset);
- LoadTaggedPointerField(context, context,
- SourceTextModule::kRegularExportsOffset);
+ LoadTaggedField(context, context, Context::kExtensionOffset);
+ LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
// The actual array index is (cell_index - 1).
cell_index -= 1;
@@ -567,8 +540,8 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
// Drop receiver + arguments.
__ masm()->DropArguments(params_size, scratch,
- TurboAssembler::kCountIsInteger,
- TurboAssembler::kCountIncludesReceiver);
+ MacroAssembler::kCountIsInteger,
+ MacroAssembler::kCountIncludesReceiver);
__ masm()->Ret();
}