Diffstat (limited to 'deps/v8/test/unittests/interpreter')
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc | 69
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc | 7
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc | 1011
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc | 47
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc | 24
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc | 37
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc | 19
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecodes-unittest.cc | 120
-rw-r--r--  deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc | 189
-rw-r--r--  deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h | 16
13 files changed, 1397 insertions(+), 145 deletions(-)
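
For orientation before the per-file hunks: most of the churn in the builder and iterator tests tracks a few BytecodeArrayBuilder signature changes. A minimal before/after sketch, assembled only from the call sites in the hunks below; the builder, name, and reg variables are assumed to be set up as in bytecode-array-builder-unittest.cc, and the meaning of the new index operands is not asserted here.

    // LdaGlobal now also carries the variable name, not just a feedback slot:
    //   old: builder.LoadGlobal(1, TypeofMode::NOT_INSIDE_TYPEOF);
    builder.LoadGlobal(name, 1, TypeofMode::NOT_INSIDE_TYPEOF);

    // CreateClosure gains an extra index operand between the entry index and
    // the tenure hint:
    //   old: builder.CreateClosure(0, NOT_TENURED);
    builder.CreateClosure(0, 1, NOT_TENURED);

    // Array/object literal creation now takes plain index/flag operands
    // instead of a FixedArray handle:
    //   old: builder.CreateArrayLiteral(factory->NewFixedArray(1), 0, 0);
    builder.CreateArrayLiteral(0, 0, 0);
    builder.CreateObjectLiteral(0, 0, 0, reg);
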
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
index 999490518e..e9b996ea09 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
@@ -8,6 +8,7 @@
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
@@ -77,8 +78,8 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Emit global load / store operations.
Handle<String> name = factory->NewStringFromStaticChars("var_name");
- builder.LoadGlobal(1, TypeofMode::NOT_INSIDE_TYPEOF)
- .LoadGlobal(1, TypeofMode::INSIDE_TYPEOF)
+ builder.LoadGlobal(name, 1, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 1, TypeofMode::INSIDE_TYPEOF)
.StoreGlobal(name, 1, LanguageMode::SLOPPY)
.StoreGlobal(name, 1, LanguageMode::STRICT);
@@ -115,18 +116,19 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.LoadLookupGlobalSlot(name, TypeofMode::INSIDE_TYPEOF, 1, 0);
// Emit closure operations.
- builder.CreateClosure(0, NOT_TENURED);
+ builder.CreateClosure(0, 1, NOT_TENURED);
// Emit create context operation.
builder.CreateBlockContext(factory->NewScopeInfo(1));
builder.CreateCatchContext(reg, name, factory->NewScopeInfo(1));
builder.CreateFunctionContext(1);
+ builder.CreateEvalContext(1);
builder.CreateWithContext(reg, factory->NewScopeInfo(1));
// Emit literal creation operations.
- builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("a"), 0, 0)
- .CreateArrayLiteral(factory->NewFixedArray(1), 0, 0)
- .CreateObjectLiteral(factory->NewFixedArray(1), 0, 0, reg);
+ builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("a"), 0, 0);
+ builder.CreateArrayLiteral(0, 0, 0);
+ builder.CreateObjectLiteral(0, 0, 0, reg);
// Call operations.
builder.Call(reg, reg_list, 1, Call::GLOBAL_CALL)
@@ -135,7 +137,8 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Call(reg, reg_list, 1, Call::GLOBAL_CALL, TailCallMode::kAllow)
.CallRuntime(Runtime::kIsArray, reg)
.CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, reg_list, pair)
- .CallJSRuntime(Context::SPREAD_ITERABLE_INDEX, reg_list);
+ .CallJSRuntime(Context::SPREAD_ITERABLE_INDEX, reg_list)
+ .NewWithSpread(reg_list);
// Emit binary operator invocations.
builder.BinaryOperation(Token::Value::ADD, reg, 1)
@@ -195,14 +198,28 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.CompareOperation(Token::Value::INSTANCEOF, reg, 8)
.CompareOperation(Token::Value::IN, reg, 9);
+ // Emit peephole optimizations of equality with Null or Undefined.
+ builder.LoadUndefined()
+ .CompareOperation(Token::Value::EQ, reg, 1)
+ .LoadNull()
+ .CompareOperation(Token::Value::EQ, reg, 1)
+ .LoadUndefined()
+ .CompareOperation(Token::Value::EQ_STRICT, reg, 1)
+ .LoadNull()
+ .CompareOperation(Token::Value::EQ_STRICT, reg, 1);
+
// Emit conversion operator invocations.
builder.ConvertAccumulatorToNumber(reg)
.ConvertAccumulatorToObject(reg)
.ConvertAccumulatorToName(reg);
+ // Emit GetSuperConstructor.
+ builder.GetSuperConstructor(reg);
+
// Short jumps with Imm8 operands
{
- BytecodeLabel start, after_jump1, after_jump2, after_jump3, after_jump4;
+ BytecodeLabel start, after_jump1, after_jump2, after_jump3, after_jump4,
+ after_jump5;
builder.Bind(&start)
.Jump(&after_jump1)
.Bind(&after_jump1)
@@ -212,11 +229,13 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Bind(&after_jump3)
.JumpIfNotHole(&after_jump4)
.Bind(&after_jump4)
+ .JumpIfJSReceiver(&after_jump5)
+ .Bind(&after_jump5)
.JumpLoop(&start, 0);
}
// Longer jumps with constant operands
- BytecodeLabel end[8];
+ BytecodeLabel end[9];
{
BytecodeLabel after_jump;
builder.Jump(&end[0])
@@ -231,7 +250,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.JumpIfFalse(&end[4])
.JumpIfNull(&end[5])
.JumpIfUndefined(&end[6])
- .JumpIfNotHole(&end[7]);
+ .JumpIfNotHole(&end[7])
+ .LoadLiteral(factory->prototype_string())
+ .JumpIfJSReceiver(&end[8]);
}
// Perform an operation that returns boolean value to
@@ -258,6 +279,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Bind(&after_jump2);
}
+ // Emit set pending message bytecode.
+ builder.SetPendingMessage();
+
// Emit stack check bytecode.
builder.StackCheck(0);
@@ -282,14 +306,14 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
Handle<String> wide_name = factory->NewStringFromStaticChars("var_wide_name");
// Emit wide global load / store operations.
- builder.LoadGlobal(1024, TypeofMode::NOT_INSIDE_TYPEOF)
- .LoadGlobal(1024, TypeofMode::INSIDE_TYPEOF)
- .LoadGlobal(1024, TypeofMode::INSIDE_TYPEOF)
+ builder.LoadGlobal(name, 1024, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
+ .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
.StoreGlobal(name, 1024, LanguageMode::SLOPPY)
.StoreGlobal(wide_name, 1, LanguageMode::STRICT);
// Emit extra wide global load.
- builder.LoadGlobal(1024 * 1024, TypeofMode::NOT_INSIDE_TYPEOF);
+ builder.LoadGlobal(name, 1024 * 1024, TypeofMode::NOT_INSIDE_TYPEOF);
// Emit wide load / store property operations.
builder.LoadNamedProperty(reg, wide_name, 0)
@@ -299,6 +323,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.StoreNamedProperty(reg, wide_name, 0, LanguageMode::STRICT)
.StoreKeyedProperty(reg, reg, 2056, LanguageMode::STRICT);
+ builder.StoreDataPropertyInLiteral(reg, reg,
+ DataPropertyInLiteralFlag::kNoFlags, 0);
+
// Emit wide context operations.
builder.LoadContextSlot(reg, 1024, 0).StoreContextSlot(reg, 1024, 0);
@@ -309,13 +336,14 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.StoreLookupSlot(wide_name, LanguageMode::STRICT);
// CreateClosureWide
- builder.CreateClosure(1000, NOT_TENURED);
+ builder.CreateClosure(1000, 321, NOT_TENURED);
// Emit wide variant of literal creation operations.
- builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("wide_literal"),
- 0, 0)
- .CreateArrayLiteral(factory->NewFixedArray(2), 0, 0)
- .CreateObjectLiteral(factory->NewFixedArray(2), 0, 0, reg);
+ builder
+ .CreateRegExpLiteral(factory->NewStringFromStaticChars("wide_literal"), 0,
+ 0)
+ .CreateArrayLiteral(0, 0, 0)
+ .CreateObjectLiteral(0, 0, 0, reg);
// Emit load and store operations for module variables.
builder.LoadModuleVariable(-1, 42)
@@ -393,6 +421,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
scorecard[Bytecodes::ToByte(Bytecode::kBitwiseOrSmi)] = 1;
scorecard[Bytecodes::ToByte(Bytecode::kShiftLeftSmi)] = 1;
scorecard[Bytecodes::ToByte(Bytecode::kShiftRightSmi)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestUndetectable)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestUndefined)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestNull)] = 1;
}
// Check return occurs at the end and only once in the BytecodeArray.
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
index 894134a959..cd7d764ee5 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
@@ -6,6 +6,7 @@
#include "src/interpreter/bytecode-array-builder.h"
#include "src/interpreter/bytecode-array-iterator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
@@ -60,7 +61,7 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
.ForInPrepare(reg_0, triple)
.CallRuntime(Runtime::kLoadIC_Miss, reg_0)
.Debugger()
- .LoadGlobal(0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
.Return();
// Test iterator sees the expected output from the builder.
@@ -268,8 +269,8 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
- CHECK_EQ(iterator.current_bytecode_size(), 6);
- CHECK_EQ(iterator.GetIndexOperand(0), 0x10000000u);
+ CHECK_EQ(iterator.current_bytecode_size(), 10);
+ CHECK_EQ(iterator.GetIndexOperand(1), 0x10000000u);
offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
kPrefixByteSize;
iterator.Advance();
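
Reader's note on the two updated checks above: with the new name operand, LdaGlobal at quadruple operand scale encodes two 4-byte index operands (name constant, then feedback slot) instead of one. Assuming current_bytecode_size() includes the wide/extra-wide prefix byte, as the old value of 6 = 1 prefix + 1 opcode + 4 implies, the size becomes 1 + 1 + 4 + 4 = 10, and the 0x10000000 feedback-slot index is now read from operand position 1, behind the name operand at position 0.
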
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
new file mode 100644
index 0000000000..2209dc9219
--- /dev/null
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
@@ -0,0 +1,1011 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/interpreter/bytecode-array-builder.h"
+#include "src/interpreter/bytecode-array-random-iterator.h"
+#include "src/objects-inl.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+class BytecodeArrayRandomIteratorTest : public TestWithIsolateAndZone {
+ public:
+ BytecodeArrayRandomIteratorTest() {}
+ ~BytecodeArrayRandomIteratorTest() override {}
+};
+
+TEST_F(BytecodeArrayRandomIteratorTest, InvalidBeforeStart) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToStart();
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, InvalidAfterEnd) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToEnd();
+ ASSERT_TRUE(iterator.IsValid());
+ ++iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, AccessesFirst) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToStart();
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), 0);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, AccessesLast) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToEnd();
+
+ int offset = bytecodeArray->length() -
+ Bytecodes::Size(Bytecode::kReturn, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, RandomAccessValid) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ BytecodeArrayRandomIterator iterator(builder.ToBytecodeArray(isolate()),
+ zone());
+ const int kPrefixByteSize = 1;
+ int offset = 0;
+
+ iterator.GoToIndex(13);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(2);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(18);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator -= 3;
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset -= Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator += 2;
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(23);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset +=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(24);
+ EXPECT_FALSE(iterator.IsValid());
+
+ iterator.GoToIndex(-5);
+ EXPECT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, IteratesBytecodeArray) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ BytecodeArrayRandomIterator iterator(builder.ToBytecodeArray(isolate()),
+ zone());
+ const int kPrefixByteSize = 1;
+ int offset = 0;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 1);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 3);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
+ EXPECT_EQ(iterator.current_index(), 4);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 5);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 6);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 7);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 8);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 9);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 10);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 11);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdar);
+ EXPECT_EQ(iterator.current_index(), 12);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 14);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 16);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+ offset +=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kForInPrepare);
+ EXPECT_EQ(iterator.current_index(), 19);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 3);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntime);
+ EXPECT_EQ(iterator.current_index(), 20);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadIC_Miss);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
+ EXPECT_EQ(iterator.current_index(), 21);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
+ EXPECT_EQ(iterator.current_index(), 22);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(iterator.current_bytecode_size(), 10);
+ EXPECT_EQ(iterator.GetIndexOperand(1), 0x10000000u);
+ offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ ++iterator;
+ ASSERT_TRUE(!iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, IteratesBytecodeArrayBackwards) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+ const int kPrefixByteSize = 1;
+ int offset = bytecodeArray->length();
+
+ iterator.GoToEnd();
+
+ offset -= Bytecodes::Size(Bytecode::kReturn, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
+ EXPECT_EQ(iterator.current_index(), 22);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(iterator.current_bytecode_size(), 10);
+ EXPECT_EQ(iterator.GetIndexOperand(1), 0x10000000u);
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
+ EXPECT_EQ(iterator.current_index(), 21);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntime);
+ EXPECT_EQ(iterator.current_index(), 20);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadIC_Miss);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kForInPrepare);
+ EXPECT_EQ(iterator.current_index(), 19);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 3);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 16);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 14);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdar);
+ EXPECT_EQ(iterator.current_index(), 12);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 11);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 10);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 9);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 8);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 7);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 6);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 5);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
+ EXPECT_EQ(iterator.current_index(), 4);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 3);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 1);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+} // namespace interpreter
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
index bc865ef7d1..91b3a7554b 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
@@ -10,6 +10,7 @@
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/isolate.h"
+#include "src/objects-inl.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "test/unittests/interpreter/bytecode-utils.h"
diff --git a/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc
new file mode 100644
index 0000000000..a02d7f01c6
--- /dev/null
+++ b/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc
@@ -0,0 +1,47 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/interpreter/bytecode-operands.h"
+#include "src/isolate.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+using BytecodeOperandsTest = TestWithIsolateAndZone;
+
+TEST(BytecodeOperandsTest, IsScalableSignedByte) {
+#define SCALABLE_SIGNED_OPERAND(Name, ...) \
+ CHECK(BytecodeOperands::IsScalableSignedByte(OperandType::k##Name));
+ REGISTER_OPERAND_TYPE_LIST(SCALABLE_SIGNED_OPERAND)
+ SIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(SCALABLE_SIGNED_OPERAND)
+#undef SCALABLE_SIGNED_OPERAND
+#define NOT_SCALABLE_SIGNED_OPERAND(Name, ...) \
+ CHECK(!BytecodeOperands::IsScalableSignedByte(OperandType::k##Name));
+ INVALID_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+ UNSIGNED_FIXED_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+ UNSIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+#undef NOT_SCALABLE_SIGNED_OPERAND
+}
+
+TEST(BytecodeOperandsTest, IsScalableUnsignedByte) {
+#define SCALABLE_UNSIGNED_OPERAND(Name, ...) \
+ CHECK(BytecodeOperands::IsScalableUnsignedByte(OperandType::k##Name));
+ UNSIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(SCALABLE_UNSIGNED_OPERAND)
+#undef SCALABLE_UNSIGNED_OPERAND
+#define NOT_SCALABLE_UNSIGNED_OPERAND(Name, ...) \
+ CHECK(!BytecodeOperands::IsScalableUnsignedByte(OperandType::k##Name));
+ INVALID_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ REGISTER_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ SIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ UNSIGNED_FIXED_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+#undef NOT_SCALABLE_UNSIGNED_OPERAND
+}
+
+} // namespace interpreter
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
index d1c570d421..7d139f4b56 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
@@ -19,11 +19,12 @@ class BytecodePeepholeOptimizerTest : public BytecodePipelineStage,
public TestWithIsolateAndZone {
public:
BytecodePeepholeOptimizerTest()
- : peephole_optimizer_(this), last_written_(Bytecode::kIllegal) {}
+ : peephole_optimizer_(this),
+ last_written_(BytecodeNode::Illegal(BytecodeSourceInfo())) {}
~BytecodePeepholeOptimizerTest() override {}
void Reset() {
- last_written_.set_bytecode(Bytecode::kIllegal);
+ last_written_ = BytecodeNode::Illegal(BytecodeSourceInfo());
write_count_ = 0;
}
@@ -403,6 +404,25 @@ TEST_F(BytecodePeepholeOptimizerTest, MergeLdaZeroWithBinaryOp) {
}
}
+TEST_F(BytecodePeepholeOptimizerTest, MergeLdaNullOrUndefinedWithCompareOp) {
+ Bytecode first_bytecodes[] = {Bytecode::kLdaUndefined, Bytecode::kLdaNull};
+
+ for (auto first_bytecode : first_bytecodes) {
+ uint32_t reg_operand = Register(0).ToOperand();
+ uint32_t idx_operand = 1;
+ BytecodeNode first(first_bytecode);
+ BytecodeNode second(Bytecode::kTestEqual, reg_operand, idx_operand);
+ optimizer()->Write(&first);
+ optimizer()->Write(&second);
+ Flush();
+ CHECK_EQ(write_count(), 1);
+ CHECK_EQ(last_written().bytecode(), Bytecode::kTestUndetectable);
+ CHECK_EQ(last_written().operand_count(), 1);
+ CHECK_EQ(last_written().operand(0), reg_operand);
+ Reset();
+ }
+}
+
} // namespace interpreter
} // namespace internal
} // namespace v8
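For reference, the transformation the new MergeLdaNullOrUndefinedWithCompareOp test pins down is: a LdaUndefined or LdaNull immediately followed by TestEqual <reg>, <idx> collapses into a single TestUndetectable <reg>, with the second operand dropped (the test expects operand_count() == 1). A toy, self-contained sketch of that rewrite over a flat instruction vector; the Op enum and Instr struct are illustrative stand-ins, not the real BytecodeNode pipeline:

    // Toy peephole pass: merge "LdaUndefined/LdaNull; TestEqual r, idx"
    // into "TestUndetectable r". Types here are illustrative only.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    enum class Op {
      kLdaUndefined, kLdaNull, kTestEqual, kTestUndetectable, kOther
    };

    struct Instr {
      Op op;
      uint32_t operands[2] = {0, 0};
      int operand_count = 0;
    };

    std::vector<Instr> PeepholeMerge(const std::vector<Instr>& in) {
      std::vector<Instr> out;
      for (size_t i = 0; i < in.size(); ++i) {
        bool lda_null_or_undefined =
            in[i].op == Op::kLdaUndefined || in[i].op == Op::kLdaNull;
        if (lda_null_or_undefined && i + 1 < in.size() &&
            in[i + 1].op == Op::kTestEqual) {
          // Keep only the register operand; the index operand is dropped.
          out.push_back({Op::kTestUndetectable, {in[i + 1].operands[0], 0}, 1});
          ++i;  // The TestEqual has been consumed by the merge.
        } else {
          out.push_back(in[i]);
        }
      }
      return out;
    }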
diff --git a/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
index c4388e8fed..45366196f4 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
@@ -68,11 +68,12 @@ TEST_F(BytecodeNodeTest, Constructor2) {
}
TEST_F(BytecodeNodeTest, Constructor3) {
- uint32_t operands[] = {0x11};
- BytecodeNode node(Bytecode::kLdaGlobal, operands[0]);
+ uint32_t operands[] = {0x11, 0x22};
+ BytecodeNode node(Bytecode::kLdaGlobal, operands[0], operands[1]);
CHECK_EQ(node.bytecode(), Bytecode::kLdaGlobal);
- CHECK_EQ(node.operand_count(), 1);
+ CHECK_EQ(node.operand_count(), 2);
CHECK_EQ(node.operand(0), operands[0]);
+ CHECK_EQ(node.operand(1), operands[1]);
CHECK(!node.source_info().is_valid());
}
@@ -133,36 +134,6 @@ TEST_F(BytecodeNodeTest, NoEqualityWithDifferentSourceInfo) {
CHECK_NE(node, other);
}
-TEST_F(BytecodeNodeTest, SetBytecode0) {
- uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
- BytecodeSourceInfo source_info(77, false);
- BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
- operands[3], source_info);
- CHECK_EQ(node.source_info(), source_info);
-
- BytecodeNode clone(Bytecode::kIllegal);
- clone = node;
- clone.set_bytecode(Bytecode::kNop);
- CHECK_EQ(clone.bytecode(), Bytecode::kNop);
- CHECK_EQ(clone.operand_count(), 0);
- CHECK_EQ(clone.source_info(), source_info);
-}
-
-TEST_F(BytecodeNodeTest, SetBytecode1) {
- uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
- BytecodeSourceInfo source_info(77, false);
- BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
- operands[3], source_info);
-
- BytecodeNode clone(Bytecode::kIllegal);
- clone = node;
- clone.set_bytecode(Bytecode::kJump, 0x01aabbcc);
- CHECK_EQ(clone.bytecode(), Bytecode::kJump);
- CHECK_EQ(clone.operand_count(), 1);
- CHECK_EQ(clone.operand(0), 0x01aabbccu);
- CHECK_EQ(clone.source_info(), source_info);
-}
-
} // namespace interpreter
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
index 81c6da5f8f..b2c8b47c79 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
@@ -6,6 +6,7 @@
#include "src/interpreter/bytecode-array-builder.h"
#include "src/interpreter/bytecode-register-allocator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
diff --git a/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
index 55003d7511..ba9e880787 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
@@ -78,7 +78,7 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryMaterializedForJump) {
Register temp = NewTemporary();
optimizer()->DoStar(temp, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kJump);
+ optimizer()->PrepareForBytecode<Bytecode::kJump, AccumulatorUse::kNone>();
CHECK_EQ(write_count(), 1u);
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kStar);
CHECK_EQ(output()->at(0).operand(0), static_cast<uint32_t>(temp.ToOperand()));
@@ -96,7 +96,7 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryNotEmitted) {
BytecodeNode node1(Bytecode::kStar, NewTemporary().ToOperand());
ReleaseTemporaries(temp);
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(0).operand(0),
static_cast<uint32_t>(parameter.ToOperand()));
@@ -104,12 +104,12 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryNotEmitted) {
TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
Initialize(3, 1);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
optimizer()->DoStar(temp1, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
CHECK_EQ(write_count(), 1u);
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kStar);
CHECK_EQ(output()->at(0).operand(0),
@@ -120,7 +120,7 @@ TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
CHECK_EQ(write_count(), 1u);
optimizer()->DoLdar(temp0, BytecodeSourceInfo());
CHECK_EQ(write_count(), 1u);
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(write_count(), 2u);
CHECK_EQ(output()->at(1).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(1).operand(0),
@@ -129,7 +129,7 @@ TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterNotFlushed) {
Initialize(3, 1);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
optimizer()->DoStar(temp0, BytecodeSourceInfo());
@@ -158,7 +158,7 @@ TEST_F(BytecodeRegisterOptimizerTest, StoresToLocalsImmediate) {
CHECK_EQ(output()->at(0).operand(1),
static_cast<uint32_t>(local.ToOperand()));
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(write_count(), 2u);
CHECK_EQ(output()->at(1).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(1).operand(0),
@@ -188,12 +188,13 @@ TEST_F(BytecodeRegisterOptimizerTest, RangeOfTemporariesMaterializedForInput) {
Register parameter = Register::FromParameterIndex(1, 3);
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
optimizer()->DoStar(temp0, BytecodeSourceInfo());
optimizer()->DoMov(parameter, temp1, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kCallJSRuntime);
+ optimizer()
+ ->PrepareForBytecode<Bytecode::kCallJSRuntime, AccumulatorUse::kWrite>();
RegisterList reg_list =
optimizer()->GetInputRegisterList(RegisterList(temp0.index(), 2));
CHECK_EQ(temp0.index(), reg_list.first_register().index());
diff --git a/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc b/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
index 81868590b9..cfcdf6c3bc 100644
--- a/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
@@ -200,6 +200,126 @@ TEST(Bytecodes, SizesForUnsignedOperands) {
OperandSize::kQuad);
}
+// Helper macros to generate a check for whether a bytecode is in a macro
+// list of bytecodes. We use these to exhaustively test a predicate over all
+// bytecodes, both those that should pass and those that should fail it.
+#define OR_IS_BYTECODE(Name, ...) || bytecode == Bytecode::k##Name
+#define IN_BYTECODE_LIST(BYTECODE, LIST) \
+ ([](Bytecode bytecode) { return false LIST(OR_IS_BYTECODE); }(BYTECODE))
+
+TEST(Bytecodes, IsJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsForwardJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_FORWARD_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsForwardJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsForwardJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsUnconditionalJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_UNCONDITIONAL_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpImmediate) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpImmediate(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpImmediate(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpConstant) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpConstant(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpConstant(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJumpImmediate) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) && \
+ IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJumpConstant) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) && \
+ IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpIfToBoolean) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_TO_BOOLEAN_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+#undef OR_IS_BYTECODE
+#undef IN_BYTECODE_LIST
+
TEST(OperandScale, PrefixesRequired) {
CHECK(!Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kSingle));
CHECK(Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kDouble));
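The IN_BYTECODE_LIST / OR_IS_BYTECODE helpers introduced at the top of these tests fold a bytecode macro list into an immediately-invoked lambda that ORs one comparison per list entry onto an initial false. Roughly, for a list whose entries were Jump and JumpConstant (a hypothetical subset, shown only to illustrate the expansion), IN_BYTECODE_LIST(Bytecode::kJump, LIST) expands to something like:

    ([](Bytecode bytecode) {
      return false || bytecode == Bytecode::kJump ||
             bytecode == Bytecode::kJumpConstant;
    }(Bytecode::kJump))

which evaluates to true, so the EXPECT_TRUE branch of the TEST_BYTECODE expansion is taken; for any bytecode not in the list the lambda returns false and the EXPECT_FALSE branch is exercised instead.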
diff --git a/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc b/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
index fc80f7145d..3a0ec0835c 100644
--- a/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
@@ -8,6 +8,7 @@
#include "src/handles-inl.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/isolate.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
diff --git a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
index b8eb64c884..f80e8a3f80 100644
--- a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
@@ -5,7 +5,6 @@
#include "test/unittests/interpreter/interpreter-assembler-unittest.h"
#include "src/code-factory.h"
-#include "src/compiler/graph.h"
#include "src/compiler/node.h"
#include "src/interface-descriptors.h"
#include "src/isolate.h"
@@ -21,6 +20,14 @@ using namespace compiler;
namespace interpreter {
+InterpreterAssemblerTestState::InterpreterAssemblerTestState(
+ InterpreterAssemblerTest* test, Bytecode bytecode)
+ : compiler::CodeAssemblerState(
+ test->isolate(), test->zone(),
+ InterpreterDispatchDescriptor(test->isolate()),
+ Code::ComputeFlags(Code::BYTECODE_HANDLER),
+ Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)) {}
+
const interpreter::Bytecode kBytecodes[] = {
#define DEFINE_BYTECODE(Name, ...) interpreter::Bytecode::k##Name,
BYTECODE_LIST(DEFINE_BYTECODE)
@@ -44,6 +51,12 @@ Matcher<Node*> IsIntPtrSub(const Matcher<Node*>& lhs_matcher,
: IsInt32Sub(lhs_matcher, rhs_matcher);
}
+Matcher<Node*> IsIntPtrMul(const Matcher<Node*>& lhs_matcher,
+ const Matcher<Node*>& rhs_matcher) {
+ return kPointerSize == 8 ? IsInt64Mul(lhs_matcher, rhs_matcher)
+ : IsInt32Mul(lhs_matcher, rhs_matcher);
+}
+
Matcher<Node*> IsWordShl(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher) {
return kPointerSize == 8 ? IsWord64Shl(lhs_matcher, rhs_matcher)
@@ -62,6 +75,18 @@ Matcher<Node*> IsWordOr(const Matcher<Node*>& lhs_matcher,
: IsWord32Or(lhs_matcher, rhs_matcher);
}
+Matcher<Node*> IsChangeInt32ToIntPtr(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsChangeInt32ToInt64(matcher) : matcher;
+}
+
+Matcher<Node*> IsChangeUint32ToWord(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsChangeUint32ToUint64(matcher) : matcher;
+}
+
+Matcher<Node*> IsTruncateWordToWord32(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsTruncateInt64ToInt32(matcher) : matcher;
+}
+
InterpreterAssemblerTest::InterpreterAssemblerForTest::
~InterpreterAssemblerForTest() {
// Tests don't necessarily read and write accumulator but
@@ -101,15 +126,11 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedByteOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedByteOperand(
int offset) {
- Matcher<Node*> load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset)));
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
- }
- return load_matcher;
}
Matcher<Node*>
@@ -148,9 +169,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedShortOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
int offset) {
- Matcher<Node*> load_matcher;
if (TargetSupportsUnalignedAccess()) {
- load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int16(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
@@ -174,14 +194,9 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
}
- load_matcher = IsWord32Or(
- IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)), bytes[1]);
- }
-
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
+ return IsWord32Or(IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)),
+ bytes[1]);
}
- return load_matcher;
}
Matcher<Node*>
@@ -224,9 +239,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedQuadOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
int offset) {
- Matcher<Node*> load_matcher;
if (TargetSupportsUnalignedAccess()) {
- load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int32(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
@@ -250,18 +264,13 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
}
- load_matcher = IsWord32Or(
+ return IsWord32Or(
IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
IsWord32Or(
IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
bytes[3])));
}
-
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
- }
- return load_matcher;
}
Matcher<Node*>
@@ -298,7 +307,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedOperand(
TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* tail_call_node = m.Dispatch();
OperandScale operand_scale = OperandScale::kSingle;
@@ -310,9 +320,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
m.IsLoad(MachineType::Uint8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
next_bytecode_offset_matcher);
- if (kPointerSize == 8) {
- target_bytecode_matcher = IsChangeUint32ToUint64(target_bytecode_matcher);
- }
+ target_bytecode_matcher = IsChangeUint32ToWord(target_bytecode_matcher);
Matcher<Node*> code_target_matcher = m.IsLoad(
MachineType::Pointer(),
IsParameter(InterpreterDispatchDescriptor::kDispatchTable),
@@ -330,10 +338,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
m.IsLoad(MachineType::Uint8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
after_lookahead_offset);
- if (kPointerSize == 8) {
- after_lookahead_bytecode =
- IsChangeUint32ToUint64(after_lookahead_bytecode);
- }
+ after_lookahead_bytecode = IsChangeUint32ToWord(after_lookahead_bytecode);
target_bytecode_matcher =
IsPhi(MachineType::PointerRepresentation(), target_bytecode_matcher,
after_lookahead_bytecode, _);
@@ -364,7 +369,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
if (!interpreter::Bytecodes::IsJump(bytecode)) return;
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));
Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
@@ -372,10 +378,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
IsIntPtrConstant(jump_offset));
Matcher<Node*> target_bytecode_matcher =
m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
- if (kPointerSize == 8) {
- target_bytecode_matcher =
- IsChangeUint32ToUint64(target_bytecode_matcher);
- }
+ target_bytecode_matcher = IsChangeUint32ToWord(target_bytecode_matcher);
Matcher<Node*> code_target_matcher =
m.IsLoad(MachineType::Pointer(),
IsParameter(InterpreterDispatchDescriptor::kDispatchTable),
@@ -398,7 +401,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
- InterpreterAssemblerForTest m(this, bytecode, operand_scale);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode, operand_scale);
int number_of_operands =
interpreter::Bytecodes::NumberOfOperands(bytecode);
for (int i = 0; i < number_of_operands; i++) {
@@ -419,7 +423,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
break;
case interpreter::OperandType::kIdx:
EXPECT_THAT(m.BytecodeOperandIdx(i),
- m.IsUnsignedOperand(offset, operand_size));
+ IsChangeUint32ToWord(
+ m.IsUnsignedOperand(offset, operand_size)));
break;
case interpreter::OperandType::kUImm:
EXPECT_THAT(m.BytecodeOperandUImm(i),
@@ -436,8 +441,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
case interpreter::OperandType::kRegOutPair:
case interpreter::OperandType::kRegOutTriple:
case interpreter::OperandType::kRegPair:
- EXPECT_THAT(m.BytecodeOperandReg(i),
- m.IsSignedOperand(offset, operand_size));
+ EXPECT_THAT(
+ m.BytecodeOperandReg(i),
+ IsChangeInt32ToIntPtr(m.IsSignedOperand(offset, operand_size)));
break;
case interpreter::OperandType::kRuntimeId:
EXPECT_THAT(m.BytecodeOperandRuntimeId(i),
@@ -463,7 +469,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
continue;
}
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
// Should be incoming accumulator if not set.
EXPECT_THAT(m.GetAccumulator(),
IsParameter(InterpreterDispatchDescriptor::kAccumulator));
@@ -485,7 +492,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
EXPECT_THAT(
m.GetContext(),
m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
@@ -496,7 +504,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.IntPtrConstant(44);
Node* reg_location_node = m.RegisterLocation(reg_index_node);
EXPECT_THAT(reg_location_node,
@@ -508,7 +517,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.IntPtrConstant(44);
Node* load_reg_node = m.LoadRegister(reg_index_node);
EXPECT_THAT(load_reg_node,
@@ -520,7 +530,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* store_value = m.Int32Constant(0xdeadbeef);
Node* reg_index_node = m.IntPtrConstant(44);
Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
@@ -536,7 +547,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* value = m.Int32Constant(44);
EXPECT_THAT(m.SmiTag(value), IsBitcastWordToTaggedSigned(IsIntPtrConstant(
static_cast<intptr_t>(44)
@@ -549,8 +561,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* a = m.Int32Constant(0);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ Node* a = m.Parameter(0);
Node* b = m.Int32Constant(1);
Node* add = m.IntPtrAdd(a, b);
EXPECT_THAT(add, IsIntPtrAdd(a, b));
@@ -559,8 +572,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* a = m.Int32Constant(0);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ Node* a = m.Parameter(0);
Node* b = m.Int32Constant(1);
Node* add = m.IntPtrSub(a, b);
EXPECT_THAT(add, IsIntPtrSub(a, b));
@@ -569,7 +583,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* a = m.IntPtrConstant(0);
Node* add = m.WordShl(a, 10);
EXPECT_THAT(add, IsWordShl(a, IsIntPtrConstant(10)));
@@ -578,25 +593,44 @@ TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* index = m.IntPtrConstant(2);
- Node* load_constant = m.LoadConstantPoolEntry(index);
- Matcher<Node*> constant_pool_matcher = m.IsLoad(
- MachineType::AnyTagged(),
- IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
- IsIntPtrConstant(BytecodeArray::kConstantPoolOffset - kHeapObjectTag));
- EXPECT_THAT(
- load_constant,
- m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
- IsIntPtrAdd(
- IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
- IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ {
+ Node* index = m.IntPtrConstant(2);
+ Node* load_constant = m.LoadConstantPoolEntry(index);
+ Matcher<Node*> constant_pool_matcher =
+ m.IsLoad(MachineType::AnyTagged(),
+ IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
+ IsIntPtrConstant(BytecodeArray::kConstantPoolOffset -
+ kHeapObjectTag));
+ EXPECT_THAT(load_constant,
+ m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
+ IsIntPtrConstant(FixedArray::OffsetOfElementAt(2) -
+ kHeapObjectTag)));
+ }
+ {
+ Node* index = m.Parameter(2);
+ Node* load_constant = m.LoadConstantPoolEntry(index);
+ Matcher<Node*> constant_pool_matcher =
+ m.IsLoad(MachineType::AnyTagged(),
+ IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
+ IsIntPtrConstant(BytecodeArray::kConstantPoolOffset -
+ kHeapObjectTag));
+ EXPECT_THAT(
+ load_constant,
+ m.IsLoad(
+ MachineType::AnyTagged(), constant_pool_matcher,
+ IsIntPtrAdd(
+ IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
+ IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
+ }
}
}
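The two cases above describe the same FixedArray element address in two forms: with a constant index the whole offset folds into FixedArray::OffsetOfElementAt(2) - kHeapObjectTag, while a dynamic index is added as the untagged header offset plus index << kPointerSizeLog2. A small standalone check of that arithmetic, with the layout constants assumed for a 64-bit build rather than pulled from V8's headers:

    // Both offset forms for element 2 of a FixedArray-like layout agree.
    // Constants are assumptions for a 64-bit build, not taken from V8 headers.
    #include <cassert>
    #include <cstdint>

    int main() {
      const intptr_t kHeapObjectTag = 1;          // tagged pointers off by one
      const intptr_t kPointerSize = 8;
      const intptr_t kPointerSizeLog2 = 3;
      const intptr_t kFixedArrayHeaderSize = 16;  // map + length, assumed

      auto offset_of_element_at = [&](intptr_t i) {
        return kFixedArrayHeaderSize + i * kPointerSize;
      };

      // Constant-index form: folds to a single constant in the matcher.
      intptr_t folded = offset_of_element_at(2) - kHeapObjectTag;
      // Dynamic-index form: untagged header offset + (index << log2(ptr size)).
      intptr_t index = 2;
      intptr_t computed = (kFixedArrayHeaderSize - kHeapObjectTag) +
                          (index << kPointerSizeLog2);
      assert(folded == computed);
      return 0;
    }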
TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* object = m.IntPtrConstant(0xdeadbeef);
int offset = 16;
Node* load_field = m.LoadObjectField(object, offset);
@@ -608,7 +642,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime2) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* arg1 = m.Int32Constant(2);
Node* arg2 = m.Int32Constant(3);
Node* context = m.Int32Constant(4);
@@ -622,19 +657,21 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
const int kResultSizes[] = {1, 2};
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
TRACED_FOREACH(int, result_size, kResultSizes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Callable builtin = CodeFactory::InterpreterCEntry(isolate(), result_size);
Node* function_id = m.Int32Constant(0);
- Node* first_arg = m.Int32Constant(1);
+ Node* first_arg = m.IntPtrConstant(1);
Node* arg_count = m.Int32Constant(2);
- Node* context = m.Int32Constant(4);
+ Node* context = m.IntPtrConstant(4);
Matcher<Node*> function_table = IsExternalConstant(
ExternalReference::runtime_function_table_address(isolate()));
Matcher<Node*> function = IsIntPtrAdd(
function_table,
- IsInt32Mul(function_id, IsInt32Constant(sizeof(Runtime::Function))));
+ IsChangeUint32ToWord(IsInt32Mul(
+ function_id, IsInt32Constant(sizeof(Runtime::Function)))));
Matcher<Node*> function_entry =
m.IsLoad(MachineType::Pointer(), function,
IsIntPtrConstant(offsetof(Runtime::Function, entry)));
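The updated CallRuntime matcher also spells out how the interpreter locates a runtime function's entry point: the 32-bit function_id * sizeof(Runtime::Function) product is widened to pointer width (hence the new IsChangeUint32ToWord wrapper), added to the external function-table address, and the entry pointer is loaded at offsetof(Runtime::Function, entry). A toy model of that addressing, with an illustrative Function struct standing in for V8's Runtime::Function:

    // Toy model of runtime-function-table addressing:
    //   entry = Load(table + id * sizeof(Function) + offsetof(Function, entry))
    // Function here is illustrative, not V8's real Runtime::Function.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    struct Function {
      int id;
      void* entry;
    };

    int main() {
      Function table[] = {{0, reinterpret_cast<void*>(0x100)},
                          {1, reinterpret_cast<void*>(0x200)},
                          {2, reinterpret_cast<void*>(0x300)}};
      uint32_t function_id = 2;

      // Widen the 32-bit product to pointer width before adding, mirroring
      // the IsChangeUint32ToWord wrapper around IsInt32Mul in the matcher.
      uintptr_t byte_offset =
          static_cast<uintptr_t>(function_id) * sizeof(Function);
      uint8_t* slot = reinterpret_cast<uint8_t*>(table) + byte_offset +
                      offsetof(Function, entry);
      void* entry = *reinterpret_cast<void**>(slot);
      assert(entry == table[2].entry);
      return 0;
    }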
@@ -653,13 +690,14 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
TailCallMode::kAllow};
TRACED_FOREACH(TailCallMode, tail_call_mode, tail_call_modes) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Callable builtin =
CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
- Node* function = m.Int32Constant(0);
- Node* first_arg = m.Int32Constant(1);
+ Node* function = m.IntPtrConstant(0);
+ Node* first_arg = m.IntPtrConstant(1);
Node* arg_count = m.Int32Constant(2);
- Node* context = m.Int32Constant(3);
+ Node* context = m.IntPtrConstant(3);
Node* call_js =
m.CallJS(function, context, first_arg, arg_count, tail_call_mode);
EXPECT_THAT(call_js, IsCall(_, IsHeapConstant(builtin.code()), arg_count,
@@ -670,7 +708,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadTypeFeedbackVector) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* feedback_vector = m.LoadTypeFeedbackVector();
Matcher<Node*> load_function_matcher =
diff --git a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
index e3e525273a..210a201d07 100644
--- a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
+++ b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
@@ -5,6 +5,7 @@
#ifndef V8_UNITTESTS_INTERPRETER_INTERPRETER_ASSEMBLER_UNITTEST_H_
#define V8_UNITTESTS_INTERPRETER_INTERPRETER_ASSEMBLER_UNITTEST_H_
+#include "src/compiler/code-assembler.h"
#include "src/compiler/machine-operator.h"
#include "src/interpreter/interpreter-assembler.h"
#include "test/unittests/test-utils.h"
@@ -16,6 +17,14 @@ namespace interpreter {
using ::testing::Matcher;
+class InterpreterAssemblerTest;
+
+class InterpreterAssemblerTestState : public compiler::CodeAssemblerState {
+ public:
+ InterpreterAssemblerTestState(InterpreterAssemblerTest* test,
+ Bytecode bytecode);
+};
+
class InterpreterAssemblerTest : public TestWithIsolateAndZone {
public:
InterpreterAssemblerTest() {}
@@ -24,11 +33,10 @@ class InterpreterAssemblerTest : public TestWithIsolateAndZone {
class InterpreterAssemblerForTest final : public InterpreterAssembler {
public:
InterpreterAssemblerForTest(
- InterpreterAssemblerTest* test, Bytecode bytecode,
+ InterpreterAssemblerTestState* state, Bytecode bytecode,
OperandScale operand_scale = OperandScale::kSingle)
- : InterpreterAssembler(test->isolate(), test->zone(), bytecode,
- operand_scale) {}
- ~InterpreterAssemblerForTest() override;
+ : InterpreterAssembler(state, bytecode, operand_scale) {}
+ ~InterpreterAssemblerForTest();
Matcher<compiler::Node*> IsLoad(
const Matcher<compiler::LoadRepresentation>& rep_matcher,