Diffstat (limited to 'deps/v8/src/interpreter/bytecode-array-writer.cc')
-rw-r--r-- | deps/v8/src/interpreter/bytecode-array-writer.cc | 148
1 file changed, 42 insertions, 106 deletions
diff --git a/deps/v8/src/interpreter/bytecode-array-writer.cc b/deps/v8/src/interpreter/bytecode-array-writer.cc
index 6694a3697c..fb3876819e 100644
--- a/deps/v8/src/interpreter/bytecode-array-writer.cc
+++ b/deps/v8/src/interpreter/bytecode-array-writer.cc
@@ -21,27 +21,23 @@ BytecodeArrayWriter::BytecodeArrayWriter(
     Zone* zone, ConstantArrayBuilder* constant_array_builder,
     SourcePositionTableBuilder::RecordingMode source_position_mode)
     : bytecodes_(zone),
-      max_register_count_(0),
       unbound_jumps_(0),
       source_position_table_builder_(zone, source_position_mode),
-      constant_array_builder_(constant_array_builder) {}
+      constant_array_builder_(constant_array_builder) {
+  bytecodes_.reserve(512);  // Derived via experimentation.
+}
 
 // override
 BytecodeArrayWriter::~BytecodeArrayWriter() {}
 
 // override
 Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
-    Isolate* isolate, int fixed_register_count, int parameter_count,
+    Isolate* isolate, int register_count, int parameter_count,
     Handle<FixedArray> handler_table) {
   DCHECK_EQ(0, unbound_jumps_);
 
   int bytecode_size = static_cast<int>(bytecodes()->size());
-
-  // All locals need a frame slot for the debugger, but may not be
-  // present in generated code.
-  int frame_size_for_locals = fixed_register_count * kPointerSize;
-  int frame_size_used = max_register_count() * kPointerSize;
-  int frame_size = std::max(frame_size_for_locals, frame_size_used);
+  int frame_size = register_count * kPointerSize;
   Handle<FixedArray> constant_pool =
       constant_array_builder()->ToFixedArray(isolate);
   Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
@@ -104,116 +100,48 @@ void BytecodeArrayWriter::UpdateSourcePositionTable(
   }
 }
 
-namespace {
-
-OperandScale ScaleForScalableByteOperand(OperandSize operand_size) {
-  STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
-                static_cast<int>(OperandScale::kSingle));
-  STATIC_ASSERT(static_cast<int>(OperandSize::kShort) ==
-                static_cast<int>(OperandScale::kDouble));
-  STATIC_ASSERT(static_cast<int>(OperandSize::kQuad) ==
-                static_cast<int>(OperandScale::kQuadruple));
-  return static_cast<OperandScale>(operand_size);
-}
-
-OperandScale OperandScaleForScalableSignedByte(uint32_t operand_value) {
-  int32_t signed_operand = static_cast<int32_t>(operand_value);
-  OperandSize bytes_required = Bytecodes::SizeForSignedOperand(signed_operand);
-  return ScaleForScalableByteOperand(bytes_required);
-}
-
-OperandScale OperandScaleForScalableUnsignedByte(uint32_t operand_value) {
-  OperandSize bytes_required = Bytecodes::SizeForUnsignedOperand(operand_value);
-  return ScaleForScalableByteOperand(bytes_required);
-}
-
-OperandScale GetOperandScale(const BytecodeNode* const node) {
-  const OperandTypeInfo* operand_type_infos =
-      Bytecodes::GetOperandTypeInfos(node->bytecode());
-  OperandScale operand_scale = OperandScale::kSingle;
-  int operand_count = node->operand_count();
-  for (int i = 0; i < operand_count; ++i) {
-    switch (operand_type_infos[i]) {
-      case OperandTypeInfo::kScalableSignedByte: {
-        uint32_t operand = node->operand(i);
-        operand_scale =
-            std::max(operand_scale, OperandScaleForScalableSignedByte(operand));
-        break;
-      }
-      case OperandTypeInfo::kScalableUnsignedByte: {
-        uint32_t operand = node->operand(i);
-        operand_scale = std::max(operand_scale,
-                                 OperandScaleForScalableUnsignedByte(operand));
-        break;
-      }
-      case OperandTypeInfo::kFixedUnsignedByte:
-      case OperandTypeInfo::kFixedUnsignedShort:
-        break;
-      case OperandTypeInfo::kNone:
-        UNREACHABLE();
-        break;
-    }
-  }
-  return operand_scale;
-}
-
-}  // namespace
-
 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
   DCHECK_NE(node->bytecode(), Bytecode::kIllegal);
 
-  uint8_t buffer[kMaxSizeOfPackedBytecode];
-  uint8_t* buffer_limit = buffer;
+  Bytecode bytecode = node->bytecode();
+  OperandScale operand_scale = node->operand_scale();
 
-  OperandScale operand_scale = GetOperandScale(node);
   if (operand_scale != OperandScale::kSingle) {
     Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
-    *buffer_limit++ = Bytecodes::ToByte(prefix);
+    bytecodes()->push_back(Bytecodes::ToByte(prefix));
   }
-
-  Bytecode bytecode = node->bytecode();
-  *buffer_limit++ = Bytecodes::ToByte(bytecode);
+  bytecodes()->push_back(Bytecodes::ToByte(bytecode));
 
   const uint32_t* const operands = node->operands();
-  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
-  const int operand_count = Bytecodes::NumberOfOperands(bytecode);
+  const int operand_count = node->operand_count();
+  const OperandSize* operand_sizes =
+      Bytecodes::GetOperandSizes(bytecode, operand_scale);
   for (int i = 0; i < operand_count; ++i) {
-    OperandSize operand_size =
-        Bytecodes::SizeOfOperand(operand_types[i], operand_scale);
-    switch (operand_size) {
+    switch (operand_sizes[i]) {
       case OperandSize::kNone:
         UNREACHABLE();
         break;
       case OperandSize::kByte:
-        *buffer_limit++ = static_cast<uint8_t>(operands[i]);
+        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
-        WriteUnalignedUInt16(buffer_limit, operands[i]);
-        buffer_limit += 2;
+        uint16_t operand = static_cast<uint16_t>(operands[i]);
+        const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
+        bytecodes()->push_back(raw_operand[0]);
+        bytecodes()->push_back(raw_operand[1]);
         break;
       }
       case OperandSize::kQuad: {
-        WriteUnalignedUInt32(buffer_limit, operands[i]);
-        buffer_limit += 4;
+        const uint8_t* raw_operand =
+            reinterpret_cast<const uint8_t*>(&operands[i]);
+        bytecodes()->push_back(raw_operand[0]);
+        bytecodes()->push_back(raw_operand[1]);
+        bytecodes()->push_back(raw_operand[2]);
+        bytecodes()->push_back(raw_operand[3]);
         break;
       }
     }
-
-    int count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_types[i]);
-    if (count == 0) {
-      continue;
-    }
-    // NB operand_types is terminated by OperandType::kNone so
-    // operand_types[i + 1] is valid whilst i < operand_count.
-    if (operand_types[i + 1] == OperandType::kRegCount) {
-      count = static_cast<int>(operands[i]);
-    }
-    Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
-    max_register_count_ = std::max(max_register_count_, reg.index() + count);
   }
-
-  DCHECK_LE(buffer_limit, buffer + sizeof(buffer));
-  bytecodes()->insert(bytecodes()->end(), buffer, buffer_limit);
 }
 
 // static
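With the operand scale now precomputed on the BytecodeNode, the new EmitBytecode above drops the GetOperandScale helpers and the temporary packing buffer: it appends operand bytes straight into the bytecode vector via reinterpret_cast, which yields the machine's native byte order (little-endian on the architectures V8's interpreter targets). A minimal standalone sketch of the same append pattern, using plain C++ with illustrative names rather than V8's API:

#include <cstdint>
#include <cstdio>
#include <vector>

// Append a 16-bit operand one byte at a time, in native byte order.
void PushUInt16(std::vector<uint8_t>* bytes, uint16_t operand) {
  const uint8_t* raw = reinterpret_cast<const uint8_t*>(&operand);
  bytes->push_back(raw[0]);
  bytes->push_back(raw[1]);
}

// Same pattern for a 32-bit operand.
void PushUInt32(std::vector<uint8_t>* bytes, uint32_t operand) {
  const uint8_t* raw = reinterpret_cast<const uint8_t*>(&operand);
  for (int i = 0; i < 4; ++i) bytes->push_back(raw[i]);
}

int main() {
  std::vector<uint8_t> bytes;
  PushUInt16(&bytes, 0x0102);
  PushUInt32(&bytes, 0x03040506);
  // Prints "02 01 06 05 04 03" on a little-endian machine.
  for (uint8_t b : bytes) std::printf("%02x ", b);
  std::printf("\n");
}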
@@ -247,18 +175,17 @@ void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
   DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
   size_t operand_location = jump_location + 1;
   DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
-  if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) {
-    // The jump fits within the range of an Imm operand, so cancel
+  if (Bytecodes::ScaleForSignedOperand(delta) == OperandScale::kSingle) {
+    // The jump fits within the range of an Imm8 operand, so cancel
     // the reservation and jump directly.
     constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
     bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
   } else {
-    // The jump does not fit within the range of an Imm operand, so
+    // The jump does not fit within the range of an Imm8 operand, so
     // commit reservation putting the offset into the constant pool,
     // and update the jump instruction and operand.
     size_t entry = constant_array_builder()->CommitReservedEntry(
         OperandSize::kByte, Smi::FromInt(delta));
-    DCHECK_LE(entry, kMaxUInt32);
     DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
               OperandSize::kByte);
     jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
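The patching logic above leaves an 8-bit placeholder at the operand location when the jump is emitted; once the label binds, the delta either fits in an Imm8 and is written in place, or the jump is rewritten to its constant-pool form with the real offset stored out of line. A simplified sketch of that decision, with hypothetical opcode and pool-entry values standing in for V8's constant-pool machinery:

#include <cassert>
#include <cstdint>
#include <limits>
#include <vector>

bool FitsInInt8(int delta) {
  return delta >= std::numeric_limits<int8_t>::min() &&
         delta <= std::numeric_limits<int8_t>::max();
}

void PatchJump8(std::vector<uint8_t>* bytes, size_t jump_location, int delta,
                uint8_t jump_constant_opcode, uint8_t pool_entry) {
  size_t operand_location = jump_location + 1;
  if (FitsInInt8(delta)) {
    // Fits in an Imm8 operand: write the delta directly over the placeholder.
    (*bytes)[operand_location] = static_cast<uint8_t>(delta);
  } else {
    // Too far: switch to the constant-pool form of the jump and store the
    // index of the pool entry holding the real offset.
    (*bytes)[jump_location] = jump_constant_opcode;
    (*bytes)[operand_location] = pool_entry;
  }
}

int main() {
  // Jump at index 0: opcode byte plus one placeholder operand byte.
  std::vector<uint8_t> bytes = {/*kJumpImm8=*/0x10, 0x00};
  PatchJump8(&bytes, 0, -5, /*kJumpConstant=*/0x11, /*pool_entry=*/0);
  assert(bytes[1] == static_cast<uint8_t>(-5));
}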
@@ -273,14 +200,21 @@ void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
   DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
   size_t operand_location = jump_location + 1;
   uint8_t operand_bytes[2];
-  if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) {
+  if (Bytecodes::ScaleForSignedOperand(delta) <= OperandScale::kDouble) {
+    // The jump fits within the range of an Imm16 operand, so cancel
+    // the reservation and jump directly.
     constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
     WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
   } else {
-    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
-    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
+    // The jump does not fit within the range of an Imm16 operand, so
+    // commit reservation putting the offset into the constant pool,
+    // and update the jump instruction and operand.
     size_t entry = constant_array_builder()->CommitReservedEntry(
         OperandSize::kShort, Smi::FromInt(delta));
+    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
+              OperandSize::kShort);
+    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
+    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
     WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
   }
   DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
@@ -351,13 +285,14 @@ void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
     // Label has been bound already so this is a backwards jump.
     size_t abs_delta = current_offset - label->offset();
     int delta = -static_cast<int>(abs_delta);
-    OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta);
-    if (operand_size > OperandSize::kByte) {
+    OperandScale operand_scale = Bytecodes::ScaleForSignedOperand(delta);
+    if (operand_scale > OperandScale::kSingle) {
       // Adjust for scaling byte prefix for wide jump offset.
       DCHECK_LE(delta, 0);
       delta -= 1;
     }
-    node->set_bytecode(node->bytecode(), delta);
+    DCHECK_EQ(Bytecode::kJumpLoop, node->bytecode());
+    node->set_bytecode(node->bytecode(), delta, node->operand(1));
   } else {
     // The label has not yet been bound so this is a forward reference
     // that will be patched when the label is bound. We create a
@@ -369,6 +304,7 @@ void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
     label->set_referrer(current_offset);
     OperandSize reserved_operand_size =
         constant_array_builder()->CreateReservedEntry();
+    DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
     switch (reserved_operand_size) {
       case OperandSize::kNone:
         UNREACHABLE();
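In the EmitJump hunk above, a backward jump whose offset needs a wide encoding gains a Wide/ExtraWide prefix byte in front of the jump itself, so the negative delta must be stretched by one more byte to still land on the target. A standalone sketch of that adjustment, assuming V8's 1/2/4-byte operand scales (the enum values mirror kSingle/kDouble/kQuadruple; the function names are illustrative):

#include <cassert>
#include <cstddef>

enum class OperandScale { kSingle = 1, kDouble = 2, kQuadruple = 4 };

// Smallest scale whose signed operand range contains |value|.
OperandScale ScaleForSignedOperand(int value) {
  if (value >= -128 && value <= 127) return OperandScale::kSingle;
  if (value >= -32768 && value <= 32767) return OperandScale::kDouble;
  return OperandScale::kQuadruple;
}

int BackwardJumpDelta(size_t current_offset, size_t label_offset) {
  assert(label_offset <= current_offset);
  int delta = -static_cast<int>(current_offset - label_offset);
  if (ScaleForSignedOperand(delta) > OperandScale::kSingle) {
    // A scaling prefix byte precedes the jump, moving the jump one byte
    // further away from its target.
    delta -= 1;
  }
  return delta;
}

int main() {
  // Short backward jump: fits in one byte, no adjustment.
  assert(BackwardJumpDelta(100, 60) == -40);
  // Long backward jump: prefix byte added, delta stretched by one.
  assert(BackwardJumpDelta(500, 100) == -401);
}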