author     Ryan Dahl <ry@tinyclouds.org>  2011-03-25 12:02:38 -0700
committer  Ryan Dahl <ry@tinyclouds.org>  2011-03-25 12:02:38 -0700
commit     c8ee19a618c77539b9eeba6ab57ab6be03ddd163 (patch)
tree       6392fb8070e056a32f71b55db2f2b07c0eec1d03
parent     53dc74e12f87c49515082f409c9cdc5e6e67974c (diff)
download   node-new-c8ee19a618c77539b9eeba6ab57ab6be03ddd163.tar.gz
Upgrade V8 to 3.1.8.5
-rw-r--r--  deps/v8/SConstruct                                         |  1
-rw-r--r--  deps/v8/src/arm/deoptimizer-arm.cc                         |  5
-rw-r--r--  deps/v8/src/arm/lithium-codegen-arm.cc                     |  1
-rw-r--r--  deps/v8/src/assembler.cc                                   |  1
-rw-r--r--  deps/v8/src/assembler.h                                    |  3
-rw-r--r--  deps/v8/src/deoptimizer.h                                  |  7
-rw-r--r--  deps/v8/src/hydrogen-instructions.h                        |  9
-rw-r--r--  deps/v8/src/ia32/deoptimizer-ia32.cc                       | 74
-rw-r--r--  deps/v8/src/ia32/lithium-codegen-ia32.cc                   | 40
-rw-r--r--  deps/v8/src/version.cc                                     |  2
-rw-r--r--  deps/v8/src/x64/deoptimizer-x64.cc                         |  5
-rw-r--r--  deps/v8/src/x64/lithium-codegen-x64.cc                     | 18
-rw-r--r--  deps/v8/test/mjsunit/compiler/regress-loadfield.js         | 65
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js   | 52
14 files changed, 235 insertions, 48 deletions
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 2b0ce2412d..84707e9847 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -302,6 +302,7 @@ V8_EXTRA_FLAGS = {
'gcc': {
'all': {
'WARNINGFLAGS': ['-Wall',
+ '-Werror',
'-W',
'-Wno-unused-parameter',
'-Wnon-virtual-dtor']
diff --git a/deps/v8/src/arm/deoptimizer-arm.cc b/deps/v8/src/arm/deoptimizer-arm.cc
index 9a5aa902b8..339841875a 100644
--- a/deps/v8/src/arm/deoptimizer-arm.cc
+++ b/deps/v8/src/arm/deoptimizer-arm.cc
@@ -44,6 +44,11 @@ int Deoptimizer::patch_size() {
}
+void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
+ // Nothing to do. No new relocation information is written for lazy
+ // deoptimization on ARM.
+}
+
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation;
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index c5e9271760..c0f5800bf4 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -75,6 +75,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
+ Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc
index 030d15c514..68f01dec05 100644
--- a/deps/v8/src/assembler.cc
+++ b/deps/v8/src/assembler.cc
@@ -139,6 +139,7 @@ const int kPCJumpTag = (1 << kExtraTagBits) - 1;
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
+const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
const int kVariableLengthPCJumpTopTag = 1;
const int kChunkBits = 7;
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h
index 9e6aa087aa..9b9eea6c80 100644
--- a/deps/v8/src/assembler.h
+++ b/deps/v8/src/assembler.h
@@ -192,6 +192,9 @@ class RelocInfo BASE_EMBEDDED {
// The maximum size for a call instruction including pc-jump.
static const int kMaxCallSize = 6;
+ // The maximum pc delta that will use the short encoding.
+ static const int kMaxSmallPCDelta;
+
enum Mode {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode).
CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.
diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h
index 1d4f4770f9..90495c9768 100644
--- a/deps/v8/src/deoptimizer.h
+++ b/deps/v8/src/deoptimizer.h
@@ -110,6 +110,13 @@ class Deoptimizer : public Malloced {
int fp_to_sp_delta);
static Deoptimizer* Grab();
+ // Makes sure that there is enough room in the relocation
+ // information of a code object to perform lazy deoptimization
+ // patching. If there is not enough room a new relocation
+ // information object is allocated and comments are added until it
+ // is big enough.
+ static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
+
// Deoptimize the function now. Its current optimized code will never be run
// again and any activations of the optimized code will get deoptimized when
// execution returns.
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index 22916f5034..35ff29749b 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -1423,8 +1423,9 @@ class HJSArrayLength: public HUnaryOperation {
// object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number.
set_representation(Representation::Tagged());
- SetFlag(kDependsOnArrayLengths);
SetFlag(kUseGVN);
+ SetFlag(kDependsOnArrayLengths);
+ SetFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) const {
@@ -1442,8 +1443,8 @@ class HFixedArrayLength: public HUnaryOperation {
public:
explicit HFixedArrayLength(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
- SetFlag(kDependsOnArrayLengths);
SetFlag(kUseGVN);
+ SetFlag(kDependsOnArrayLengths);
}
virtual Representation RequiredInputRepresentation(int index) const {
@@ -2268,6 +2269,7 @@ class HCompareJSObjectEq: public HBinaryOperation {
: HBinaryOperation(left, right) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
+ SetFlag(kDependsOnMaps);
}
virtual bool EmitAtUses() const {
@@ -2943,6 +2945,7 @@ class HLoadNamedField: public HUnaryOperation {
offset_(offset) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
+ SetFlag(kDependsOnMaps);
if (is_in_object) {
SetFlag(kDependsOnInobjectFields);
} else {
@@ -3269,6 +3272,7 @@ class HStringCharCodeAt: public HBinaryOperation {
: HBinaryOperation(string, index) {
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
+ SetFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) const {
@@ -3296,6 +3300,7 @@ class HStringLength: public HUnaryOperation {
explicit HStringLength(HValue* string) : HUnaryOperation(string) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
+ SetFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) const {
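
The kDependsOnMaps flags added above tell GVN that these values (named field loads, string lengths, and the like) are only reusable as long as no intervening instruction may change an object's map. A minimal JavaScript sketch of the hazard, with made-up names (compare the regress-loadfield.js test added at the end of this patch):

    function f(o) {
      var x = o.foo;      // first load of o.foo (a named field load)
      o.bar = 1;          // adding a property can transition o to a new map
      return o.foo + x;   // with kDependsOnMaps set, GVN will not reuse the
                          // first load's value across the map change
    }
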
diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc
index 5f4d94449a..21193ce0b8 100644
--- a/deps/v8/src/ia32/deoptimizer-ia32.cc
+++ b/deps/v8/src/ia32/deoptimizer-ia32.cc
@@ -55,6 +55,80 @@ static void ZapCodeRange(Address start, Address end) {
}
+void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
+ HandleScope scope;
+
+ // Compute the size of relocation information needed for the code
+ // patching in Deoptimizer::DeoptimizeFunction.
+ int min_reloc_size = 0;
+ Address prev_reloc_address = code->instruction_start();
+ Address code_start_address = code->instruction_start();
+ SafepointTable table(*code);
+ for (unsigned i = 0; i < table.length(); ++i) {
+ Address curr_reloc_address = code_start_address + table.GetPcOffset(i);
+ ASSERT_GE(curr_reloc_address, prev_reloc_address);
+ SafepointEntry safepoint_entry = table.GetEntry(i);
+ int deoptimization_index = safepoint_entry.deoptimization_index();
+ if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
+ // The gap code is needed to get to the state expected at the
+ // bailout and we need to skip the call opcode to get to the
+ // address that needs reloc.
+ curr_reloc_address += safepoint_entry.gap_code_size() + 1;
+ int pc_delta = curr_reloc_address - prev_reloc_address;
+ // We use RUNTIME_ENTRY reloc info which has a size of 2 bytes
+ // if encodable with small pc delta encoding and up to 6 bytes
+ // otherwise.
+ if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
+ min_reloc_size += 2;
+ } else {
+ min_reloc_size += 6;
+ }
+ prev_reloc_address = curr_reloc_address;
+ }
+ }
+
+ // If the relocation information is not big enough we create a new
+ // relocation info object that is padded with comments to make it
+ // big enough for lazy deoptimization.
+ int reloc_length = code->relocation_info()->length();
+ if (min_reloc_size > reloc_length) {
+ int comment_reloc_size = RelocInfo::kMinRelocCommentSize;
+ // Padding needed.
+ int min_padding = min_reloc_size - reloc_length;
+ // Number of comments needed to take up at least that much space.
+ int additional_comments =
+ (min_padding + comment_reloc_size - 1) / comment_reloc_size;
+ // Actual padding size.
+ int padding = additional_comments * comment_reloc_size;
+ // Allocate new relocation info and copy old relocation to the end
+ // of the new relocation info array because relocation info is
+ // written and read backwards.
+ Handle<ByteArray> new_reloc =
+ Factory::NewByteArray(reloc_length + padding, TENURED);
+ memcpy(new_reloc->GetDataStartAddress() + padding,
+ code->relocation_info()->GetDataStartAddress(),
+ reloc_length);
+ // Create a relocation writer to write the comments in the padding
+ // space. Use position 0 for everything to ensure short encoding.
+ RelocInfoWriter reloc_info_writer(
+ new_reloc->GetDataStartAddress() + padding, 0);
+ intptr_t comment_string
+ = reinterpret_cast<intptr_t>(RelocInfo::kFillerCommentString);
+ RelocInfo rinfo(0, RelocInfo::COMMENT, comment_string);
+ for (int i = 0; i < additional_comments; ++i) {
+#ifdef DEBUG
+ byte* pos_before = reloc_info_writer.pos();
+#endif
+ reloc_info_writer.Write(&rinfo);
+ ASSERT(RelocInfo::kMinRelocCommentSize ==
+ pos_before - reloc_info_writer.pos());
+ }
+ // Replace relocation information on the code object.
+ code->set_relocation_info(*new_reloc);
+ }
+}
+
+
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation;
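
The padding computation above rounds the reloc-info shortfall up to a whole number of filler comments. A small worked example of that arithmetic with hypothetical byte counts (the numbers are placeholders, not V8's real constants):

    // Illustration only -- every number here is made up.
    var minRelocSize = 130;  // bytes needed for RUNTIME_ENTRY patching
    var relocLength  = 90;   // bytes of reloc info the code object already has
    var commentSize  = 6;    // stand-in for RelocInfo::kMinRelocCommentSize
    var minPadding = minRelocSize - relocLength;                   // 40
    var additionalComments = Math.ceil(minPadding / commentSize);  // 7
    var padding = additionalComments * commentSize;                // 42 >= 40
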
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 0b345d31ad..2b7712c439 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -31,6 +31,7 @@
#include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h"
+#include "deoptimizer.h"
#include "stub-cache.h"
namespace v8 {
@@ -43,20 +44,13 @@ class SafepointGenerator : public PostCallGenerator {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index,
- bool ensure_reloc_space = false)
+ int deoptimization_index)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index),
- ensure_reloc_space_(ensure_reloc_space) { }
+ deoptimization_index_(deoptimization_index) {}
virtual ~SafepointGenerator() { }
virtual void Generate() {
- // Ensure that we have enough space in the reloc info to patch
- // this with calls when doing deoptimization.
- if (ensure_reloc_space_) {
- codegen_->EnsureRelocSpaceForDeoptimization();
- }
codegen_->RecordSafepoint(pointers_, deoptimization_index_);
}
@@ -64,7 +58,6 @@ class SafepointGenerator : public PostCallGenerator {
LCodeGen* codegen_;
LPointerMap* pointers_;
int deoptimization_index_;
- bool ensure_reloc_space_;
};
@@ -78,7 +71,6 @@ bool LCodeGen::GenerateCode() {
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
- GenerateRelocPadding() &&
GenerateSafepointTable();
}
@@ -88,6 +80,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
+ Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
@@ -385,22 +378,6 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
}
-void LCodeGen::EnsureRelocSpaceForDeoptimization() {
- // Since we patch the reloc info with RUNTIME_ENTRY calls every patch
- // site will take up 2 bytes + any pc-jumps.
- // We are conservative and always reserver 6 bytes in case where a
- // simple pc-jump is not enough.
- uint32_t pc_delta =
- masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset;
- if (is_uintn(pc_delta, 6)) {
- deoptimization_reloc_size.min_size += 2;
- } else {
- deoptimization_reloc_size.min_size += 6;
- }
- deoptimization_reloc_size.last_pc_offset = masm()->pc_offset();
-}
-
-
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
bool is_tagged) {
@@ -454,7 +431,6 @@ void LCodeGen::CallCode(Handle<Code> code,
}
__ call(code, mode);
- EnsureRelocSpaceForDeoptimization();
RegisterLazyDeoptimization(instr);
// Signal that we don't inline smi code before these stubs in the
@@ -479,6 +455,7 @@ void LCodeGen::CallRuntime(Runtime::Function* fun,
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
__ CallRuntime(fun, argc);
+
RegisterLazyDeoptimization(instr);
}
@@ -2299,8 +2276,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
RegisterEnvironmentForDeoptimization(env);
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index(),
- true);
+ env->deoptimization_index());
v8::internal::ParameterCount actual(eax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
@@ -2372,7 +2348,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ CallSelf();
} else {
__ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
- EnsureRelocSpaceForDeoptimization();
}
// Setup deoptimization.
@@ -3835,8 +3810,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
// builtin)
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index(),
- true);
+ env->deoptimization_index());
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index d851a27da0..2a1eb79090 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 1
#define BUILD_NUMBER 8
-#define PATCH_LEVEL 3
+#define PATCH_LEVEL 5
#define CANDIDATE_VERSION false
// Define SONAME to have the SCons build the put a specific SONAME into the
diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc
index 6278142858..61f4381108 100644
--- a/deps/v8/src/x64/deoptimizer-x64.cc
+++ b/deps/v8/src/x64/deoptimizer-x64.cc
@@ -101,6 +101,11 @@ class SafepointTableDeoptimiztionEntryIterator {
};
+void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
+ // TODO(1276): Implement.
+}
+
+
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation;
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index 0ae8a003c9..5bc0275909 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -43,20 +43,16 @@ class SafepointGenerator : public PostCallGenerator {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index,
- bool ensure_reloc_space = false)
+ int deoptimization_index)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index),
- ensure_reloc_space_(ensure_reloc_space) { }
+ deoptimization_index_(deoptimization_index) { }
virtual ~SafepointGenerator() { }
virtual void Generate() {
// Ensure that we have enough space in the reloc info to patch
// this with calls when doing deoptimization.
- if (ensure_reloc_space_) {
- codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
- }
+ codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
codegen_->RecordSafepoint(pointers_, deoptimization_index_);
}
@@ -64,7 +60,6 @@ class SafepointGenerator : public PostCallGenerator {
LCodeGen* codegen_;
LPointerMap* pointers_;
int deoptimization_index_;
- bool ensure_reloc_space_;
};
@@ -87,6 +82,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
+ Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
@@ -2220,8 +2216,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
RegisterEnvironmentForDeoptimization(env);
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index(),
- true);
+ env->deoptimization_index());
v8::internal::ParameterCount actual(rax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
@@ -3597,8 +3592,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
// builtin)
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index(),
- true);
+ env->deoptimization_index());
__ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
diff --git a/deps/v8/test/mjsunit/compiler/regress-loadfield.js b/deps/v8/test/mjsunit/compiler/regress-loadfield.js
new file mode 100644
index 0000000000..a202891900
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/regress-loadfield.js
@@ -0,0 +1,65 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Regression test for GVN on field loads.
+
+function bar() {}
+
+// Make sure there is a transition on adding "bar" inobject property.
+var b = new bar();
+b.bar = "bar";
+
+function test(a) {
+ var b = new Array(10);
+ for (var i = 0; i < 10; i++) {
+ b[i] = new bar();
+ }
+
+ for (var i = 0; i < 10; i++) {
+ b[i].bar = a.foo;
+ }
+}
+
+// Create an object with fast backing store properties.
+var a = {};
+a.p1 = "";
+a.p2 = "";
+a.p3 = "";
+a.p4 = "";
+a.p5 = "";
+a.p6 = "";
+a.p7 = "";
+a.p8 = "";
+a.p9 = "";
+a.p10 = "";
+a.p11 = "";
+a.foo = "foo";
+for (var i = 0; i < 100000; i++) {
+ test(a);
+}
+
+test("");
diff --git a/deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js
new file mode 100644
index 0000000000..f1fe6d55a6
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js
@@ -0,0 +1,52 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Do not generate debug code since that will space things differently
+// in the generated code.
+// Flags: --allow-natives-syntax --expose-gc --nodebug-code
+
+// Regression test for issue where we did not pad the relocation
+// information enough to have room for lazy deoptimization.
+
+function kaboom() {
+ var a = function () {},
+ b = function () {},
+ c, d = function () { var d = []; },
+ e = function () { var e = {}; };
+ c = function () { d(); b(); };
+ return function (x, y) {
+ c();
+ a();
+ return function f() { }({});
+ };
+}
+
+kaboom();
+
+%DeoptimizeFunction(kaboom);
+
+gc();