summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--deps/v8/AUTHORS1
-rw-r--r--deps/v8/ChangeLog12
-rw-r--r--deps/v8/SConstruct4
-rw-r--r--deps/v8/include/v8.h15
-rwxr-xr-xdeps/v8/src/SConscript6
-rw-r--r--deps/v8/src/api.cc34
-rw-r--r--deps/v8/src/api.h14
-rw-r--r--deps/v8/src/arm/builtins-arm.cc194
-rw-r--r--deps/v8/src/arm/codegen-arm.cc17
-rw-r--r--deps/v8/src/arm/constants-arm.cc92
-rw-r--r--deps/v8/src/arm/constants-arm.h28
-rw-r--r--deps/v8/src/arm/debug-arm.cc6
-rw-r--r--deps/v8/src/arm/disasm-arm.cc19
-rw-r--r--deps/v8/src/arm/ic-arm.cc18
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.cc108
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.h62
-rw-r--r--deps/v8/src/arm/regexp-macro-assembler-arm.cc24
-rw-r--r--deps/v8/src/arm/simulator-arm.cc117
-rw-r--r--deps/v8/src/arm/stub-cache-arm.cc141
-rw-r--r--deps/v8/src/bootstrapper.cc2
-rw-r--r--deps/v8/src/builtins.cc5
-rw-r--r--deps/v8/src/builtins.h1
-rw-r--r--deps/v8/src/checks.h32
-rw-r--r--deps/v8/src/codegen.cc5
-rw-r--r--deps/v8/src/codegen.h12
-rw-r--r--deps/v8/src/d8.cc15
-rw-r--r--deps/v8/src/d8.js7
-rw-r--r--deps/v8/src/debug.cc213
-rw-r--r--deps/v8/src/debug.h22
-rw-r--r--deps/v8/src/heap.cc124
-rw-r--r--deps/v8/src/ia32/builtins-ia32.cc19
-rw-r--r--deps/v8/src/ia32/codegen-ia32.cc22
-rw-r--r--deps/v8/src/ia32/debug-ia32.cc21
-rw-r--r--deps/v8/src/ia32/ic-ia32.cc16
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.cc102
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.h35
-rw-r--r--deps/v8/src/ia32/stub-cache-ia32.cc24
-rw-r--r--deps/v8/src/list.h5
-rw-r--r--deps/v8/src/log.cc4
-rw-r--r--deps/v8/src/macro-assembler.h34
-rw-r--r--deps/v8/src/mark-compact.cc52
-rw-r--r--deps/v8/src/mark-compact.h3
-rw-r--r--deps/v8/src/mirror-delay.js3
-rw-r--r--deps/v8/src/objects-debug.cc11
-rw-r--r--deps/v8/src/objects.cc18
-rw-r--r--deps/v8/src/objects.h28
-rw-r--r--deps/v8/src/platform-freebsd.cc6
-rw-r--r--deps/v8/src/platform-linux.cc39
-rw-r--r--deps/v8/src/platform-macos.cc15
-rw-r--r--deps/v8/src/runtime.cc80
-rw-r--r--deps/v8/src/runtime.h417
-rw-r--r--deps/v8/src/serialize.cc31
-rw-r--r--deps/v8/src/serialize.h2
-rw-r--r--deps/v8/src/spaces.cc10
-rw-r--r--deps/v8/src/spaces.h10
-rw-r--r--deps/v8/src/v8threads.cc22
-rw-r--r--deps/v8/src/v8threads.h4
-rw-r--r--deps/v8/src/version.cc2
-rw-r--r--deps/v8/src/x64/assembler-x64.cc8
-rw-r--r--deps/v8/src/x64/builtins-x64.cc17
-rw-r--r--deps/v8/src/x64/codegen-x64.cc50
-rw-r--r--deps/v8/src/x64/debug-x64.cc14
-rw-r--r--deps/v8/src/x64/ic-x64.cc16
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.cc147
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.h71
-rw-r--r--deps/v8/src/x64/regexp-macro-assembler-x64.cc4
-rw-r--r--deps/v8/src/x64/regexp-macro-assembler-x64.h6
-rw-r--r--deps/v8/src/x64/stub-cache-x64.cc24
-rw-r--r--deps/v8/test/cctest/cctest.status4
-rw-r--r--deps/v8/test/cctest/test-assembler-arm.cc6
-rw-r--r--deps/v8/test/cctest/test-debug.cc22
-rw-r--r--deps/v8/test/cctest/test-log.cc161
-rw-r--r--deps/v8/test/cctest/testcfg.py4
-rw-r--r--deps/v8/test/mjsunit/debug-scopes.js101
-rw-r--r--deps/v8/test/mjsunit/debug-step-stub-callfunction.js18
-rw-r--r--deps/v8/test/mjsunit/debug-stepin-call-function-stub.js115
-rw-r--r--deps/v8/test/mjsunit/debug-stepout-recursive-function.js106
-rw-r--r--deps/v8/test/mjsunit/debug-stepout-to-builtin.js84
-rw-r--r--deps/v8/test/mjsunit/function-prototype.js5
-rw-r--r--deps/v8/test/mjsunit/mjsunit.status3
-rw-r--r--[-rwxr-xr-x]deps/v8/test/mjsunit/regress/regress-246.js0
-rw-r--r--[-rwxr-xr-x]deps/v8/test/mjsunit/regress/regress-254.js0
-rw-r--r--deps/v8/test/mjsunit/regress/regress-crbug-18639.js34
-rw-r--r--deps/v8/test/mjsunit/testcfg.py4
-rw-r--r--deps/v8/test/mozilla/mozilla.status4
-rw-r--r--deps/v8/tools/tickprocessor.js2
-rw-r--r--deps/v8/tools/visual_studio/arm.vsprops4
-rw-r--r--deps/v8/tools/visual_studio/common.vsprops2
-rw-r--r--deps/v8/tools/visual_studio/d8_arm.vcproj199
-rw-r--r--deps/v8/tools/visual_studio/ia32.vsprops2
-rw-r--r--deps/v8/tools/visual_studio/v8_arm.sln8
-rw-r--r--deps/v8/tools/visual_studio/v8_arm.vcproj223
-rw-r--r--deps/v8/tools/visual_studio/v8_base_arm.vcproj4
-rw-r--r--deps/v8/tools/visual_studio/v8_cctest_arm.vcproj2
-rw-r--r--deps/v8/tools/visual_studio/v8_process_sample_arm.vcproj151
-rw-r--r--deps/v8/tools/visual_studio/v8_shell_sample_arm.vcproj151
-rw-r--r--deps/v8/tools/visual_studio/x64.vsprops2
97 files changed, 3183 insertions, 980 deletions
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index bfe58a2c37..5c5ae4e1b8 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -17,3 +17,4 @@ Paolo Giarrusso <p.giarrusso@gmail.com>
Rafal Krypa <rafal@krypa.net>
Rene Rebe <rene@exactcode.de>
Ryan Dahl <coldredlemur@gmail.com>
+Patrick Gansterer <paroga@paroga.com>
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index b07e7cc15e..a78755b63a 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,15 @@
+2009-09-09: Version 1.3.10
+
+ Fixed profiler on Mac in 64-bit mode.
+
+ Optimized creation of objects from simple constructor functions on
+ ARM.
+
+ Fixed a number of debugger issues.
+
+ Reduced the amount of memory consumed by V8.
+
+
2009-09-02: Version 1.3.9
Optimized stack guard checks on ARM.
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 71673c0f0a..ddd0190af7 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -258,6 +258,10 @@ V8_EXTRA_FLAGS = {
'all': {
'WARNINGFLAGS': ['/WX', '/wd4355', '/wd4800']
},
+ 'library:shared': {
+ 'CPPDEFINES': ['BUILDING_V8_SHARED'],
+ 'LIBS': ['winmm', 'ws2_32']
+ },
'arch:ia32': {
'WARNINGFLAGS': ['/W3']
},
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index 346050d5d3..2789bad216 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -979,8 +979,9 @@ class V8EXPORT String : public Primitive {
public:
explicit Utf8Value(Handle<v8::Value> obj);
~Utf8Value();
- char* operator*() const { return str_; }
- int length() { return length_; }
+ char* operator*() { return str_; }
+ const char* operator*() const { return str_; }
+ int length() const { return length_; }
private:
char* str_;
int length_;
@@ -1001,8 +1002,9 @@ class V8EXPORT String : public Primitive {
public:
explicit AsciiValue(Handle<v8::Value> obj);
~AsciiValue();
- char* operator*() const { return str_; }
- int length() { return length_; }
+ char* operator*() { return str_; }
+ const char* operator*() const { return str_; }
+ int length() const { return length_; }
private:
char* str_;
int length_;
@@ -1022,8 +1024,9 @@ class V8EXPORT String : public Primitive {
public:
explicit Value(Handle<v8::Value> obj);
~Value();
- uint16_t* operator*() const { return str_; }
- int length() { return length_; }
+ uint16_t* operator*() { return str_; }
+ const uint16_t* operator*() const { return str_; }
+ int length() const { return length_; }
private:
uint16_t* str_;
int length_;
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index fee3fab431..a1cbf1ba29 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -56,9 +56,9 @@ SOURCES = {
],
'arch:arm': [
'arm/assembler-arm.cc', 'arm/builtins-arm.cc', 'arm/cfg-arm.cc',
- 'arm/codegen-arm.cc', 'arm/cpu-arm.cc', 'arm/disasm-arm.cc',
- 'arm/debug-arm.cc', 'arm/frames-arm.cc', 'arm/ic-arm.cc',
- 'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc',
+ 'arm/codegen-arm.cc', 'arm/constants-arm.cc', 'arm/cpu-arm.cc',
+ 'arm/disasm-arm.cc', 'arm/debug-arm.cc', 'arm/frames-arm.cc',
+ 'arm/ic-arm.cc', 'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc',
'arm/regexp-macro-assembler-arm.cc',
'arm/register-allocator-arm.cc', 'arm/stub-cache-arm.cc',
'arm/virtual-frame-arm.cc'
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 679e038d3a..1128d3e022 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -427,7 +427,7 @@ void Context::Enter() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
thread_local.EnterContext(env);
- thread_local.SaveContext(i::GlobalHandles::Create(i::Top::context()));
+ thread_local.SaveContext(i::Top::context());
i::Top::set_context(*env);
}
@@ -441,9 +441,8 @@ void Context::Exit() {
}
// Content of 'last_context' could be NULL.
- i::Handle<i::Object> last_context = thread_local.RestoreContext();
- i::Top::set_context(static_cast<i::Context*>(*last_context));
- i::GlobalHandles::Destroy(last_context.location());
+ i::Context* last_context = thread_local.RestoreContext();
+ i::Top::set_context(last_context);
}
@@ -3700,19 +3699,21 @@ char* HandleScopeImplementer::RestoreThreadHelper(char* storage) {
}
-void HandleScopeImplementer::Iterate(
- ObjectVisitor* v,
- List<i::Object**>* blocks,
- v8::ImplementationUtilities::HandleScopeData* handle_data) {
+void HandleScopeImplementer::IterateThis(ObjectVisitor* v) {
// Iterate over all handles in the blocks except for the last.
- for (int i = blocks->length() - 2; i >= 0; --i) {
- Object** block = blocks->at(i);
+ for (int i = Blocks()->length() - 2; i >= 0; --i) {
+ Object** block = Blocks()->at(i);
v->VisitPointers(block, &block[kHandleBlockSize]);
}
// Iterate over live handles in the last block (if any).
- if (!blocks->is_empty()) {
- v->VisitPointers(blocks->last(), handle_data->next);
+ if (!Blocks()->is_empty()) {
+ v->VisitPointers(Blocks()->last(), handle_scope_data_.next);
+ }
+
+ if (!saved_contexts_.is_empty()) {
+ Object** start = reinterpret_cast<Object**>(&saved_contexts_.first());
+ v->VisitPointers(start, start + saved_contexts_.length());
}
}
@@ -3720,18 +3721,15 @@ void HandleScopeImplementer::Iterate(
void HandleScopeImplementer::Iterate(ObjectVisitor* v) {
v8::ImplementationUtilities::HandleScopeData* current =
v8::ImplementationUtilities::CurrentHandleScope();
- Iterate(v, thread_local.Blocks(), current);
+ thread_local.handle_scope_data_ = *current;
+ thread_local.IterateThis(v);
}
char* HandleScopeImplementer::Iterate(ObjectVisitor* v, char* storage) {
HandleScopeImplementer* thread_local =
reinterpret_cast<HandleScopeImplementer*>(storage);
- List<internal::Object**>* blocks_of_archived_thread = thread_local->Blocks();
- v8::ImplementationUtilities::HandleScopeData* handle_data_of_archived_thread =
- &thread_local->handle_scope_data_;
- Iterate(v, blocks_of_archived_thread, handle_data_of_archived_thread);
-
+ thread_local->IterateThis(v);
return storage + ArchiveSpacePerThread();
}
diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h
index ca8f523c94..9ae6307b4d 100644
--- a/deps/v8/src/api.h
+++ b/deps/v8/src/api.h
@@ -352,8 +352,8 @@ class HandleScopeImplementer {
// contexts have been entered.
inline Handle<Object> LastEnteredContext();
- inline void SaveContext(Handle<Object> context);
- inline Handle<Object> RestoreContext();
+ inline void SaveContext(Context* context);
+ inline Context* RestoreContext();
inline bool HasSavedContexts();
inline List<internal::Object**>* Blocks() { return &blocks; }
@@ -368,14 +368,12 @@ class HandleScopeImplementer {
// Used as a stack to keep track of entered contexts.
List<Handle<Object> > entered_contexts_;
// Used as a stack to keep track of saved contexts.
- List<Handle<Object> > saved_contexts_;
+ List<Context*> saved_contexts_;
bool ignore_out_of_memory;
// This is only used for threading support.
v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
- static void Iterate(ObjectVisitor* v,
- List<internal::Object**>* blocks,
- v8::ImplementationUtilities::HandleScopeData* handle_data);
+ void IterateThis(ObjectVisitor* v);
char* RestoreThreadHelper(char* from);
char* ArchiveThreadHelper(char* to);
@@ -386,12 +384,12 @@ class HandleScopeImplementer {
static const int kHandleBlockSize = v8::internal::KB - 2; // fit in one page
-void HandleScopeImplementer::SaveContext(Handle<Object> context) {
+void HandleScopeImplementer::SaveContext(Context* context) {
saved_contexts_.Add(context);
}
-Handle<Object> HandleScopeImplementer::RestoreContext() {
+Context* HandleScopeImplementer::RestoreContext() {
return saved_contexts_.RemoveLast();
}
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index daf2378eb2..920110f92e 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -88,23 +88,200 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Enter a construct frame.
__ EnterConstructFrame();
- // Preserve the two incoming parameters
+ // Preserve the two incoming parameters on the stack.
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
- __ push(r0); // smi-tagged arguments count
- __ push(r1); // constructor function
+ __ push(r0); // Smi-tagged arguments count.
+ __ push(r1); // Constructor function.
+
+ // Use r7 for holding undefined which is used in several places below.
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+
+ // Try to allocate the object without transitioning into C code. If any of the
+ // preconditions is not met, the code bails out to the runtime call.
+ Label rt_call, allocated;
+ if (FLAG_inline_new) {
+ Label undo_allocation;
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ ExternalReference debug_step_in_fp =
+ ExternalReference::debug_step_in_fp_address();
+ __ mov(r2, Operand(debug_step_in_fp));
+ __ ldr(r2, MemOperand(r2));
+ __ tst(r2, r2);
+ __ b(nz, &rt_call);
+#endif
+
+ // Load the initial map and verify that it is in fact a map.
+ // r1: constructor function
+ // r7: undefined
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &rt_call);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ b(ne, &rt_call);
+
+ // Check that the constructor is not constructing a JSFunction (see comments
+ // in Runtime_NewObject in runtime.cc). In which case the initial map's
+ // instance type would be JS_FUNCTION_TYPE.
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
+ __ b(eq, &rt_call);
+
+ // Now allocate the JSObject on the heap.
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ AllocateObjectInNewSpace(r3, r4, r5, r6, &rt_call, NO_ALLOCATION_FLAGS);
+
+ // Allocated the JSObject, now initialize the fields. Map is set to initial
+ // map and properties and elements are set to empty fixed array.
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size
+ // r4: JSObject (not tagged)
+ // r7: undefined
+ __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
+ __ mov(r5, r4);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+
+ // Fill all the in-object properties with undefined.
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r5: First in-object property of JSObject (not tagged)
+ // r7: undefined
+ __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
+ { Label loop, entry;
+ __ b(&entry);
+ __ bind(&loop);
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ __ bind(&entry);
+ __ cmp(r5, Operand(r6));
+ __ b(lt, &loop);
+ }
+
+ // Add the object tag to make the JSObject real, so that we can continue and
+ // jump into the continuation code at any time from now on. Any failures
+ // need to undo the allocation, so that the heap is in a consistent state
+ // and verifiable.
+ __ add(r4, r4, Operand(kHeapObjectTag));
+
+ // Check if a non-empty properties array is needed. Continue with allocated
+ // object if not fall through to runtime call if it is.
+ // r1: constructor function
+ // r4: JSObject
+ // r5: start of next object (not tagged)
+ // r7: undefined
+ __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
+ // The field instance sizes contains both pre-allocated property fields and
+ // in-object properties.
+ __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
+ __ and_(r6,
+ r0,
+ Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
+ __ add(r3, r3, Operand(r6, LSR, Map::kPreAllocatedPropertyFieldsByte * 8));
+ __ and_(r6, r0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
+ __ sub(r3, r3, Operand(r6, LSR, Map::kInObjectPropertiesByte * 8), SetCC);
+
+ // Done if no extra properties are to be allocated.
+ __ b(eq, &allocated);
+ __ Assert(pl, "Property allocation count failed.");
+
+ // Scale the number of elements by pointer size and add the header for
+ // FixedArrays to the start of the next object calculation from above.
+ // r1: constructor
+ // r3: number of elements in properties array
+ // r4: JSObject
+ // r5: start of next object
+ // r7: undefined
+ __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
+ __ AllocateObjectInNewSpace(r0,
+ r5,
+ r6,
+ r2,
+ &undo_allocation,
+ RESULT_CONTAINS_TOP);
+
+ // Initialize the FixedArray.
+ // r1: constructor
+ // r3: number of elements in properties array
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ // r7: undefined
+ __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
+ __ mov(r2, r5);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, Array::kLengthOffset);
+ __ str(r3, MemOperand(r2, kPointerSize, PostIndex));
+
+ // Initialize the fields to undefined.
+ // r1: constructor function
+ // r2: First element of FixedArray (not tagged)
+ // r3: number of elements in properties array
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ // r7: undefined
+ __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
+ { Label loop, entry;
+ __ b(&entry);
+ __ bind(&loop);
+ __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
+ __ bind(&entry);
+ __ cmp(r2, Operand(r6));
+ __ b(lt, &loop);
+ }
+
+ // Store the initialized FixedArray into the properties field of
+ // the JSObject
+ // r1: constructor function
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
+ __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
+
+ // Continue with JSObject being successfully allocated
+ // r1: constructor function
+ // r4: JSObject
+ __ jmp(&allocated);
+
+ // Undo the setting of the new top so that the heap is verifiable. For
+ // example, the map's unused properties potentially do not match the
+ // allocated objects unused properties.
+ // r4: JSObject (previous new top)
+ __ bind(&undo_allocation);
+ __ UndoAllocationInNewSpace(r4, r5);
+ }
- // Allocate the new receiver object.
+ // Allocate the new receiver object using the runtime call.
+ // r1: constructor function
+ __ bind(&rt_call);
__ push(r1); // argument for Runtime_NewObject
__ CallRuntime(Runtime::kNewObject, 1);
- __ push(r0); // save the receiver
+ __ mov(r4, r0);
+
+ // Receiver for constructor call allocated.
+ // r4: JSObject
+ __ bind(&allocated);
+ __ push(r4);
// Push the function and the allocated receiver from the stack.
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ ldr(r1, MemOperand(sp, kPointerSize));
- __ push(r1); // function
- __ push(r0); // receiver
+ __ push(r1); // Constructor function.
+ __ push(r4); // Receiver.
// Reload the number of arguments from the stack.
// r1: constructor function
@@ -194,6 +371,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ LeaveConstructFrame();
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
__ add(sp, sp, Operand(kPointerSize));
+ __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
__ Jump(lr);
}
diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc
index b94aa10bd2..7b3662d4fc 100644
--- a/deps/v8/src/arm/codegen-arm.cc
+++ b/deps/v8/src/arm/codegen-arm.cc
@@ -4950,12 +4950,12 @@ static void AllocateHeapNumber(
Register scratch2) { // Another scratch register.
// Allocate an object in the heap for the heap number and tag it as a heap
// object.
- __ AllocateObjectInNewSpace(HeapNumber::kSize,
+ __ AllocateObjectInNewSpace(HeapNumber::kSize / kPointerSize,
result,
scratch1,
scratch2,
need_gc,
- true);
+ TAG_OBJECT);
// Get heap number map and store it in the allocated object.
__ LoadRoot(scratch1, Heap::kHeapNumberMapRootIndex);
@@ -5623,7 +5623,7 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
// argument, so give it a Smi.
__ mov(r0, Operand(Smi::FromInt(0)));
__ push(r0);
- __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
+ __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1, 1);
__ StubReturn(1);
}
@@ -5678,6 +5678,13 @@ void UnarySubStub::Generate(MacroAssembler* masm) {
}
+int CEntryStub::MinorKey() {
+ ASSERT(result_size_ <= 2);
+ // Result returned in r0 or r0+r1 by default.
+ return 0;
+}
+
+
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// r0 holds the exception.
@@ -6195,7 +6202,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// by calling the runtime system.
__ bind(&slow);
__ push(r1);
- __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
+ __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1, 1);
}
@@ -6216,7 +6223,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
- __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
+ __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
}
diff --git a/deps/v8/src/arm/constants-arm.cc b/deps/v8/src/arm/constants-arm.cc
new file mode 100644
index 0000000000..964bfe14f0
--- /dev/null
+++ b/deps/v8/src/arm/constants-arm.cc
@@ -0,0 +1,92 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "constants-arm.h"
+
+
+namespace assembler {
+namespace arm {
+
+namespace v8i = v8::internal;
+
+
+// These register names are defined in a way to match the native disassembler
+// formatting. See for example the command "objdump -d <binary file>".
+const char* Registers::names_[kNumRegisters] = {
+ "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
+ "r8", "r9", "r10", "fp", "ip", "sp", "lr", "pc",
+};
+
+
+// List of alias names which can be used when referring to ARM registers.
+const Registers::RegisterAlias Registers::aliases_[] = {
+ {10, "sl"},
+ {11, "r11"},
+ {12, "r12"},
+ {13, "r13"},
+ {14, "r14"},
+ {15, "r15"},
+ {kNoRegister, NULL}
+};
+
+
+const char* Registers::Name(int reg) {
+ const char* result;
+ if ((0 <= reg) && (reg < kNumRegisters)) {
+ result = names_[reg];
+ } else {
+ result = "noreg";
+ }
+ return result;
+}
+
+
+int Registers::Number(const char* name) {
+ // Look through the canonical names.
+ for (int i = 0; i < kNumRegisters; i++) {
+ if (strcmp(names_[i], name) == 0) {
+ return i;
+ }
+ }
+
+ // Look through the alias names.
+ int i = 0;
+ while (aliases_[i].reg != kNoRegister) {
+ if (strcmp(aliases_[i].name, name) == 0) {
+ return aliases_[i].reg;
+ }
+ i++;
+ }
+
+ // No register with the reguested name found.
+ return kNoRegister;
+}
+
+
+} } // namespace assembler::arm
diff --git a/deps/v8/src/arm/constants-arm.h b/deps/v8/src/arm/constants-arm.h
index f0311dfc17..2f2b709508 100644
--- a/deps/v8/src/arm/constants-arm.h
+++ b/deps/v8/src/arm/constants-arm.h
@@ -52,6 +52,13 @@
namespace assembler {
namespace arm {
+// Number of registers in normal ARM mode.
+static const int kNumRegisters = 16;
+
+// PC is register 15.
+static const int kPCRegister = 15;
+static const int kNoRegister = -1;
+
// Defines constants and accessor classes to assemble, disassemble and
// simulate ARM instructions.
//
@@ -269,6 +276,27 @@ class Instr {
};
+// Helper functions for converting between register numbers and names.
+class Registers {
+ public:
+ // Return the name of the register.
+ static const char* Name(int reg);
+
+ // Lookup the register number for the name provided.
+ static int Number(const char* name);
+
+ struct RegisterAlias {
+ int reg;
+ const char *name;
+ };
+
+ private:
+ static const char* names_[kNumRegisters];
+ static const RegisterAlias aliases_[];
+};
+
+
+
} } // namespace assembler::arm
#endif // V8_ARM_CONSTANTS_ARM_H_
diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc
index bcfab6c809..e14284136c 100644
--- a/deps/v8/src/arm/debug-arm.cc
+++ b/deps/v8/src/arm/debug-arm.cc
@@ -179,12 +179,6 @@ void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateReturnDebugBreakEntry(MacroAssembler* masm) {
- // Generate nothing as this handling of debug break return is not done this
- // way on ARM - yet.
-}
-
-
void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
// Generate nothing as CodeStub CallFunction is not used on ARM.
}
diff --git a/deps/v8/src/arm/disasm-arm.cc b/deps/v8/src/arm/disasm-arm.cc
index 2638409e85..64314837d6 100644
--- a/deps/v8/src/arm/disasm-arm.cc
+++ b/deps/v8/src/arm/disasm-arm.cc
@@ -57,6 +57,7 @@
#include "v8.h"
+#include "constants-arm.h"
#include "disasm.h"
#include "macro-assembler.h"
#include "platform.h"
@@ -898,16 +899,6 @@ namespace disasm {
namespace v8i = v8::internal;
-static const int kMaxRegisters = 16;
-
-// These register names are defined in a way to match the native disassembler
-// formatting. See for example the command "objdump -d <binary file>".
-static const char* reg_names[kMaxRegisters] = {
- "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
- "r8", "r9", "r10", "fp", "ip", "sp", "lr", "pc",
-};
-
-
const char* NameConverter::NameOfAddress(byte* addr) const {
static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
@@ -921,13 +912,7 @@ const char* NameConverter::NameOfConstant(byte* addr) const {
const char* NameConverter::NameOfCPURegister(int reg) const {
- const char* result;
- if ((0 <= reg) && (reg < kMaxRegisters)) {
- result = reg_names[reg];
- } else {
- result = "noreg";
- }
- return result;
+ return assembler::arm::Registers::Name(reg);
}
diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc
index 848d04b540..d230b4546f 100644
--- a/deps/v8/src/arm/ic-arm.cc
+++ b/deps/v8/src/arm/ic-arm.cc
@@ -391,7 +391,7 @@ void CallIC::Generate(MacroAssembler* masm,
__ mov(r0, Operand(2));
__ mov(r1, Operand(f));
- CEntryStub stub;
+ CEntryStub stub(1);
__ CallStub(&stub);
// Move result to r1 and leave the internal frame.
@@ -503,7 +503,7 @@ void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ stm(db_w, sp, r2.bit() | r3.bit());
// Perform tail call to the entry.
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -543,7 +543,7 @@ void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ ldm(ia, sp, r2.bit() | r3.bit());
__ stm(db_w, sp, r2.bit() | r3.bit());
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -599,7 +599,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ ldm(ia, sp, r0.bit() | r1.bit());
__ stm(db_w, sp, r0.bit() | r1.bit());
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
+ __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2, 1);
// Fast case: Do the load.
__ bind(&fast);
@@ -626,7 +626,7 @@ void KeyedStoreIC::Generate(MacroAssembler* masm,
__ ldm(ia, sp, r2.bit() | r3.bit());
__ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
@@ -684,7 +684,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
__ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+ __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
// Extra capacity case: Check if there is extra capacity to
// perform the store and update the length. Used for adding one
@@ -761,7 +761,7 @@ void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Perform tail call to the entry.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
@@ -798,7 +798,7 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Perform tail call to the entry.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
@@ -814,7 +814,7 @@ void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
// Perform tail call to the entry.
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index 2ca4898682..de2db90085 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -56,6 +56,7 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
#if defined(USE_THUMB_INTERWORK)
#if !defined(__ARM_ARCH_5T__) && \
!defined(__ARM_ARCH_5TE__) && \
+ !defined(__ARM_ARCH_6__) && \
!defined(__ARM_ARCH_7A__) && \
!defined(__ARM_ARCH_7__)
// add tests for other versions above v5t as required
@@ -773,7 +774,7 @@ void MacroAssembler::AllocateObjectInNewSpace(int object_size,
Register scratch1,
Register scratch2,
Label* gc_required,
- bool tag_allocated_object) {
+ AllocationFlags flags) {
ASSERT(!result.is(scratch1));
ASSERT(!scratch1.is(scratch2));
@@ -782,7 +783,18 @@ void MacroAssembler::AllocateObjectInNewSpace(int object_size,
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
mov(scratch1, Operand(new_space_allocation_top));
- ldr(result, MemOperand(scratch1));
+ if ((flags & RESULT_CONTAINS_TOP) == 0) {
+ ldr(result, MemOperand(scratch1));
+ } else {
+#ifdef DEBUG
+ // Assert that result actually contains top on entry. scratch2 is used
+ // immediately below so this use of scratch2 does not cause difference with
+ // respect to register content between debug and release mode.
+ ldr(scratch2, MemOperand(scratch1));
+ cmp(result, scratch2);
+ Check(eq, "Unexpected allocation top");
+#endif
+ }
// Calculate new top and bail out if new space is exhausted. Use result
// to calculate the new top.
@@ -790,7 +802,7 @@ void MacroAssembler::AllocateObjectInNewSpace(int object_size,
ExternalReference::new_space_allocation_limit_address();
mov(scratch2, Operand(new_space_allocation_limit));
ldr(scratch2, MemOperand(scratch2));
- add(result, result, Operand(object_size));
+ add(result, result, Operand(object_size * kPointerSize));
cmp(result, Operand(scratch2));
b(hi, gc_required);
@@ -798,19 +810,98 @@ void MacroAssembler::AllocateObjectInNewSpace(int object_size,
str(result, MemOperand(scratch1));
// Tag and adjust back to start of new object.
- if (tag_allocated_object) {
- sub(result, result, Operand(object_size - kHeapObjectTag));
+ if ((flags & TAG_OBJECT) != 0) {
+ sub(result, result, Operand((object_size * kPointerSize) -
+ kHeapObjectTag));
+ } else {
+ sub(result, result, Operand(object_size * kPointerSize));
+ }
+}
+
+
+void MacroAssembler::AllocateObjectInNewSpace(Register object_size,
+ Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required,
+ AllocationFlags flags) {
+ ASSERT(!result.is(scratch1));
+ ASSERT(!scratch1.is(scratch2));
+
+ // Load address of new object into result and allocation top address into
+ // scratch1.
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+ mov(scratch1, Operand(new_space_allocation_top));
+ if ((flags & RESULT_CONTAINS_TOP) == 0) {
+ ldr(result, MemOperand(scratch1));
} else {
- sub(result, result, Operand(object_size));
+#ifdef DEBUG
+ // Assert that result actually contains top on entry. scratch2 is used
+ // immediately below so this use of scratch2 does not cause difference with
+ // respect to register content between debug and release mode.
+ ldr(scratch2, MemOperand(scratch1));
+ cmp(result, scratch2);
+ Check(eq, "Unexpected allocation top");
+#endif
+ }
+
+ // Calculate new top and bail out if new space is exhausted. Use result
+ // to calculate the new top. Object size is in words so a shift is required to
+ // get the number of bytes
+ ExternalReference new_space_allocation_limit =
+ ExternalReference::new_space_allocation_limit_address();
+ mov(scratch2, Operand(new_space_allocation_limit));
+ ldr(scratch2, MemOperand(scratch2));
+ add(result, result, Operand(object_size, LSL, kPointerSizeLog2));
+ cmp(result, Operand(scratch2));
+ b(hi, gc_required);
+
+ // Update allocation top. result temporarily holds the new top,
+ str(result, MemOperand(scratch1));
+
+ // Adjust back to start of new object.
+ sub(result, result, Operand(object_size, LSL, kPointerSizeLog2));
+
+ // Tag object if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ add(result, result, Operand(kHeapObjectTag));
}
}
+void MacroAssembler::UndoAllocationInNewSpace(Register object,
+ Register scratch) {
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+
+ // Make sure the object has no tag before resetting top.
+ and_(object, object, Operand(~kHeapObjectTagMask));
+#ifdef DEBUG
+ // Check that the object un-allocated is below the current top.
+ mov(scratch, Operand(new_space_allocation_top));
+ ldr(scratch, MemOperand(scratch));
+ cmp(object, scratch);
+ Check(lt, "Undo allocation of non allocated memory");
+#endif
+ // Write the address of the object to un-allocate as the current top.
+ mov(scratch, Operand(new_space_allocation_top));
+ str(object, MemOperand(scratch));
+}
+
+
void MacroAssembler::CompareObjectType(Register function,
Register map,
Register type_reg,
InstanceType type) {
ldr(map, FieldMemOperand(function, HeapObject::kMapOffset));
+ CompareInstanceType(map, type_reg, type);
+}
+
+
+void MacroAssembler::CompareInstanceType(Register map,
+ Register type_reg,
+ InstanceType type) {
ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
cmp(type_reg, Operand(type));
}
@@ -909,7 +1000,8 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
- int num_arguments) {
+ int num_arguments,
+ int result_size) {
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
@@ -925,7 +1017,7 @@ void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
mov(r1, Operand(builtin));
- CEntryStub stub;
+ CEntryStub stub(1);
Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 4bbd9802f3..f45cce51a9 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -38,34 +38,11 @@ namespace internal {
const Register cp = { 8 }; // JavaScript context pointer
-// Helper types to make boolean flag easier to read at call-site.
-enum InvokeFlag {
- CALL_FUNCTION,
- JUMP_FUNCTION
-};
-
enum InvokeJSFlags {
CALL_JS,
JUMP_JS
};
-enum ExitJSFlag {
- RETURN,
- DO_NOT_RETURN
-};
-
-enum CodeLocation {
- IN_JAVASCRIPT,
- IN_JS_ENTRY,
- IN_C_ENTRY
-};
-
-enum HandlerType {
- TRY_CATCH_HANDLER,
- TRY_FINALLY_HANDLER,
- JS_ENTRY_HANDLER
-};
-
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
@@ -190,16 +167,28 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space. If the new space is exhausted control
- // continues at the gc_required label. The allocated object is returned in
- // result. If the flag tag_allocated_object is true the result is tagged as
- // as a heap object.
+ // Allocate an object in new space. The object_size is specified in words (not
+ // bytes). If the new space is exhausted control continues at the gc_required
+ // label. The allocated object is returned in result. If the flag
+ // tag_allocated_object is true the result is tagged as as a heap object.
void AllocateObjectInNewSpace(int object_size,
Register result,
Register scratch1,
Register scratch2,
Label* gc_required,
- bool tag_allocated_object);
+ AllocationFlags flags);
+ void AllocateObjectInNewSpace(Register object_size,
+ Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required,
+ AllocationFlags flags);
+
+ // Undo allocation in new space. The object passed and objects allocated after
+ // it will no longer be allocated. The caller must make sure that no pointers
+ // are left to the object(s) no longer allocated as they would be invalid when
+ // allocation is undone.
+ void UndoAllocationInNewSpace(Register object, Register scratch);
// ---------------------------------------------------------------------------
// Support functions.
@@ -220,12 +209,21 @@ class MacroAssembler: public Assembler {
// It leaves the map in the map register (unless the type_reg and map register
// are the same register). It leaves the heap object in the heap_object
// register unless the heap_object register is the same register as one of the
- // other // registers.
+ // other registers.
void CompareObjectType(Register heap_object,
Register map,
Register type_reg,
InstanceType type);
+ // Compare instance type in a map. map contains a valid map object whose
+ // object type should be compared with the given type. This both
+ // sets the flags and leaves the object type in the type_reg register. It
+ // leaves the heap object in the heap_object register unless the heap_object
+ // register is the same register as type_reg.
+ void CompareInstanceType(Register map,
+ Register type_reg,
+ InstanceType type);
+
inline void BranchOnSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
b(eq, smi_label);
@@ -261,7 +259,9 @@ class MacroAssembler: public Assembler {
// Tail call of a runtime routine (jump).
// Like JumpToBuiltin, but also takes care of passing the number
// of parameters.
- void TailCallRuntime(const ExternalReference& ext, int num_arguments);
+ void TailCallRuntime(const ExternalReference& ext,
+ int num_arguments,
+ int result_size);
// Jump to the builtin routine.
void JumpToBuiltin(const ExternalReference& builtin);
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
index 252d7839fb..2e75a61a84 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -216,25 +216,29 @@ void RegExpMacroAssemblerARM::CheckCharacters(Vector<const uc16> str,
int cp_offset,
Label* on_failure,
bool check_end_of_string) {
- int byte_length = str.length() * char_size();
- int byte_offset = cp_offset * char_size();
- if (check_end_of_string) {
- // Check that there are at least str.length() characters left in the input.
- __ cmp(end_of_input_address(), Operand(-(byte_offset + byte_length)));
- BranchOrBacktrack(gt, on_failure);
- }
-
if (on_failure == NULL) {
- // Instead of inlining a backtrack, (re)use the global backtrack target.
+ // Instead of inlining a backtrack for each test, (re)use the global
+ // backtrack target.
on_failure = &backtrack_label_;
}
+ if (check_end_of_string) {
+ // Is last character of required match inside string.
+ CheckPosition(cp_offset + str.length() - 1, on_failure);
+ }
+
__ add(r0, end_of_input_address(), Operand(current_input_offset()));
+ if (cp_offset != 0) {
+ int byte_offset = cp_offset * char_size();
+ __ add(r0, r0, Operand(byte_offset));
+ }
+
+ // r0 : Address of characters to match against str.
int stored_high_byte = 0;
for (int i = 0; i < str.length(); i++) {
if (mode_ == ASCII) {
__ ldrb(r1, MemOperand(r0, char_size(), PostIndex));
- // str[i] is known to be an ASCII character.
+ ASSERT(str[i] <= String::kMaxAsciiCharCode);
__ cmp(r1, Operand(str[i]));
} else {
__ ldrh(r1, MemOperand(r0, char_size(), PostIndex));
diff --git a/deps/v8/src/arm/simulator-arm.cc b/deps/v8/src/arm/simulator-arm.cc
index e258e5a686..70dfcd2a9d 100644
--- a/deps/v8/src/arm/simulator-arm.cc
+++ b/deps/v8/src/arm/simulator-arm.cc
@@ -70,7 +70,8 @@ class Debugger {
Simulator* sim_;
- bool GetValue(char* desc, int32_t* value);
+ int32_t GetRegisterValue(int regnum);
+ bool GetValue(const char* desc, int32_t* value);
// Set or delete a breakpoint. Returns true if successful.
bool SetBreakpoint(Instr* breakpc);
@@ -132,41 +133,19 @@ void Debugger::Stop(Instr* instr) {
#endif
-static const char* reg_names[] = { "r0", "r1", "r2", "r3",
- "r4", "r5", "r6", "r7",
- "r8", "r9", "r10", "r11",
- "r12", "r13", "r14", "r15",
- "pc", "lr", "sp", "ip",
- "fp", "sl", ""};
-
-static int reg_nums[] = { 0, 1, 2, 3,
- 4, 5, 6, 7,
- 8, 9, 10, 11,
- 12, 13, 14, 15,
- 15, 14, 13, 12,
- 11, 10};
-
-
-static int RegNameToRegNum(char* name) {
- int reg = 0;
- while (*reg_names[reg] != 0) {
- if (strcmp(reg_names[reg], name) == 0) {
- return reg_nums[reg];
- }
- reg++;
+int32_t Debugger::GetRegisterValue(int regnum) {
+ if (regnum == kPCRegister) {
+ return sim_->get_pc();
+ } else {
+ return sim_->get_register(regnum);
}
- return -1;
}
-bool Debugger::GetValue(char* desc, int32_t* value) {
- int regnum = RegNameToRegNum(desc);
- if (regnum >= 0) {
- if (regnum == 15) {
- *value = sim_->get_pc();
- } else {
- *value = sim_->get_register(regnum);
- }
+bool Debugger::GetValue(const char* desc, int32_t* value) {
+ int regnum = Registers::Number(desc);
+ if (regnum != kNoRegister) {
+ *value = GetRegisterValue(regnum);
return true;
} else {
return SScanF(desc, "%i", value) == 1;
@@ -246,7 +225,7 @@ void Debugger::Debug() {
v8::internal::EmbeddedVector<char, 256> buffer;
dasm.InstructionDecode(buffer,
reinterpret_cast<byte*>(sim_->get_pc()));
- PrintF(" 0x%x %s\n", sim_->get_pc(), buffer.start());
+ PrintF(" 0x%08x %s\n", sim_->get_pc(), buffer.start());
last_pc = sim_->get_pc();
}
char* line = ReadLine("sim> ");
@@ -270,13 +249,20 @@ void Debugger::Debug() {
} else if ((strcmp(cmd, "p") == 0) || (strcmp(cmd, "print") == 0)) {
if (args == 2) {
int32_t value;
- if (GetValue(arg1, &value)) {
- PrintF("%s: %d 0x%x\n", arg1, value, value);
+ if (strcmp(arg1, "all") == 0) {
+ for (int i = 0; i < kNumRegisters; i++) {
+ value = GetRegisterValue(i);
+ PrintF("%3s: 0x%08x %10d\n", Registers::Name(i), value, value);
+ }
} else {
- PrintF("%s unrecognized\n", arg1);
+ if (GetValue(arg1, &value)) {
+ PrintF("%s: 0x%08x %d \n", arg1, value, value);
+ } else {
+ PrintF("%s unrecognized\n", arg1);
+ }
}
} else {
- PrintF("print value\n");
+ PrintF("print <register>\n");
}
} else if ((strcmp(cmd, "po") == 0)
|| (strcmp(cmd, "printobject") == 0)) {
@@ -284,16 +270,18 @@ void Debugger::Debug() {
int32_t value;
if (GetValue(arg1, &value)) {
Object* obj = reinterpret_cast<Object*>(value);
- USE(obj);
PrintF("%s: \n", arg1);
-#if defined(DEBUG)
+#ifdef DEBUG
obj->PrintLn();
-#endif // defined(DEBUG)
+#else
+ obj->ShortPrint();
+ PrintF("\n");
+#endif
} else {
PrintF("%s unrecognized\n", arg1);
}
} else {
- PrintF("printobject value\n");
+ PrintF("printobject <value>\n");
}
} else if (strcmp(cmd, "disasm") == 0) {
disasm::NameConverter converter;
@@ -325,7 +313,7 @@ void Debugger::Debug() {
while (cur < end) {
dasm.InstructionDecode(buffer, cur);
- PrintF(" 0x%x %s\n", cur, buffer.start());
+ PrintF(" 0x%08x %s\n", cur, buffer.start());
cur += Instr::kInstrSize;
}
} else if (strcmp(cmd, "gdb") == 0) {
@@ -343,7 +331,7 @@ void Debugger::Debug() {
PrintF("%s unrecognized\n", arg1);
}
} else {
- PrintF("break addr\n");
+ PrintF("break <address>\n");
}
} else if (strcmp(cmd, "del") == 0) {
if (!DeleteBreakpoint(NULL)) {
@@ -362,6 +350,30 @@ void Debugger::Debug() {
} else {
PrintF("Not at debugger stop.");
}
+ } else if ((strcmp(cmd, "h") == 0) || (strcmp(cmd, "help") == 0)) {
+ PrintF("cont\n");
+ PrintF(" continue execution (alias 'c')\n");
+ PrintF("stepi\n");
+ PrintF(" step one instruction (alias 'si')\n");
+ PrintF("print <register>\n");
+ PrintF(" print register content (alias 'p')\n");
+ PrintF(" use register name 'all' to print all registers\n");
+ PrintF("printobject <register>\n");
+ PrintF(" print an object from a register (alias 'po')\n");
+ PrintF("flags\n");
+ PrintF(" print flags\n");
+ PrintF("disasm [<instructions>]\n");
+ PrintF("disasm [[<address>] <instructions>]\n");
+ PrintF(" disassemble code, default is 10 instructions from pc\n");
+ PrintF("gdb\n");
+ PrintF(" enter gdb\n");
+ PrintF("break <address>\n");
+ PrintF(" set a break point on the address\n");
+ PrintF("del\n");
+ PrintF(" delete the breakpoint\n");
+ PrintF("unstop\n");
+ PrintF(" ignore the stop instruction at the current location");
+ PrintF(" from now on\n");
} else {
PrintF("Unknown command: %s\n", cmd);
}
@@ -576,7 +588,7 @@ int Simulator::ReadW(int32_t addr, Instr* instr) {
intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
return *ptr;
}
- PrintF("Unaligned read at %x\n", addr);
+ PrintF("Unaligned read at 0x%08x\n", addr);
UNIMPLEMENTED();
return 0;
}
@@ -588,7 +600,7 @@ void Simulator::WriteW(int32_t addr, int value, Instr* instr) {
*ptr = value;
return;
}
- PrintF("Unaligned write at %x, pc=%p\n", addr, instr);
+ PrintF("Unaligned write at 0x%08x, pc=%p\n", addr, instr);
UNIMPLEMENTED();
}
@@ -598,7 +610,7 @@ uint16_t Simulator::ReadHU(int32_t addr, Instr* instr) {
uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
return *ptr;
}
- PrintF("Unaligned unsigned halfword read at %x, pc=%p\n", addr, instr);
+ PrintF("Unaligned unsigned halfword read at 0x%08x, pc=%p\n", addr, instr);
UNIMPLEMENTED();
return 0;
}
@@ -609,7 +621,7 @@ int16_t Simulator::ReadH(int32_t addr, Instr* instr) {
int16_t* ptr = reinterpret_cast<int16_t*>(addr);
return *ptr;
}
- PrintF("Unaligned signed halfword read at %x\n", addr);
+ PrintF("Unaligned signed halfword read at 0x%08x\n", addr);
UNIMPLEMENTED();
return 0;
}
@@ -621,7 +633,7 @@ void Simulator::WriteH(int32_t addr, uint16_t value, Instr* instr) {
*ptr = value;
return;
}
- PrintF("Unaligned unsigned halfword write at %x, pc=%p\n", addr, instr);
+ PrintF("Unaligned unsigned halfword write at 0x%08x, pc=%p\n", addr, instr);
UNIMPLEMENTED();
}
@@ -632,7 +644,7 @@ void Simulator::WriteH(int32_t addr, int16_t value, Instr* instr) {
*ptr = value;
return;
}
- PrintF("Unaligned halfword write at %x, pc=%p\n", addr, instr);
+ PrintF("Unaligned halfword write at 0x%08x, pc=%p\n", addr, instr);
UNIMPLEMENTED();
}
@@ -671,7 +683,7 @@ uintptr_t Simulator::StackLimit() const {
// Unsupported instructions use Format to print an error and stop execution.
void Simulator::Format(Instr* instr, const char* format) {
- PrintF("Simulator found unsupported instruction:\n 0x%x: %s\n",
+ PrintF("Simulator found unsupported instruction:\n 0x%08x: %s\n",
instr, format);
UNIMPLEMENTED();
}
@@ -1726,7 +1738,8 @@ void Simulator::DecodeUnconditional(Instr* instr) {
uint16_t halfword = ReadH(addr, instr);
set_register(rd, halfword);
} else {
- UNIMPLEMENTED();
+ Debugger dbg(this);
+ dbg.Stop(instr);
}
}
@@ -1741,7 +1754,7 @@ void Simulator::InstructionDecode(Instr* instr) {
v8::internal::EmbeddedVector<char, 256> buffer;
dasm.InstructionDecode(buffer,
reinterpret_cast<byte*>(instr));
- PrintF(" 0x%x %s\n", instr, buffer.start());
+ PrintF(" 0x%08x %s\n", instr, buffer.start());
}
if (instr->ConditionField() == special_condition) {
DecodeUnconditional(instr);
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index 745b541e54..9e44cfa510 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -478,7 +478,7 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference load_callback_property =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(load_callback_property, 5);
+ __ TailCallRuntime(load_callback_property, 5, 1);
}
@@ -514,7 +514,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference load_ic_property =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
- __ TailCallRuntime(load_ic_property, 5);
+ __ TailCallRuntime(load_ic_property, 5, 1);
}
@@ -884,7 +884,7 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
- __ TailCallRuntime(store_callback_property, 4);
+ __ TailCallRuntime(store_callback_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -936,7 +936,7 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallRuntime(store_ic_property, 3);
+ __ TailCallRuntime(store_ic_property, 3, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -1344,7 +1344,138 @@ Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
Object* ConstructStubCompiler::CompileConstructStub(
SharedFunctionInfo* shared) {
- // Not implemented yet - just jump to generic stub.
+ // ----------- S t a t e -------------
+ // -- r0 : argc
+ // -- r1 : constructor
+ // -- lr : return address
+ // -- [sp] : last argument
+ // -----------------------------------
+ Label generic_stub_call;
+
+ // Use r7 for holding undefined which is used in several places below.
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Check to see whether there are any break points in the function code. If
+ // there are jump to the generic constructor stub which calls the actual
+ // code for the function thereby hitting the break points.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
+ __ cmp(r2, r7);
+ __ b(ne, &generic_stub_call);
+#endif
+
+ // Load the initial map and verify that it is in fact a map.
+ // r1: constructor function
+ // r7: undefined
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &generic_stub_call);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ b(ne, &generic_stub_call);
+
+#ifdef DEBUG
+ // Cannot construct functions this way.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
+ __ Check(ne, "Function constructed by construct stub.");
+#endif
+
+ // Now allocate the JSObject in new space.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ AllocateObjectInNewSpace(r3,
+ r4,
+ r5,
+ r6,
+ &generic_stub_call,
+ NO_ALLOCATION_FLAGS);
+
+ // Allocated the JSObject, now initialize the fields. Map is set to initial
+ // map and properties and elements are set to empty fixed array.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r7: undefined
+ __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
+ __ mov(r5, r4);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+
+ // Calculate the location of the first argument. The stack contains only the
+ // argc arguments.
+ __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
+
+ // Fill all the in-object properties with undefined.
+ // r0: argc
+ // r1: first argument
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r5: First in-object property of JSObject (not tagged)
+ // r7: undefined
+ // Fill the initialized properties with a constant value or a passed argument
+ // depending on the this.x = ...; assignment in the function.
+ for (int i = 0; i < shared->this_property_assignments_count(); i++) {
+ if (shared->IsThisPropertyAssignmentArgument(i)) {
+ Label not_passed, next;
+ // Check if the argument assigned to the property is actually passed.
+ int arg_number = shared->GetThisPropertyAssignmentArgument(i);
+ __ cmp(r0, Operand(arg_number));
+ __ b(le, &not_passed);
+ // Argument passed - find it on the stack.
+ __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ __ b(&next);
+ __ bind(&not_passed);
+ // Set the property to undefined.
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ __ bind(&next);
+ } else {
+ // Set the property to the constant value.
+ Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
+ __ mov(r2, Operand(constant));
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ }
+ }
+
+ // Fill the unused in-object property fields with undefined.
+ for (int i = shared->this_property_assignments_count();
+ i < shared->CalculateInObjectProperties();
+ i++) {
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ }
+
+ // r0: argc
+ // r4: JSObject (not tagged)
+ // Move argc to r1 and the JSObject to return to r0 and tag it.
+ __ mov(r1, r0);
+ __ mov(r0, r4);
+ __ orr(r0, r0, Operand(kHeapObjectTag));
+
+ // r0: JSObject
+ // r1: argc
+ // Remove caller arguments and receiver from the stack and return.
+ __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
+ __ add(sp, sp, Operand(kPointerSize));
+ __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
+ __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
+ __ Jump(lr);
+
+ // Jump to the generic stub in case the specialized code cannot handle the
+ // construction.
+ __ bind(&generic_stub_call);
Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index a2c45626be..c1daa57b1f 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -474,7 +474,7 @@ void Genesis::CreateRoots(v8::Handle<v8::ObjectTemplate> global_template,
// Please note that the prototype property for function instances must be
// writable.
Handle<DescriptorArray> function_map_descriptors =
- ComputeFunctionInstanceDescriptor(false, true);
+ ComputeFunctionInstanceDescriptor(false, false);
fm->set_instance_descriptors(*function_map_descriptors);
// Allocate the function map first and then patch the prototype later
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc
index 4262dd2a82..195fe54bef 100644
--- a/deps/v8/src/builtins.cc
+++ b/deps/v8/src/builtins.cc
@@ -609,11 +609,6 @@ static void Generate_Return_DebugBreak(MacroAssembler* masm) {
}
-static void Generate_Return_DebugBreakEntry(MacroAssembler* masm) {
- Debug::GenerateReturnDebugBreakEntry(masm);
-}
-
-
static void Generate_StubNoRegisters_DebugBreak(MacroAssembler* masm) {
Debug::GenerateStubNoRegistersDebugBreak(masm);
}
diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h
index 0f4a610b83..8df767a925 100644
--- a/deps/v8/src/builtins.h
+++ b/deps/v8/src/builtins.h
@@ -90,7 +90,6 @@ namespace internal {
// Define list of builtins used by the debugger implemented in assembly.
#define BUILTIN_LIST_DEBUG_A(V) \
V(Return_DebugBreak, BUILTIN, DEBUG_BREAK) \
- V(Return_DebugBreakEntry, BUILTIN, DEBUG_BREAK) \
V(ConstructCall_DebugBreak, BUILTIN, DEBUG_BREAK) \
V(StubNoRegisters_DebugBreak, BUILTIN, DEBUG_BREAK) \
V(LoadIC_DebugBreak, LOAD_IC, DEBUG_BREAK) \
diff --git a/deps/v8/src/checks.h b/deps/v8/src/checks.h
index 4ecbaf4cf4..b302e5beee 100644
--- a/deps/v8/src/checks.h
+++ b/deps/v8/src/checks.h
@@ -95,38 +95,6 @@ static inline void CheckNonEqualsHelper(const char* file,
}
}
-#ifdef V8_TARGET_ARCH_X64
-// Helper function used by the CHECK_EQ function when given intptr_t
-// arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- intptr_t expected,
- const char* value_source,
- intptr_t value) {
- if (expected != value) {
- V8_Fatal(file, line,
- "CHECK_EQ(%s, %s) failed\n# Expected: %i\n# Found: %i",
- expected_source, value_source, expected, value);
- }
-}
-
-
-// Helper function used by the CHECK_NE function when given intptr_t
-// arguments. Should not be called directly.
-static inline void CheckNonEqualsHelper(const char* file,
- int line,
- const char* unexpected_source,
- intptr_t unexpected,
- const char* value_source,
- intptr_t value) {
- if (unexpected == value) {
- V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %i",
- unexpected_source, value_source, value);
- }
-}
-#endif // V8_TARGET_ARCH_X64
-
// Helper function used by the CHECK function when given string
// arguments. Should not be called directly.
diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc
index 8e516c0b04..9a00ae2b65 100644
--- a/deps/v8/src/codegen.cc
+++ b/deps/v8/src/codegen.cc
@@ -517,7 +517,10 @@ const char* RuntimeStub::GetName() {
void RuntimeStub::Generate(MacroAssembler* masm) {
- masm->TailCallRuntime(ExternalReference(id_), num_arguments_);
+ Runtime::Function* f = Runtime::FunctionForId(id_);
+ masm->TailCallRuntime(ExternalReference(f),
+ num_arguments_,
+ f->result_size);
}
diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h
index d6967b7aff..d03f4b60b0 100644
--- a/deps/v8/src/codegen.h
+++ b/deps/v8/src/codegen.h
@@ -286,7 +286,7 @@ class CompareStub: public CodeStub {
class CEntryStub : public CodeStub {
public:
- CEntryStub() { }
+ explicit CEntryStub(int result_size) : result_size_(result_size) { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }
@@ -302,10 +302,14 @@ class CEntryStub : public CodeStub {
void GenerateThrowTOS(MacroAssembler* masm);
void GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type);
-
private:
+ // Number of pointers/values returned.
+ int result_size_;
+
Major MajorKey() { return CEntry; }
- int MinorKey() { return 0; }
+ // Minor key must differ if different result_size_ values means different
+ // code is generated.
+ int MinorKey();
const char* GetName() { return "CEntryStub"; }
};
@@ -313,7 +317,7 @@ class CEntryStub : public CodeStub {
class CEntryDebugBreakStub : public CEntryStub {
public:
- CEntryDebugBreakStub() { }
+ CEntryDebugBreakStub() : CEntryStub(1) { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }
diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc
index 7082280856..e4658b1cee 100644
--- a/deps/v8/src/d8.cc
+++ b/deps/v8/src/d8.cc
@@ -159,8 +159,7 @@ Handle<Value> Shell::Write(const Arguments& args) {
printf(" ");
}
v8::String::Utf8Value str(args[i]);
- const char* cstr = ToCString(str);
- printf("%s", cstr);
+ fwrite(*str, sizeof(**str), str.length(), stdout);
}
return Undefined();
}
@@ -180,15 +179,15 @@ Handle<Value> Shell::Read(const Arguments& args) {
Handle<Value> Shell::ReadLine(const Arguments& args) {
- char line_buf[256];
- if (fgets(line_buf, sizeof(line_buf), stdin) == NULL) {
- return ThrowException(String::New("Error reading line"));
+ i::SmartPointer<char> line(i::ReadLine(""));
+ if (*line == NULL) {
+ return Null();
}
- int len = strlen(line_buf);
- if (line_buf[len - 1] == '\n') {
+ size_t len = strlen(*line);
+ if (len > 0 && line[len - 1] == '\n') {
--len;
}
- return String::New(line_buf, len);
+ return String::New(*line, len);
}
diff --git a/deps/v8/src/d8.js b/deps/v8/src/d8.js
index da5be1f3d3..14b50603c9 100644
--- a/deps/v8/src/d8.js
+++ b/deps/v8/src/d8.js
@@ -102,7 +102,8 @@ Debug.ScriptCompilationType = { Host: 0,
Debug.ScopeType = { Global: 0,
Local: 1,
With: 2,
- Closure: 3 };
+ Closure: 3,
+ Catch: 4 };
// Current debug state.
@@ -900,6 +901,10 @@ function formatScope_(scope) {
result += 'With, ';
result += '#' + scope.object.ref + '#';
break;
+ case Debug.ScopeType.Catch:
+ result += 'Catch, ';
+ result += '#' + scope.object.ref + '#';
+ break;
case Debug.ScopeType.Closure:
result += 'Closure';
break;
diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc
index e341022aa4..f3e11ae751 100644
--- a/deps/v8/src/debug.cc
+++ b/deps/v8/src/debug.cc
@@ -75,6 +75,9 @@ BreakLocationIterator::BreakLocationIterator(Handle<DebugInfo> debug_info,
BreakLocatorType type) {
debug_info_ = debug_info;
type_ = type;
+ // Get the stub early to avoid possible GC during iterations. We may need
+ // this stub to detect debugger calls generated from debugger statements.
+ debug_break_stub_ = RuntimeStub(Runtime::kDebugBreak, 0).GetCode();
reloc_iterator_ = NULL;
reloc_iterator_original_ = NULL;
Reset(); // Initialize the rest of the member variables.
@@ -126,6 +129,10 @@ void BreakLocationIterator::Next() {
return;
}
if (code->kind() == Code::STUB) {
+ if (IsDebuggerStatement()) {
+ break_point_++;
+ return;
+ }
if (type_ == ALL_BREAK_LOCATIONS) {
if (Debug::IsBreakStub(code)) {
break_point_++;
@@ -238,7 +245,7 @@ void BreakLocationIterator::SetBreakPoint(Handle<Object> break_point_object) {
if (!HasBreakPoint()) {
SetDebugBreak();
}
- ASSERT(IsDebugBreak());
+ ASSERT(IsDebugBreak() || IsDebuggerStatement());
// Set the break point information.
DebugInfo::SetBreakPoint(debug_info_, code_position(),
position(), statement_position(),
@@ -258,6 +265,11 @@ void BreakLocationIterator::ClearBreakPoint(Handle<Object> break_point_object) {
void BreakLocationIterator::SetOneShot() {
+ // Debugger statement always calls debugger. No need to modify it.
+ if (IsDebuggerStatement()) {
+ return;
+ }
+
// If there is a real break point here no more to do.
if (HasBreakPoint()) {
ASSERT(IsDebugBreak());
@@ -270,6 +282,11 @@ void BreakLocationIterator::SetOneShot() {
void BreakLocationIterator::ClearOneShot() {
+ // Debugger statement always calls debugger. No need to modify it.
+ if (IsDebuggerStatement()) {
+ return;
+ }
+
// If there is a real break point here no more to do.
if (HasBreakPoint()) {
ASSERT(IsDebugBreak());
@@ -283,6 +300,11 @@ void BreakLocationIterator::ClearOneShot() {
void BreakLocationIterator::SetDebugBreak() {
+ // Debugger statement always calls debugger. No need to modify it.
+ if (IsDebuggerStatement()) {
+ return;
+ }
+
// If there is already a break point here just return. This might happen if
// the same code is flooded with break points twice. Flooding the same
// function twice might happen when stepping in a function with an exception
@@ -303,6 +325,11 @@ void BreakLocationIterator::SetDebugBreak() {
void BreakLocationIterator::ClearDebugBreak() {
+ // Debugger statement always calls debugger. No need to modify it.
+ if (IsDebuggerStatement()) {
+ return;
+ }
+
if (RelocInfo::IsJSReturn(rmode())) {
// Restore the frame exit code.
ClearDebugBreakAtReturn();
@@ -317,10 +344,10 @@ void BreakLocationIterator::ClearDebugBreak() {
void BreakLocationIterator::PrepareStepIn() {
HandleScope scope;
- // Step in can only be prepared if currently positioned on an IC call or
- // construct call.
+ // Step in can only be prepared if currently positioned on an IC call,
+ // construct call or CallFunction stub call.
Address target = rinfo()->target_address();
- Code* code = Code::GetCodeFromTargetAddress(target);
+ Handle<Code> code(Code::GetCodeFromTargetAddress(target));
if (code->is_call_stub()) {
// Step in through IC call is handled by the runtime system. Therefore make
// sure that the any current IC is cleared and the runtime system is
@@ -334,11 +361,29 @@ void BreakLocationIterator::PrepareStepIn() {
rinfo()->set_target_address(stub->entry());
}
} else {
+#ifdef DEBUG
+ // All the following stuff is needed only for assertion checks so the code
+ // is wrapped in ifdef.
+ Handle<Code> maybe_call_function_stub = code;
+ if (IsDebugBreak()) {
+ Address original_target = original_rinfo()->target_address();
+ maybe_call_function_stub =
+ Handle<Code>(Code::GetCodeFromTargetAddress(original_target));
+ }
+ bool is_call_function_stub =
+ (maybe_call_function_stub->kind() == Code::STUB &&
+ maybe_call_function_stub->major_key() == CodeStub::CallFunction);
+
// Step in through construct call requires no changes to the running code.
// Step in through getters/setters should already be prepared as well
// because caller of this function (Debug::PrepareStep) is expected to
// flood the top frame's function with one shot breakpoints.
- ASSERT(RelocInfo::IsConstructCall(rmode()) || code->is_inline_cache_stub());
+ // Step in through CallFunction stub should also be prepared by caller of
+ // this function (Debug::PrepareStep) which should flood target function
+ // with breakpoints.
+ ASSERT(RelocInfo::IsConstructCall(rmode()) || code->is_inline_cache_stub()
+ || is_call_function_stub);
+#endif
}
}
@@ -409,6 +454,21 @@ void BreakLocationIterator::ClearDebugBreakAtIC() {
}
+bool BreakLocationIterator::IsDebuggerStatement() {
+ if (RelocInfo::IsCodeTarget(rmode())) {
+ Address target = original_rinfo()->target_address();
+ Code* code = Code::GetCodeFromTargetAddress(target);
+ if (code->kind() == Code::STUB) {
+ CodeStub::Major major_key = code->major_key();
+ if (major_key == CodeStub::Runtime) {
+ return (*debug_break_stub_ == code);
+ }
+ }
+ }
+ return false;
+}
+
+
Object* BreakLocationIterator::BreakPointObjects() {
return debug_info_->GetBreakPointObjects(code_position());
}
@@ -458,6 +518,7 @@ void Debug::ThreadInit() {
thread_local_.step_count_ = 0;
thread_local_.last_fp_ = 0;
thread_local_.step_into_fp_ = 0;
+ thread_local_.step_out_fp_ = 0;
thread_local_.after_break_target_ = 0;
thread_local_.debugger_entry_ = NULL;
thread_local_.pending_interrupts_ = 0;
@@ -502,7 +563,6 @@ bool Debug::break_on_exception_ = false;
bool Debug::break_on_uncaught_exception_ = true;
Handle<Context> Debug::debug_context_ = Handle<Context>();
-Code* Debug::debug_break_return_entry_ = NULL;
Code* Debug::debug_break_return_ = NULL;
@@ -583,11 +643,6 @@ void ScriptCache::HandleWeakScript(v8::Persistent<v8::Value> obj, void* data) {
void Debug::Setup(bool create_heap_objects) {
ThreadInit();
if (create_heap_objects) {
- // Get code to handle entry to debug break on return.
- debug_break_return_entry_ =
- Builtins::builtin(Builtins::Return_DebugBreakEntry);
- ASSERT(debug_break_return_entry_->IsCode());
-
// Get code to handle debug break on return.
debug_break_return_ =
Builtins::builtin(Builtins::Return_DebugBreak);
@@ -749,7 +804,6 @@ void Debug::PreemptionWhileInDebugger() {
void Debug::Iterate(ObjectVisitor* v) {
- v->VisitPointer(bit_cast<Object**, Code**>(&(debug_break_return_entry_)));
v->VisitPointer(bit_cast<Object**, Code**>(&(debug_break_return_)));
}
@@ -804,11 +858,18 @@ Object* Debug::Break(Arguments args) {
break_points_hit = CheckBreakPoints(break_point_objects);
}
- // Notify debugger if a real break point is triggered or if performing single
- // stepping with no more steps to perform. Otherwise do another step.
- if (!break_points_hit->IsUndefined() ||
- (thread_local_.last_step_action_ != StepNone &&
- thread_local_.step_count_ == 0)) {
+ // If step out is active skip everything until the frame where we need to step
+ // out to is reached, unless real breakpoint is hit.
+ if (Debug::StepOutActive() && frame->fp() != Debug::step_out_fp() &&
+ break_points_hit->IsUndefined() ) {
+ // Step count should always be 0 for StepOut.
+ ASSERT(thread_local_.step_count_ == 0);
+ } else if (!break_points_hit->IsUndefined() ||
+ (thread_local_.last_step_action_ != StepNone &&
+ thread_local_.step_count_ == 0)) {
+ // Notify debugger if a real break point is triggered or if performing
+ // single stepping with no more steps to perform. Otherwise do another step.
+
// Clear all current stepping setup.
ClearStepping();
@@ -1044,7 +1105,13 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// Remember this step action and count.
thread_local_.last_step_action_ = step_action;
- thread_local_.step_count_ = step_count;
+ if (step_action == StepOut) {
+ // For step out target frame will be found on the stack so there is no need
+ // to set step counter for it. It's expected to always be 0 for StepOut.
+ thread_local_.step_count_ = 0;
+ } else {
+ thread_local_.step_count_ = step_count;
+ }
// Get the frame where the execution has stopped and skip the debug frame if
// any. The debug frame will only be present if execution was stopped due to
@@ -1092,6 +1159,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
bool is_call_target = false;
bool is_load_or_store = false;
bool is_inline_cache_stub = false;
+ Handle<Code> call_function_stub;
if (RelocInfo::IsCodeTarget(it.rinfo()->rmode())) {
Address target = it.rinfo()->target_address();
Code* code = Code::GetCodeFromTargetAddress(target);
@@ -1102,19 +1170,51 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
is_inline_cache_stub = true;
is_load_or_store = !is_call_target;
}
+
+ // Check if target code is CallFunction stub.
+ Code* maybe_call_function_stub = code;
+ // If there is a breakpoint at this line look at the original code to
+ // check if it is a CallFunction stub.
+ if (it.IsDebugBreak()) {
+ Address original_target = it.original_rinfo()->target_address();
+ maybe_call_function_stub =
+ Code::GetCodeFromTargetAddress(original_target);
+ }
+ if (maybe_call_function_stub->kind() == Code::STUB &&
+ maybe_call_function_stub->major_key() == CodeStub::CallFunction) {
+ // Save reference to the code as we may need it to find out arguments
+ // count for 'step in' later.
+ call_function_stub = Handle<Code>(maybe_call_function_stub);
+ }
}
// If this is the last break code target step out is the only possibility.
if (it.IsExit() || step_action == StepOut) {
+ if (step_action == StepOut) {
+ // Skip step_count frames starting with the current one.
+ while (step_count-- > 0 && !frames_it.done()) {
+ frames_it.Advance();
+ }
+ } else {
+ ASSERT(it.IsExit());
+ frames_it.Advance();
+ }
+ // Skip builtin functions on the stack.
+ while (!frames_it.done() &&
+ JSFunction::cast(frames_it.frame()->function())->IsBuiltin()) {
+ frames_it.Advance();
+ }
// Step out: If there is a JavaScript caller frame, we need to
// flood it with breakpoints.
- frames_it.Advance();
if (!frames_it.done()) {
// Fill the function to return to with one-shot break points.
JSFunction* function = JSFunction::cast(frames_it.frame()->function());
FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared()));
+ // Set target frame pointer.
+ ActivateStepOut(frames_it.frame());
}
- } else if (!(is_inline_cache_stub || RelocInfo::IsConstructCall(it.rmode()))
+ } else if (!(is_inline_cache_stub || RelocInfo::IsConstructCall(it.rmode()) ||
+ !call_function_stub.is_null())
|| step_action == StepNext || step_action == StepMin) {
// Step next or step min.
@@ -1126,6 +1226,45 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
debug_info->code()->SourceStatementPosition(frame->pc());
thread_local_.last_fp_ = frame->fp();
} else {
+ // If it's CallFunction stub ensure target function is compiled and flood
+ // it with one shot breakpoints.
+ if (!call_function_stub.is_null()) {
+ // Find out number of arguments from the stub minor key.
+ // Reverse lookup required as the minor key cannot be retrieved
+ // from the code object.
+ Handle<Object> obj(
+ Heap::code_stubs()->SlowReverseLookup(*call_function_stub));
+ ASSERT(*obj != Heap::undefined_value());
+ ASSERT(obj->IsSmi());
+ // Get the STUB key and extract major and minor key.
+ uint32_t key = Smi::cast(*obj)->value();
+ // Argc in the stub is the number of arguments passed - not the
+ // expected arguments of the called function.
+ int call_function_arg_count = CodeStub::MinorKeyFromKey(key);
+ ASSERT(call_function_stub->major_key() ==
+ CodeStub::MajorKeyFromKey(key));
+
+ // Find target function on the expression stack.
+      // Expression stack looks like this (top to bottom):
+ // argN
+ // ...
+ // arg0
+ // Receiver
+ // Function to call
+ int expressions_count = frame->ComputeExpressionsCount();
+ ASSERT(expressions_count - 2 - call_function_arg_count >= 0);
+ Object* fun = frame->GetExpression(
+ expressions_count - 2 - call_function_arg_count);
+ if (fun->IsJSFunction()) {
+ Handle<JSFunction> js_function(JSFunction::cast(fun));
+ // Don't step into builtins.
+ if (!js_function->IsBuiltin()) {
+ // It will also compile target function if it's not compiled yet.
+ FloodWithOneShot(Handle<SharedFunctionInfo>(js_function->shared()));
+ }
+ }
+ }
+
// Fill the current function with one-shot break points even for step in on
// a call target as the function called might be a native function for
// which step in will not stop. It also prepares for stepping in
@@ -1328,6 +1467,7 @@ void Debug::ClearStepping() {
// Clear the various stepping setup.
ClearOneShot();
ClearStepIn();
+ ClearStepOut();
ClearStepNext();
// Clear multiple step counter.
@@ -1355,6 +1495,7 @@ void Debug::ClearOneShot() {
void Debug::ActivateStepIn(StackFrame* frame) {
+ ASSERT(!StepOutActive());
thread_local_.step_into_fp_ = frame->fp();
}
@@ -1364,6 +1505,17 @@ void Debug::ClearStepIn() {
}
+void Debug::ActivateStepOut(StackFrame* frame) {
+ ASSERT(!StepInActive());
+ thread_local_.step_out_fp_ = frame->fp();
+}
+
+
+void Debug::ClearStepOut() {
+ thread_local_.step_out_fp_ = 0;
+}
+
+
void Debug::ClearStepNext() {
thread_local_.last_step_action_ = StepNone;
thread_local_.last_statement_position_ = RelocInfo::kNoPosition;
@@ -1455,26 +1607,25 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
Address addr = frame->pc() - Assembler::kPatchReturnSequenceLength;
// Check if the location is at JS exit.
- bool at_js_exit = false;
+ bool at_js_return = false;
+ bool break_at_js_return_active = false;
RelocIterator it(debug_info->code());
while (!it.done()) {
if (RelocInfo::IsJSReturn(it.rinfo()->rmode())) {
- at_js_exit = (it.rinfo()->pc() ==
- addr - Assembler::kPatchReturnSequenceAddressOffset);
+ at_js_return = (it.rinfo()->pc() ==
+ addr - Assembler::kPatchReturnSequenceAddressOffset);
+ break_at_js_return_active = it.rinfo()->IsCallInstruction();
}
it.next();
}
// Handle the jump to continue execution after break point depending on the
// break location.
- if (at_js_exit) {
- // First check if the call in the code is still the debug break return
- // entry code. If it is the break point is still active. If not the break
- // point was removed during break point processing.
- if (Assembler::target_address_at(addr) ==
- debug_break_return_entry()->entry()) {
- // Break point still active. Jump to the corresponding place in the
- // original code.
+ if (at_js_return) {
+    // If the break point at return is still active jump to the corresponding
+ // place in the original code. If not the break point was removed during
+ // break point processing.
+ if (break_at_js_return_active) {
addr += original_code->instruction_start() - code->instruction_start();
}
diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h
index 5b0273aa22..d6b2c088d3 100644
--- a/deps/v8/src/debug.h
+++ b/deps/v8/src/debug.h
@@ -119,6 +119,8 @@ class BreakLocationIterator {
return reloc_iterator_original_->rinfo()->rmode();
}
+ bool IsDebuggerStatement();
+
protected:
bool RinfoDone() const;
void RinfoNext();
@@ -128,6 +130,7 @@ class BreakLocationIterator {
int position_;
int statement_position_;
Handle<DebugInfo> debug_info_;
+ Handle<Code> debug_break_stub_;
RelocIterator* reloc_iterator_;
RelocIterator* reloc_iterator_original_;
@@ -279,6 +282,9 @@ class Debug {
static Address step_in_fp() { return thread_local_.step_into_fp_; }
static Address* step_in_fp_addr() { return &thread_local_.step_into_fp_; }
+ static bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }
+ static Address step_out_fp() { return thread_local_.step_out_fp_; }
+
static EnterDebugger* debugger_entry() {
return thread_local_.debugger_entry_;
}
@@ -329,10 +335,8 @@ class Debug {
return &registers_[r];
}
- // Address of the debug break return entry code.
- static Code* debug_break_return_entry() { return debug_break_return_entry_; }
-
- // Support for getting the address of the debug break on return code.
+ // Access to the debug break on return code.
+ static Code* debug_break_return() { return debug_break_return_; }
static Code** debug_break_return_address() {
return &debug_break_return_;
}
@@ -379,7 +383,6 @@ class Debug {
static void GenerateKeyedStoreICDebugBreak(MacroAssembler* masm);
static void GenerateConstructCallDebugBreak(MacroAssembler* masm);
static void GenerateReturnDebugBreak(MacroAssembler* masm);
- static void GenerateReturnDebugBreakEntry(MacroAssembler* masm);
static void GenerateStubNoRegistersDebugBreak(MacroAssembler* masm);
// Called from stub-cache.cc.
@@ -390,6 +393,8 @@ class Debug {
static void ClearOneShot();
static void ActivateStepIn(StackFrame* frame);
static void ClearStepIn();
+ static void ActivateStepOut(StackFrame* frame);
+ static void ClearStepOut();
static void ClearStepNext();
// Returns whether the compile succeeded.
static bool EnsureCompiled(Handle<SharedFunctionInfo> shared);
@@ -442,6 +447,10 @@ class Debug {
// Frame pointer for frame from which step in was performed.
Address step_into_fp_;
+ // Frame pointer for the frame where debugger should be called when current
+ // step out action is completed.
+ Address step_out_fp_;
+
// Storage location for jump when exiting debug break calls.
Address after_break_target_;
@@ -457,9 +466,6 @@ class Debug {
static ThreadLocal thread_local_;
static void ThreadInit();
- // Code object for debug break return entry code.
- static Code* debug_break_return_entry_;
-
// Code to call for handling debug break on return.
static Code* debug_break_return_;
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index c29815e550..949dd80c3b 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -77,11 +77,11 @@ int Heap::semispace_size_ = 512*KB;
int Heap::old_generation_size_ = 128*MB;
int Heap::initial_semispace_size_ = 128*KB;
#elif defined(V8_TARGET_ARCH_X64)
-int Heap::semispace_size_ = 8*MB;
+int Heap::semispace_size_ = 16*MB;
int Heap::old_generation_size_ = 1*GB;
int Heap::initial_semispace_size_ = 1*MB;
#else
-int Heap::semispace_size_ = 4*MB;
+int Heap::semispace_size_ = 8*MB;
int Heap::old_generation_size_ = 512*MB;
int Heap::initial_semispace_size_ = 512*KB;
#endif
@@ -1319,7 +1319,7 @@ bool Heap::CreateApiObjects() {
void Heap::CreateCEntryStub() {
- CEntryStub stub;
+ CEntryStub stub(1);
set_c_entry_code(*stub.GetCode());
}
@@ -2795,7 +2795,9 @@ STRUCT_LIST(MAKE_CASE)
bool Heap::IdleNotification() {
- static const int kIdlesBeforeCollection = 7;
+ static const int kIdlesBeforeScavenge = 4;
+ static const int kIdlesBeforeMarkSweep = 7;
+ static const int kIdlesBeforeMarkCompact = 8;
static int number_idle_notifications = 0;
static int last_gc_count = gc_count_;
@@ -2808,19 +2810,22 @@ bool Heap::IdleNotification() {
last_gc_count = gc_count_;
}
- if (number_idle_notifications >= kIdlesBeforeCollection) {
- // The first time through we collect without forcing compaction.
- // The second time through we force compaction and quit.
- bool force_compaction =
- number_idle_notifications > kIdlesBeforeCollection;
- CollectAllGarbage(force_compaction);
+ if (number_idle_notifications == kIdlesBeforeScavenge) {
+ CollectGarbage(0, NEW_SPACE);
+ new_space_.Shrink();
last_gc_count = gc_count_;
- if (force_compaction) {
- // Shrink new space.
- new_space_.Shrink();
- number_idle_notifications = 0;
- finished = true;
- }
+
+ } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
+ CollectAllGarbage(false);
+ new_space_.Shrink();
+ last_gc_count = gc_count_;
+
+ } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
+ CollectAllGarbage(true);
+ new_space_.Shrink();
+ last_gc_count = gc_count_;
+ number_idle_notifications = 0;
+ finished = true;
}
// Uncommit unused memory in new space.
@@ -3185,63 +3190,49 @@ bool Heap::Setup(bool create_heap_objects) {
if (!ConfigureHeapDefault()) return false;
}
- // Setup memory allocator and allocate an initial chunk of memory. The
- // initial chunk is double the size of the new space to ensure that we can
- // find a pair of semispaces that are contiguous and aligned to their size.
+ // Setup memory allocator and reserve a chunk of memory for new
+ // space. The chunk is double the size of the new space to ensure
+ // that we can find a pair of semispaces that are contiguous and
+ // aligned to their size.
if (!MemoryAllocator::Setup(MaxCapacity())) return false;
- void* chunk
- = MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
+ void* chunk =
+ MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
if (chunk == NULL) return false;
- // Put the initial chunk of the old space at the start of the initial
- // chunk, then the two new space semispaces, then the initial chunk of
- // code space. Align the pair of semispaces to their size, which must be
- // a power of 2.
+ // Align the pair of semispaces to their size, which must be a power
+ // of 2.
ASSERT(IsPowerOf2(young_generation_size_));
- Address code_space_start = reinterpret_cast<Address>(chunk);
- Address new_space_start = RoundUp(code_space_start, young_generation_size_);
- Address old_space_start = new_space_start + young_generation_size_;
- int code_space_size = new_space_start - code_space_start;
- int old_space_size = young_generation_size_ - code_space_size;
-
- // Initialize new space.
+ Address new_space_start =
+ RoundUp(reinterpret_cast<byte*>(chunk), young_generation_size_);
if (!new_space_.Setup(new_space_start, young_generation_size_)) return false;
- // Initialize old space, set the maximum capacity to the old generation
- // size. It will not contain code.
+ // Initialize old pointer space.
old_pointer_space_ =
new OldSpace(old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
if (old_pointer_space_ == NULL) return false;
- if (!old_pointer_space_->Setup(old_space_start, old_space_size >> 1)) {
- return false;
- }
+ if (!old_pointer_space_->Setup(NULL, 0)) return false;
+
+ // Initialize old data space.
old_data_space_ =
new OldSpace(old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
if (old_data_space_ == NULL) return false;
- if (!old_data_space_->Setup(old_space_start + (old_space_size >> 1),
- old_space_size >> 1)) {
- return false;
- }
+ if (!old_data_space_->Setup(NULL, 0)) return false;
// Initialize the code space, set its maximum capacity to the old
// generation size. It needs executable memory.
code_space_ =
new OldSpace(old_generation_size_, CODE_SPACE, EXECUTABLE);
if (code_space_ == NULL) return false;
- if (!code_space_->Setup(code_space_start, code_space_size)) return false;
+ if (!code_space_->Setup(NULL, 0)) return false;
// Initialize map space.
map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
if (map_space_ == NULL) return false;
- // Setting up a paged space without giving it a virtual memory range big
- // enough to hold at least a page will cause it to allocate.
if (!map_space_->Setup(NULL, 0)) return false;
// Initialize global property cell space.
cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
if (cell_space_ == NULL) return false;
- // Setting up a paged space without giving it a virtual memory range big
- // enough to hold at least a page will cause it to allocate.
if (!cell_space_->Setup(NULL, 0)) return false;
// The large object code space may contain code or data. We set the memory
@@ -3563,7 +3554,7 @@ namespace {
class JSConstructorProfile BASE_EMBEDDED {
public:
JSConstructorProfile() : zscope_(DELETE_ON_EXIT) {}
- void CollectStats(JSObject* obj);
+ void CollectStats(HeapObject* obj);
void PrintStats();
// Used by ZoneSplayTree::ForEach.
void Call(String* name, const NumberAndSizeInfo& number_and_size);
@@ -3608,33 +3599,36 @@ int JSConstructorProfile::CalculateJSObjectNetworkSize(JSObject* obj) {
void JSConstructorProfile::Call(String* name,
const NumberAndSizeInfo& number_and_size) {
- SmartPointer<char> s_name;
- if (name != NULL) {
- s_name = name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
- }
+ ASSERT(name != NULL);
+ SmartPointer<char> s_name(
+ name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
LOG(HeapSampleJSConstructorEvent(*s_name,
number_and_size.number(),
number_and_size.bytes()));
}
-void JSConstructorProfile::CollectStats(JSObject* obj) {
- String* constructor_func = NULL;
- if (obj->map()->constructor()->IsJSFunction()) {
- JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
- SharedFunctionInfo* sfi = constructor->shared();
- String* name = String::cast(sfi->name());
- constructor_func = name->length() > 0 ? name : sfi->inferred_name();
- } else if (obj->IsJSFunction()) {
- constructor_func = Heap::function_class_symbol();
+void JSConstructorProfile::CollectStats(HeapObject* obj) {
+ String* constructor = NULL;
+ int size;
+ if (obj->IsString()) {
+ constructor = Heap::String_symbol();
+ size = obj->Size();
+ } else if (obj->IsJSObject()) {
+ JSObject* js_obj = JSObject::cast(obj);
+ constructor = js_obj->constructor_name();
+ size = CalculateJSObjectNetworkSize(js_obj);
+ } else {
+ return;
}
+
JSObjectsInfoTree::Locator loc;
- if (!js_objects_info_tree_.Find(constructor_func, &loc)) {
- js_objects_info_tree_.Insert(constructor_func, &loc);
+ if (!js_objects_info_tree_.Find(constructor, &loc)) {
+ js_objects_info_tree_.Insert(constructor, &loc);
}
NumberAndSizeInfo number_and_size = loc.value();
number_and_size.increment_number(1);
- number_and_size.increment_bytes(CalculateJSObjectNetworkSize(obj));
+ number_and_size.increment_bytes(size);
loc.set_value(number_and_size);
}
@@ -3676,9 +3670,7 @@ void HeapProfiler::WriteSample() {
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
CollectStats(obj, info);
- if (obj->IsJSObject()) {
- js_cons_profile.CollectStats(JSObject::cast(obj));
- }
+ js_cons_profile.CollectStats(obj);
}
// Lump all the string types together.
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index 55dc92dd92..7793e49265 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -129,11 +129,12 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// eax: initial map
__ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
__ shl(edi, kPointerSizeLog2);
- // Make sure that the maximum heap object size will never cause us
- // problem here, because it is always greater than the maximum
- // instance size that can be represented in a byte.
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >= JSObject::kMaxInstanceSize);
- __ AllocateObjectInNewSpace(edi, ebx, edi, no_reg, &rt_call, false);
+ __ AllocateObjectInNewSpace(edi,
+ ebx,
+ edi,
+ no_reg,
+ &rt_call,
+ NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields.
// eax: initial map
// ebx: JSObject
@@ -188,8 +189,6 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// ebx: JSObject
// edi: start of next object (will be start of FixedArray)
// edx: number of elements in properties array
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >
- (FixedArray::kHeaderSize + 255*kPointerSize));
__ AllocateObjectInNewSpace(FixedArray::kHeaderSize,
times_pointer_size,
edx,
@@ -197,7 +196,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
ecx,
no_reg,
&undo_allocation,
- true);
+ RESULT_CONTAINS_TOP);
// Initialize the FixedArray.
// ebx: JSObject
@@ -245,10 +244,10 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
}
// Allocate the new receiver object using the runtime call.
- // edi: function (constructor)
__ bind(&rt_call);
// Must restore edi (constructor) before calling runtime.
__ mov(edi, Operand(esp, 0));
+ // edi: function (constructor)
__ push(edi);
__ CallRuntime(Runtime::kNewObject, 1);
__ mov(ebx, Operand(eax)); // store result in ebx
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index a9face1d70..400a3607b7 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -6886,7 +6886,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ j(above_equal, &string1);
// First and second argument are strings.
- __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2);
+ __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2, 1);
// Only first argument is a string.
__ bind(&string1);
@@ -6954,12 +6954,11 @@ void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm,
scratch1,
scratch2,
need_gc,
- false);
+ TAG_OBJECT);
- // Set the map and tag the result.
- __ mov(Operand(result, HeapObject::kMapOffset),
+ // Set the map.
+ __ mov(FieldOperand(result, HeapObject::kMapOffset),
Immediate(Factory::heap_number_map()));
- __ or_(Operand(result), Immediate(kHeapObjectTag));
}
@@ -7176,7 +7175,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
__ pop(ebx); // Return address.
__ push(edx);
__ push(ebx);
- __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
+ __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1, 1);
}
@@ -7201,7 +7200,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
- __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
+ __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
}
@@ -7437,7 +7436,7 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
__ push(eax);
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
+ __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1, 1);
}
@@ -7469,6 +7468,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}
+int CEntryStub::MinorKey() {
+ ASSERT(result_size_ <= 2);
+ // Result returned in eax, or eax+edx if result_size_ is 2.
+ return 0;
+}
+
+
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// eax holds the exception.
diff --git a/deps/v8/src/ia32/debug-ia32.cc b/deps/v8/src/ia32/debug-ia32.cc
index 4ef0862af1..7e0dfd1489 100644
--- a/deps/v8/src/ia32/debug-ia32.cc
+++ b/deps/v8/src/ia32/debug-ia32.cc
@@ -36,9 +36,7 @@ namespace internal {
#ifdef ENABLE_DEBUGGER_SUPPORT
-// A debug break in the frame exit code is identified by a call instruction.
bool BreakLocationIterator::IsDebugBreakAtReturn() {
- // Opcode E8 is call.
return Debug::IsDebugBreakAtReturn(rinfo());
}
@@ -49,7 +47,7 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
void BreakLocationIterator::SetDebugBreakAtReturn() {
ASSERT(Debug::kIa32JSReturnSequenceLength >=
Debug::kIa32CallInstructionLength);
- rinfo()->PatchCodeWithCall(Debug::debug_break_return_entry()->entry(),
+ rinfo()->PatchCodeWithCall(Debug::debug_break_return()->entry(),
Debug::kIa32JSReturnSequenceLength - Debug::kIa32CallInstructionLength);
}
@@ -61,11 +59,11 @@ void BreakLocationIterator::ClearDebugBreakAtReturn() {
}
-// Check whether the JS frame exit code has been patched with a debug break.
+// A debug break in the frame exit code is identified by the JS frame exit code
+// having been patched with a call instruction.
bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()));
- // Opcode E8 is call.
- return (*(rinfo->pc()) == 0xE8);
+ return rinfo->IsCallInstruction();
}
@@ -194,17 +192,6 @@ void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateReturnDebugBreakEntry(MacroAssembler* masm) {
- // OK to clobber ebx as we are returning from a JS function through the code
- // generated by CodeGenerator::GenerateReturnSequence()
- ExternalReference debug_break_return =
- ExternalReference(Debug_Address::DebugBreakReturn());
- __ mov(ebx, Operand::StaticVariable(debug_break_return));
- __ add(Operand(ebx), Immediate(Code::kHeaderSize - kHeapObjectTag));
- __ jmp(Operand(ebx));
-}
-
-
void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
// Register state for stub CallFunction (from CallFunctionStub in ic-ia32.cc).
// ----------- S t a t e -------------
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index e39808b2ee..9a2753d495 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -404,7 +404,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ push(eax);
__ push(ecx);
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+ __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
// Check whether the elements is a pixel array.
// eax: value
@@ -667,7 +667,7 @@ void CallIC::Generate(MacroAssembler* masm,
__ push(ebx);
// Call the entry.
- CEntryStub stub;
+ CEntryStub stub(1);
__ mov(eax, Immediate(2));
__ mov(ebx, Immediate(f));
__ CallStub(&stub);
@@ -799,7 +799,7 @@ void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ push(ebx); // return address
// Perform tail call to the entry.
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -927,7 +927,7 @@ void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ push(ebx); // return address
// Perform tail call to the entry.
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -967,7 +967,7 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Perform tail call to the entry.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
@@ -987,7 +987,7 @@ void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ push(ebx);
// Perform tail call to the entry.
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
@@ -1010,7 +1010,7 @@ void KeyedStoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ push(ecx);
// Do tail-call to runtime routine.
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
@@ -1032,7 +1032,7 @@ void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Do tail-call to runtime routine.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
#undef __
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index 754b74abef..79b308944f 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -620,18 +620,22 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
-void MacroAssembler::LoadAllocationTopHelper(
- Register result,
- Register result_end,
- Register scratch,
- bool result_contains_top_on_entry) {
+void MacroAssembler::LoadAllocationTopHelper(Register result,
+ Register result_end,
+ Register scratch,
+ AllocationFlags flags) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
// Just return if allocation top is already known.
- if (result_contains_top_on_entry) {
+ if ((flags & RESULT_CONTAINS_TOP) != 0) {
// No use of scratch if allocation top is provided.
ASSERT(scratch.is(no_reg));
+#ifdef DEBUG
+ // Assert that result actually contains top on entry.
+ cmp(result, Operand::StaticVariable(new_space_allocation_top));
+ Check(equal, "Unexpected allocation top");
+#endif
return;
}
@@ -659,20 +663,17 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
}
}
-void MacroAssembler::AllocateObjectInNewSpace(
- int object_size,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+
+void MacroAssembler::AllocateObjectInNewSpace(int object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -683,25 +684,26 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
+
+ // Tag result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ or_(Operand(result), Immediate(kHeapObjectTag));
+ }
}
-void MacroAssembler::AllocateObjectInNewSpace(
- int header_size,
- ScaleFactor element_size,
- Register element_count,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+void MacroAssembler::AllocateObjectInNewSpace(int header_size,
+ ScaleFactor element_size,
+ Register element_count,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -712,24 +714,24 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
+
+ // Tag result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ or_(Operand(result), Immediate(kHeapObjectTag));
+ }
}
-void MacroAssembler::AllocateObjectInNewSpace(
- Register object_size,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+void MacroAssembler::AllocateObjectInNewSpace(Register object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
-
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -743,6 +745,11 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
+
+ // Tag result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ or_(Operand(result), Immediate(kHeapObjectTag));
+ }
}
@@ -889,7 +896,8 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
- int num_arguments) {
+ int num_arguments,
+ int result_size) {
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
@@ -902,7 +910,7 @@ void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
mov(ebx, Immediate(ext));
- CEntryStub ces;
+ CEntryStub ces(1);
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
@@ -1162,8 +1170,9 @@ void MacroAssembler::Abort(const char* msg) {
}
+#ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int size)
- : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
+ : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
@@ -1179,6 +1188,7 @@ CodePatcher::~CodePatcher() {
ASSERT(masm_.pc_ == address_ + size_);
ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
+#endif // ENABLE_DEBUGGER_SUPPORT
} } // namespace v8::internal
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index f10ec16aa2..fa61183e58 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -37,25 +37,6 @@ namespace internal {
class JumpTarget;
-// Helper types to make flags easier to read at call sites.
-enum InvokeFlag {
- CALL_FUNCTION,
- JUMP_FUNCTION
-};
-
-enum CodeLocation {
- IN_JAVASCRIPT,
- IN_JS_ENTRY,
- IN_C_ENTRY
-};
-
-enum HandlerType {
- TRY_CATCH_HANDLER,
- TRY_FINALLY_HANDLER,
- JS_ENTRY_HANDLER
-};
-
-
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
public:
@@ -201,7 +182,7 @@ class MacroAssembler: public Assembler {
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void AllocateObjectInNewSpace(int header_size,
ScaleFactor element_size,
@@ -210,14 +191,14 @@ class MacroAssembler: public Assembler {
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void AllocateObjectInNewSpace(Register object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
// Undo allocation in new space. The object passed and objects allocated after
// it will no longer be allocated. Make sure that no pointers are left to the
@@ -275,7 +256,9 @@ class MacroAssembler: public Assembler {
// Tail call of a runtime routine (jump).
// Like JumpToBuiltin, but also takes care of passing the number
// of arguments.
- void TailCallRuntime(const ExternalReference& ext, int num_arguments);
+ void TailCallRuntime(const ExternalReference& ext,
+ int num_arguments,
+ int result_size);
// Jump to the builtin routine.
void JumpToBuiltin(const ExternalReference& ext);
@@ -350,11 +333,12 @@ class MacroAssembler: public Assembler {
void LoadAllocationTopHelper(Register result,
Register result_end,
Register scratch,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void UpdateAllocationTopHelper(Register result_end, Register scratch);
};
+#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. Is not legal to emit
@@ -373,6 +357,7 @@ class CodePatcher {
int size_; // Number of bytes of the expected patch size.
MacroAssembler masm_; // Macro assembler used to generate the code.
};
+#endif // ENABLE_DEBUGGER_SUPPORT
// -----------------------------------------------------------------------------
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index 049c57e4f1..58a3ce5209 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -302,7 +302,7 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
__ mov(eax, Immediate(5));
__ mov(ebx, Immediate(ref));
- CEntryStub stub;
+ CEntryStub stub(1);
__ CallStub(&stub);
}
@@ -467,7 +467,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(ref, 5);
+ __ TailCallRuntime(ref, 5, 1);
__ bind(&cleanup);
__ pop(scratch1);
@@ -489,7 +489,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
- __ TailCallRuntime(ref, 5);
+ __ TailCallRuntime(ref, 5, 1);
}
private:
@@ -593,7 +593,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ mov(eax, Immediate(5));
__ mov(ebx, Immediate(ref));
- CEntryStub stub;
+ CEntryStub stub(1);
__ CallStub(&stub);
__ LeaveInternalFrame();
@@ -789,7 +789,7 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference load_callback_property =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(load_callback_property, 5);
+ __ TailCallRuntime(load_callback_property, 5, 1);
}
@@ -1237,7 +1237,7 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
- __ TailCallRuntime(store_callback_property, 4);
+ __ TailCallRuntime(store_callback_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -1290,7 +1290,7 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallRuntime(store_ic_property, 3);
+ __ TailCallRuntime(store_ic_property, 3, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -1783,10 +1783,12 @@ Object* ConstructStubCompiler::CompileConstructStub(
// ebx: initial map
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ shl(ecx, kPointerSizeLog2);
- // Make sure that the maximum heap object size will never cause us
- // problems here.
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >= JSObject::kMaxInstanceSize);
- __ AllocateObjectInNewSpace(ecx, edx, ecx, no_reg, &generic_stub_call, false);
+ __ AllocateObjectInNewSpace(ecx,
+ edx,
+ ecx,
+ no_reg,
+ &generic_stub_call,
+ NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
// ebx: initial map
diff --git a/deps/v8/src/list.h b/deps/v8/src/list.h
index b6c06d8461..dd7ea1c9ca 100644
--- a/deps/v8/src/list.h
+++ b/deps/v8/src/list.h
@@ -62,9 +62,8 @@ class List {
return data_[i];
}
inline T& at(int i) const { return operator[](i); }
- inline T& last() const {
- return at(length_ - 1);
- }
+ inline T& last() const { return at(length_ - 1); }
+ inline T& first() const { return at(0); }
INLINE(bool is_empty() const) { return length_ == 0; }
INLINE(int length() const) { return length_; }
diff --git a/deps/v8/src/log.cc b/deps/v8/src/log.cc
index 56808202ad..6bbefbceb1 100644
--- a/deps/v8/src/log.cc
+++ b/deps/v8/src/log.cc
@@ -890,9 +890,7 @@ void Logger::HeapSampleJSConstructorEvent(const char* constructor,
if (!Log::IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg;
msg.Append("heap-js-cons-item,%s,%d,%d\n",
- constructor != NULL ?
- (constructor[0] != '\0' ? constructor : "(anonymous)") :
- "(no_constructor)",
+ constructor[0] != '\0' ? constructor : "(anonymous)",
number, bytes);
msg.WriteToLogFile();
#endif
diff --git a/deps/v8/src/macro-assembler.h b/deps/v8/src/macro-assembler.h
index 983802e6a7..5631decba9 100644
--- a/deps/v8/src/macro-assembler.h
+++ b/deps/v8/src/macro-assembler.h
@@ -28,6 +28,40 @@
#ifndef V8_MACRO_ASSEMBLER_H_
#define V8_MACRO_ASSEMBLER_H_
+
+// Helper types to make boolean flags easier to read at call sites.
+enum InvokeFlag {
+ CALL_FUNCTION,
+ JUMP_FUNCTION
+};
+
+
+enum CodeLocation {
+ IN_JAVASCRIPT,
+ IN_JS_ENTRY,
+ IN_C_ENTRY
+};
+
+
+enum HandlerType {
+ TRY_CATCH_HANDLER,
+ TRY_FINALLY_HANDLER,
+ JS_ENTRY_HANDLER
+};
+
+
+// Flags used for the AllocateObjectInNewSpace functions.
+enum AllocationFlags {
+ // No special flags.
+ NO_ALLOCATION_FLAGS = 0,
+ // Return the pointer to the allocated object already tagged as a heap object.
+ TAG_OBJECT = 1 << 0,
+ // The content of the result register already contains the allocation top in
+ // new space.
+ RESULT_CONTAINS_TOP = 1 << 1
+};
+
+
#if V8_TARGET_ARCH_IA32
#include "assembler.h"
#include "ia32/assembler-ia32.h"
diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc
index d139093a17..e682fe29f9 100644
--- a/deps/v8/src/mark-compact.cc
+++ b/deps/v8/src/mark-compact.cc
@@ -41,6 +41,7 @@ namespace internal {
bool MarkCompactCollector::force_compaction_ = false;
bool MarkCompactCollector::compacting_collection_ = false;
+bool MarkCompactCollector::compact_on_next_gc_ = false;
int MarkCompactCollector::previous_marked_count_ = 0;
GCTracer* MarkCompactCollector::tracer_ = NULL;
@@ -104,35 +105,15 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
// variable.
tracer_ = tracer;
- static const int kFragmentationLimit = 50; // Percent.
#ifdef DEBUG
ASSERT(state_ == IDLE);
state_ = PREPARE_GC;
#endif
ASSERT(!FLAG_always_compact || !FLAG_never_compact);
- compacting_collection_ = FLAG_always_compact || force_compaction_;
-
- // We compact the old generation if it gets too fragmented (ie, we could
- // recover an expected amount of space by reclaiming the waste and free
- // list blocks). We always compact when the flag --gc-global is true
- // because objects do not get promoted out of new space on non-compacting
- // GCs.
- if (!compacting_collection_) {
- int old_gen_recoverable = 0;
- int old_gen_used = 0;
-
- OldSpaces spaces;
- while (OldSpace* space = spaces.next()) {
- old_gen_recoverable += space->Waste() + space->AvailableFree();
- old_gen_used += space->Size();
- }
- int old_gen_fragmentation =
- static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
- if (old_gen_fragmentation > kFragmentationLimit) {
- compacting_collection_ = true;
- }
- }
+ compacting_collection_ =
+ FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
+ compact_on_next_gc_ = false;
if (FLAG_never_compact) compacting_collection_ = false;
if (FLAG_collect_maps) CreateBackPointers();
@@ -173,6 +154,31 @@ void MarkCompactCollector::Finish() {
// GC, because it relies on the new address of certain old space
// objects (empty string, illegal builtin).
StubCache::Clear();
+
+ // If we've just compacted old space there's no reason to check the
+ // fragmentation limit. Just return.
+ if (HasCompacted()) return;
+
+ // We compact the old generation on the next GC if it has gotten too
+ // fragmented (ie, we could recover an expected amount of space by
+ // reclaiming the waste and free list blocks).
+ static const int kFragmentationLimit = 15; // Percent.
+ static const int kFragmentationAllowed = 1 * MB; // Absolute.
+ int old_gen_recoverable = 0;
+ int old_gen_used = 0;
+
+ OldSpaces spaces;
+ while (OldSpace* space = spaces.next()) {
+ old_gen_recoverable += space->Waste() + space->AvailableFree();
+ old_gen_used += space->Size();
+ }
+
+ int old_gen_fragmentation =
+ static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
+ if (old_gen_fragmentation > kFragmentationLimit &&
+ old_gen_recoverable > kFragmentationAllowed) {
+ compact_on_next_gc_ = true;
+ }
}
diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h
index 0bd212e0c1..2da2b1f757 100644
--- a/deps/v8/src/mark-compact.h
+++ b/deps/v8/src/mark-compact.h
@@ -130,6 +130,9 @@ class MarkCompactCollector: public AllStatic {
// Global flag indicating whether spaces were compacted on the last GC.
static bool compacting_collection_;
+ // Global flag indicating whether spaces will be compacted on the next GC.
+ static bool compact_on_next_gc_;
+
// The number of objects left marked at the end of the last completed full
// GC (expected to be zero).
static int previous_marked_count_;
diff --git a/deps/v8/src/mirror-delay.js b/deps/v8/src/mirror-delay.js
index 76ae75bf69..ee3dd647a6 100644
--- a/deps/v8/src/mirror-delay.js
+++ b/deps/v8/src/mirror-delay.js
@@ -201,7 +201,8 @@ PropertyAttribute.DontDelete = DONT_DELETE;
ScopeType = { Global: 0,
Local: 1,
With: 2,
- Closure: 3 };
+ Closure: 3,
+ Catch: 4 };
// Mirror hierarchy:
diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc
index ef4aae5311..9fc9b1d4ef 100644
--- a/deps/v8/src/objects-debug.cc
+++ b/deps/v8/src/objects-debug.cc
@@ -769,11 +769,14 @@ void JSRegExp::JSRegExpVerify() {
FixedArray* arr = FixedArray::cast(data());
Object* ascii_data = arr->get(JSRegExp::kIrregexpASCIICodeIndex);
- ASSERT(ascii_data->IsTheHole()
- || (is_native ? ascii_data->IsCode() : ascii_data->IsByteArray()));
+ // TheHole : Not compiled yet.
+ // JSObject: Compilation error.
+ // Code/ByteArray: Compiled code.
+ ASSERT(ascii_data->IsTheHole() || ascii_data->IsJSObject() ||
+ (is_native ? ascii_data->IsCode() : ascii_data->IsByteArray()));
Object* uc16_data = arr->get(JSRegExp::kIrregexpUC16CodeIndex);
- ASSERT(uc16_data->IsTheHole()
- || (is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
+ ASSERT(uc16_data->IsTheHole() || uc16_data->IsJSObject() ||
+ (is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
ASSERT(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
ASSERT(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
break;
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 9ea131fa7b..583af7c58a 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -1186,7 +1186,9 @@ void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
String* JSObject::class_name() {
- if (IsJSFunction()) return Heap::function_class_symbol();
+ if (IsJSFunction()) {
+ return Heap::function_class_symbol();
+ }
if (map()->constructor()->IsJSFunction()) {
JSFunction* constructor = JSFunction::cast(map()->constructor());
return String::cast(constructor->shared()->instance_class_name());
@@ -1196,6 +1198,20 @@ String* JSObject::class_name() {
}
+String* JSObject::constructor_name() {
+ if (IsJSFunction()) {
+ return Heap::function_class_symbol();
+ }
+ if (map()->constructor()->IsJSFunction()) {
+ JSFunction* constructor = JSFunction::cast(map()->constructor());
+ String* name = String::cast(constructor->shared()->name());
+ return name->length() > 0 ? name : constructor->shared()->inferred_name();
+ }
+ // If the constructor is not present, return "Object".
+ return Heap::Object_symbol();
+}
+
+
void JSObject::JSObjectIterateBody(int object_size, ObjectVisitor* v) {
// Iterate over all fields in the body. Assumes all are Object*.
IteratePointers(v, kPropertiesOffset, object_size);
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index 3f6f5fff52..d9edce7796 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -1392,6 +1392,10 @@ class JSObject: public HeapObject {
// Returns the class name ([[Class]] property in the specification).
String* class_name();
+ // Returns the constructor name (the name (possibly, inferred name) of the
+ // function that was used to instantiate the object).
+ String* constructor_name();
+
// Retrieve interceptors.
InterceptorInfo* GetNamedInterceptor();
InterceptorInfo* GetIndexedInterceptor();
@@ -2634,8 +2638,8 @@ class Code: public HeapObject {
// the layout of the code object into account.
int ExecutableSize() {
// Check that the assumptions about the layout of the code object holds.
- ASSERT_EQ(instruction_start() - address(),
- static_cast<intptr_t>(Code::kHeaderSize));
+ ASSERT_EQ(static_cast<int>(instruction_start() - address()),
+ Code::kHeaderSize);
return instruction_size() + Code::kHeaderSize;
}
@@ -2891,8 +2895,12 @@ class Map: public HeapObject {
// Byte offsets within kInstanceSizesOffset.
static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
- static const int kInObjectPropertiesOffset = kInstanceSizesOffset + 1;
- static const int kPreAllocatedPropertyFieldsOffset = kInstanceSizesOffset + 2;
+ static const int kInObjectPropertiesByte = 1;
+ static const int kInObjectPropertiesOffset =
+ kInstanceSizesOffset + kInObjectPropertiesByte;
+ static const int kPreAllocatedPropertyFieldsByte = 2;
+ static const int kPreAllocatedPropertyFieldsOffset =
+ kInstanceSizesOffset + kPreAllocatedPropertyFieldsByte;
// The byte at position 3 is not in use at the moment.
// Byte offsets within kInstanceAttributesOffset attributes.
@@ -3097,9 +3105,7 @@ class SharedFunctionInfo: public HeapObject {
inline bool is_expression();
inline void set_is_expression(bool value);
- // Is this function a top-level function. Used for accessing the
- // caller of functions. Top-level functions (scripts, evals) are
- // returned as null; see JSFunction::GetCallerAccessor(...).
+ // Is this function a top-level function (scripts, evals).
inline bool is_toplevel();
inline void set_is_toplevel(bool value);
@@ -3528,9 +3534,13 @@ class JSRegExp: public JSObject {
static const int kAtomDataSize = kAtomPatternIndex + 1;
- // Irregexp compiled code or bytecode for ASCII.
+ // Irregexp compiled code or bytecode for ASCII. If compilation
+ // fails, this field holds an exception object that should be
+ // thrown if the regexp is used again.
static const int kIrregexpASCIICodeIndex = kDataIndex;
- // Irregexp compiled code or bytecode for UC16.
+ // Irregexp compiled code or bytecode for UC16. If compilation
+ // fails, this field holds an exception object that should be
+ // thrown if the regexp is used again.
static const int kIrregexpUC16CodeIndex = kDataIndex + 1;
// Maximal number of registers used by either ASCII or UC16.
// Only used to check that there is enough stack space
diff --git a/deps/v8/src/platform-freebsd.cc b/deps/v8/src/platform-freebsd.cc
index 92d72f8524..44d283b36d 100644
--- a/deps/v8/src/platform-freebsd.cc
+++ b/deps/v8/src/platform-freebsd.cc
@@ -141,7 +141,9 @@ void* OS::Allocate(const size_t requested,
void OS::Free(void* buf, const size_t length) {
// TODO(1240712): munmap has a return value which is ignored here.
- munmap(buf, length);
+ int result = munmap(buf, length);
+ USE(result);
+ ASSERT(result == 0);
}
@@ -334,7 +336,7 @@ bool VirtualMemory::Commit(void* address, size_t size, bool executable) {
bool VirtualMemory::Uncommit(void* address, size_t size) {
return mmap(address, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
+ MAP_PRIVATE | MAP_ANON | MAP_NORESERVE | MAP_FIXED,
kMmapFd, kMmapFdOffset) != MAP_FAILED;
}
diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc
index 6ec5070f91..fe4c31f515 100644
--- a/deps/v8/src/platform-linux.cc
+++ b/deps/v8/src/platform-linux.cc
@@ -56,6 +56,8 @@
#include "v8.h"
#include "platform.h"
+#include "top.h"
+#include "v8threads.h"
namespace v8 {
@@ -145,7 +147,9 @@ void* OS::Allocate(const size_t requested,
void OS::Free(void* address, const size_t size) {
// TODO(1240712): munmap has a return value which is ignored here.
- munmap(address, size);
+ int result = munmap(address, size);
+ USE(result);
+ ASSERT(result == 0);
}
@@ -360,7 +364,7 @@ bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
bool VirtualMemory::Uncommit(void* address, size_t size) {
return mmap(address, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
+ MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED,
kMmapFd, kMmapFdOffset) != MAP_FAILED;
}
@@ -580,6 +584,7 @@ Semaphore* OS::CreateSemaphore(int count) {
#ifdef ENABLE_LOGGING_AND_PROFILING
static Sampler* active_sampler_ = NULL;
+static pthread_t vm_thread_ = 0;
#if !defined(__GLIBC__) && (defined(__arm__) || defined(__thumb__))
@@ -608,6 +613,30 @@ enum ArmRegisters {R15 = 15, R13 = 13, R11 = 11};
#endif
+// A function that determines if a signal handler is called in the context
+// of a VM thread.
+//
+// The problem is that SIGPROF signal can be delivered to an arbitrary thread
+// (see http://code.google.com/p/google-perftools/issues/detail?id=106#c2)
+// So, if the signal is being handled in the context of a non-VM thread,
+// it means that the VM thread is running, and trying to sample its stack can
+// cause a crash.
+static inline bool IsVmThread() {
+ // In the case of a single VM thread, this check is enough.
+ if (pthread_equal(pthread_self(), vm_thread_)) return true;
+ // If there are multiple threads that use VM, they must have a thread id
+ // stored in TLS. To verify that the thread is really executing VM,
+ // we check Top's data. Since ThreadManager::RestoreThread first
+ // restores ThreadLocalTop from TLS and only then erases the TLS value,
+ // reading Top::thread_id() should not be affected by races.
+ if (ThreadManager::HasId() && !ThreadManager::IsArchived() &&
+ ThreadManager::CurrentId() == Top::thread_id()) {
+ return true;
+ }
+ return false;
+}
+
+
static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
USE(info);
if (signal != SIGPROF) return;
@@ -640,7 +669,8 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
sample.fp = mcontext.arm_fp;
#endif
#endif
- active_sampler_->SampleStack(&sample);
+ if (IsVmThread())
+ active_sampler_->SampleStack(&sample);
}
// We always sample the VM state.
@@ -678,6 +708,8 @@ void Sampler::Start() {
// platforms.
if (active_sampler_ != NULL) return;
+ vm_thread_ = pthread_self();
+
// Request profiling signals.
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -713,6 +745,7 @@ void Sampler::Stop() {
active_ = false;
}
+
#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-macos.cc b/deps/v8/src/platform-macos.cc
index c0810649fe..596b0fb040 100644
--- a/deps/v8/src/platform-macos.cc
+++ b/deps/v8/src/platform-macos.cc
@@ -141,7 +141,9 @@ void* OS::Allocate(const size_t requested,
void OS::Free(void* address, const size_t size) {
// TODO(1240712): munmap has a return value which is ignored here.
- munmap(address, size);
+ int result = munmap(address, size);
+ USE(result);
+ ASSERT(result == 0);
}
@@ -211,8 +213,17 @@ void OS::LogSharedLibraryAddresses() {
for (unsigned int i = 0; i < images_count; ++i) {
const mach_header* header = _dyld_get_image_header(i);
if (header == NULL) continue;
+#if V8_HOST_ARCH_X64
+ uint64_t size;
+ char* code_ptr = getsectdatafromheader_64(
+ reinterpret_cast<const mach_header_64*>(header),
+ SEG_TEXT,
+ SECT_TEXT,
+ &size);
+#else
unsigned int size;
char* code_ptr = getsectdatafromheader(header, SEG_TEXT, SECT_TEXT, &size);
+#endif
if (code_ptr == NULL) continue;
const uintptr_t slide = _dyld_get_image_vmaddr_slide(i);
const uintptr_t start = reinterpret_cast<uintptr_t>(code_ptr) + slide;
@@ -309,7 +320,7 @@ bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
bool VirtualMemory::Uncommit(void* address, size_t size) {
return mmap(address, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
+ MAP_PRIVATE | MAP_ANON | MAP_NORESERVE | MAP_FIXED,
kMmapFd, kMmapFdOffset) != MAP_FAILED;
}
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index f772d32d9d..95776e54af 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -4556,22 +4556,25 @@ static Object* Runtime_LookupContext(Arguments args) {
}
-// A mechanism to return pairs of Object*'s. This is somewhat
-// compiler-dependent as it assumes that a 64-bit value (a long long)
-// is returned via two registers (edx:eax on ia32). Both the ia32 and
-// arm platform support this; it is mostly an issue of "coaxing" the
-// compiler to do the right thing.
-//
-// TODO(1236026): This is a non-portable hack that should be removed.
+// A mechanism to return a pair of Object pointers in registers (if possible).
+// How this is achieved is calling convention-dependent.
+// All currently supported x86 compilers use calling conventions that are cdecl
+// variants where a 64-bit value is returned in two 32-bit registers
+// (edx:eax on ia32, r1:r0 on ARM).
+// In AMD-64 calling convention a struct of two pointers is returned in rdx:rax.
+// In Win64 calling convention, a struct of two pointers is returned in memory,
+// allocated by the caller, and passed as a pointer in a hidden first parameter.
#ifdef V8_HOST_ARCH_64_BIT
-// Tested with GCC, not with MSVC.
struct ObjectPair {
Object* x;
Object* y;
};
+
static inline ObjectPair MakePair(Object* x, Object* y) {
ObjectPair result = {x, y};
- return result; // Pointers x and y returned in rax and rdx, in AMD-x64-abi.
+ // Pointers x and y returned in rax and rdx, in AMD-x64-abi.
+ // In Win64 they are assigned to a hidden first argument.
+ return result;
}
#else
typedef uint64_t ObjectPair;
@@ -4582,8 +4585,6 @@ static inline ObjectPair MakePair(Object* x, Object* y) {
#endif
-
-
static inline Object* Unhole(Object* x, PropertyAttributes attributes) {
ASSERT(!x->IsTheHole() || (attributes & READ_ONLY) != 0);
USE(attributes);
@@ -4612,7 +4613,7 @@ static JSObject* ComputeReceiverForNonGlobal(JSObject* holder) {
static ObjectPair LoadContextSlotHelper(Arguments args, bool throw_error) {
HandleScope scope;
- ASSERT(args.length() == 2);
+ ASSERT_EQ(2, args.length());
if (!args[0]->IsContext() || !args[1]->IsString()) {
return MakePair(Top::ThrowIllegalOperation(), NULL);
@@ -6341,7 +6342,12 @@ class ScopeIterator {
ScopeTypeGlobal = 0,
ScopeTypeLocal,
ScopeTypeWith,
- ScopeTypeClosure
+ ScopeTypeClosure,
+ // Every catch block contains an implicit with block (its parameter is
+ // a JSContextExtensionObject) that extends current scope with a variable
+ // holding exception object. Such with blocks are treated as scopes of their
+ // own type.
+ ScopeTypeCatch
};
explicit ScopeIterator(JavaScriptFrame* frame)
@@ -6417,7 +6423,14 @@ class ScopeIterator {
return ScopeTypeClosure;
}
ASSERT(context_->has_extension());
- ASSERT(!context_->extension()->IsJSContextExtensionObject());
+ // Current scope is either an explicit with statement or a with statement
+  // implicitly generated for a catch block.
+ // If the extension object here is a JSContextExtensionObject then
+  // current with statement is one from a catch block otherwise it's a
+ // regular with statement.
+ if (context_->extension()->IsJSContextExtensionObject()) {
+ return ScopeTypeCatch;
+ }
return ScopeTypeWith;
}
@@ -6432,6 +6445,7 @@ class ScopeIterator {
return MaterializeLocalScope(frame_);
break;
case ScopeIterator::ScopeTypeWith:
+ case ScopeIterator::ScopeTypeCatch:
// Return the with object.
return Handle<JSObject>(CurrentContext()->extension());
break;
@@ -6488,6 +6502,14 @@ class ScopeIterator {
break;
}
+ case ScopeIterator::ScopeTypeCatch: {
+ PrintF("Catch:\n");
+ Handle<JSObject> extension =
+ Handle<JSObject>(CurrentContext()->extension());
+ extension->Print();
+ break;
+ }
+
case ScopeIterator::ScopeTypeClosure: {
PrintF("Closure:\n");
CurrentContext()->Print();
@@ -6799,8 +6821,20 @@ Object* Runtime::FindSharedFunctionInfoInScript(Handle<Script> script,
target_start_position = start_position;
target = shared;
} else {
- if (target_start_position < start_position &&
- shared->end_position() < target->end_position()) {
+ if (target_start_position == start_position &&
+ shared->end_position() == target->end_position()) {
+        // If a top-level function contains only one function
+        // declaration the source for the top-level and the function is
+        // the same. In that case prefer the non top-level function.
+ if (!shared->is_toplevel()) {
+ target_start_position = start_position;
+ target = shared;
+ }
+ } else if (target_start_position <= start_position &&
+ shared->end_position() <= target->end_position()) {
+ // This containment check includes equality as a function inside
+ // a top-level function can share either start or end position
+ // with the top-level function.
target_start_position = start_position;
target = shared;
}
@@ -6912,7 +6946,8 @@ static Object* Runtime_ChangeBreakOnException(Arguments args) {
// Prepare for stepping
// args[0]: break id for checking execution state
// args[1]: step action from the enumeration StepAction
-// args[2]: number of times to perform the step
+// args[2]: number of times to perform the step; for step out it is the number
+// of frames to step down.
static Object* Runtime_PrepareStep(Arguments args) {
HandleScope scope;
ASSERT(args.length() == 3);
@@ -6939,6 +6974,9 @@ static Object* Runtime_PrepareStep(Arguments args) {
return Top::Throw(Heap::illegal_argument_symbol());
}
+ // Clear all current stepping setup.
+ Debug::ClearStepping();
+
// Prepare step.
Debug::PrepareStep(static_cast<StepAction>(step_action), step_count);
return Heap::undefined_value();
@@ -7598,7 +7636,7 @@ static Object* Runtime_ListNatives(Arguments args) {
HandleScope scope;
Handle<JSArray> result = Factory::NewJSArray(0);
int index = 0;
-#define ADD_ENTRY(Name, argc) \
+#define ADD_ENTRY(Name, argc, ressize) \
{ \
HandleScope inner; \
Handle<String> name = \
@@ -7634,13 +7672,13 @@ static Object* Runtime_IS_VAR(Arguments args) {
// ----------------------------------------------------------------------------
// Implementation of Runtime
-#define F(name, nargs) \
+#define F(name, nargs, ressize) \
{ #name, "RuntimeStub_" #name, FUNCTION_ADDR(Runtime_##name), nargs, \
- static_cast<int>(Runtime::k##name) },
+ static_cast<int>(Runtime::k##name), ressize },
static Runtime::Function Runtime_functions[] = {
RUNTIME_FUNCTION_LIST(F)
- { NULL, NULL, NULL, 0, -1 }
+ { NULL, NULL, NULL, 0, -1, 0 }
};
#undef F
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index 1be677a116..ca38baf8ee 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -43,269 +43,269 @@ namespace internal {
// this problem. Please avoid large recursive macros whenever possible.
#define RUNTIME_FUNCTION_LIST_ALWAYS_1(F) \
/* Property access */ \
- F(GetProperty, 2) \
- F(KeyedGetProperty, 2) \
- F(DeleteProperty, 2) \
- F(HasLocalProperty, 2) \
- F(HasProperty, 2) \
- F(HasElement, 2) \
- F(IsPropertyEnumerable, 2) \
- F(GetPropertyNames, 1) \
- F(GetPropertyNamesFast, 1) \
- F(GetArgumentsProperty, 1) \
- F(ToFastProperties, 1) \
- F(ToSlowProperties, 1) \
- \
- F(IsInPrototypeChain, 2) \
- F(SetHiddenPrototype, 2) \
- \
- F(IsConstructCall, 0) \
+ F(GetProperty, 2, 1) \
+ F(KeyedGetProperty, 2, 1) \
+ F(DeleteProperty, 2, 1) \
+ F(HasLocalProperty, 2, 1) \
+ F(HasProperty, 2, 1) \
+ F(HasElement, 2, 1) \
+ F(IsPropertyEnumerable, 2, 1) \
+ F(GetPropertyNames, 1, 1) \
+ F(GetPropertyNamesFast, 1, 1) \
+ F(GetArgumentsProperty, 1, 1) \
+ F(ToFastProperties, 1, 1) \
+ F(ToSlowProperties, 1, 1) \
+ \
+ F(IsInPrototypeChain, 2, 1) \
+ F(SetHiddenPrototype, 2, 1) \
+ \
+ F(IsConstructCall, 0, 1) \
\
/* Utilities */ \
- F(GetCalledFunction, 0) \
- F(GetFunctionDelegate, 1) \
- F(GetConstructorDelegate, 1) \
- F(NewArguments, 1) \
- F(NewArgumentsFast, 3) \
- F(LazyCompile, 1) \
- F(SetNewFunctionAttributes, 1) \
+ F(GetCalledFunction, 0, 1) \
+ F(GetFunctionDelegate, 1, 1) \
+ F(GetConstructorDelegate, 1, 1) \
+ F(NewArguments, 1, 1) \
+ F(NewArgumentsFast, 3, 1) \
+ F(LazyCompile, 1, 1) \
+ F(SetNewFunctionAttributes, 1, 1) \
\
/* Array join support */ \
- F(PushIfAbsent, 2) \
- F(ArrayConcat, 1) \
+ F(PushIfAbsent, 2, 1) \
+ F(ArrayConcat, 1, 1) \
\
/* Conversions */ \
- F(ToBool, 1) \
- F(Typeof, 1) \
- \
- F(StringToNumber, 1) \
- F(StringFromCharCodeArray, 1) \
- F(StringParseInt, 2) \
- F(StringParseFloat, 1) \
- F(StringToLowerCase, 1) \
- F(StringToUpperCase, 1) \
- F(CharFromCode, 1) \
- F(URIEscape, 1) \
- F(URIUnescape, 1) \
- \
- F(NumberToString, 1) \
- F(NumberToInteger, 1) \
- F(NumberToJSUint32, 1) \
- F(NumberToJSInt32, 1) \
- F(NumberToSmi, 1) \
+ F(ToBool, 1, 1) \
+ F(Typeof, 1, 1) \
+ \
+ F(StringToNumber, 1, 1) \
+ F(StringFromCharCodeArray, 1, 1) \
+ F(StringParseInt, 2, 1) \
+ F(StringParseFloat, 1, 1) \
+ F(StringToLowerCase, 1, 1) \
+ F(StringToUpperCase, 1, 1) \
+ F(CharFromCode, 1, 1) \
+ F(URIEscape, 1, 1) \
+ F(URIUnescape, 1, 1) \
+ \
+ F(NumberToString, 1, 1) \
+ F(NumberToInteger, 1, 1) \
+ F(NumberToJSUint32, 1, 1) \
+ F(NumberToJSInt32, 1, 1) \
+ F(NumberToSmi, 1, 1) \
\
/* Arithmetic operations */ \
- F(NumberAdd, 2) \
- F(NumberSub, 2) \
- F(NumberMul, 2) \
- F(NumberDiv, 2) \
- F(NumberMod, 2) \
- F(NumberUnaryMinus, 1) \
+ F(NumberAdd, 2, 1) \
+ F(NumberSub, 2, 1) \
+ F(NumberMul, 2, 1) \
+ F(NumberDiv, 2, 1) \
+ F(NumberMod, 2, 1) \
+ F(NumberUnaryMinus, 1, 1) \
\
- F(StringAdd, 2) \
- F(StringBuilderConcat, 2) \
+ F(StringAdd, 2, 1) \
+ F(StringBuilderConcat, 2, 1) \
\
/* Bit operations */ \
- F(NumberOr, 2) \
- F(NumberAnd, 2) \
- F(NumberXor, 2) \
- F(NumberNot, 1) \
+ F(NumberOr, 2, 1) \
+ F(NumberAnd, 2, 1) \
+ F(NumberXor, 2, 1) \
+ F(NumberNot, 1, 1) \
\
- F(NumberShl, 2) \
- F(NumberShr, 2) \
- F(NumberSar, 2) \
+ F(NumberShl, 2, 1) \
+ F(NumberShr, 2, 1) \
+ F(NumberSar, 2, 1) \
\
/* Comparisons */ \
- F(NumberEquals, 2) \
- F(StringEquals, 2) \
+ F(NumberEquals, 2, 1) \
+ F(StringEquals, 2, 1) \
\
- F(NumberCompare, 3) \
- F(SmiLexicographicCompare, 2) \
- F(StringCompare, 2) \
+ F(NumberCompare, 3, 1) \
+ F(SmiLexicographicCompare, 2, 1) \
+ F(StringCompare, 2, 1) \
\
/* Math */ \
- F(Math_abs, 1) \
- F(Math_acos, 1) \
- F(Math_asin, 1) \
- F(Math_atan, 1) \
- F(Math_atan2, 2) \
- F(Math_ceil, 1) \
- F(Math_cos, 1) \
- F(Math_exp, 1) \
- F(Math_floor, 1) \
- F(Math_log, 1) \
- F(Math_pow, 2) \
- F(Math_round, 1) \
- F(Math_sin, 1) \
- F(Math_sqrt, 1) \
- F(Math_tan, 1) \
+ F(Math_abs, 1, 1) \
+ F(Math_acos, 1, 1) \
+ F(Math_asin, 1, 1) \
+ F(Math_atan, 1, 1) \
+ F(Math_atan2, 2, 1) \
+ F(Math_ceil, 1, 1) \
+ F(Math_cos, 1, 1) \
+ F(Math_exp, 1, 1) \
+ F(Math_floor, 1, 1) \
+ F(Math_log, 1, 1) \
+ F(Math_pow, 2, 1) \
+ F(Math_round, 1, 1) \
+ F(Math_sin, 1, 1) \
+ F(Math_sqrt, 1, 1) \
+ F(Math_tan, 1, 1) \
\
/* Regular expressions */ \
- F(RegExpCompile, 3) \
- F(RegExpExec, 4) \
+ F(RegExpCompile, 3, 1) \
+ F(RegExpExec, 4, 1) \
\
/* Strings */ \
- F(StringCharCodeAt, 2) \
- F(StringIndexOf, 3) \
- F(StringLastIndexOf, 3) \
- F(StringLocaleCompare, 2) \
- F(StringSlice, 3) \
- F(StringReplaceRegExpWithString, 4) \
- F(StringMatch, 3) \
+ F(StringCharCodeAt, 2, 1) \
+ F(StringIndexOf, 3, 1) \
+ F(StringLastIndexOf, 3, 1) \
+ F(StringLocaleCompare, 2, 1) \
+ F(StringSlice, 3, 1) \
+ F(StringReplaceRegExpWithString, 4, 1) \
+ F(StringMatch, 3, 1) \
\
/* Numbers */ \
- F(NumberToRadixString, 2) \
- F(NumberToFixed, 2) \
- F(NumberToExponential, 2) \
- F(NumberToPrecision, 2)
+ F(NumberToRadixString, 2, 1) \
+ F(NumberToFixed, 2, 1) \
+ F(NumberToExponential, 2, 1) \
+ F(NumberToPrecision, 2, 1)
#define RUNTIME_FUNCTION_LIST_ALWAYS_2(F) \
/* Reflection */ \
- F(FunctionSetInstanceClassName, 2) \
- F(FunctionSetLength, 2) \
- F(FunctionSetPrototype, 2) \
- F(FunctionGetName, 1) \
- F(FunctionSetName, 2) \
- F(FunctionGetSourceCode, 1) \
- F(FunctionGetScript, 1) \
- F(FunctionGetScriptSourcePosition, 1) \
- F(FunctionGetPositionForOffset, 2) \
- F(FunctionIsAPIFunction, 1) \
- F(GetScript, 1) \
- F(CollectStackTrace, 2) \
- \
- F(ClassOf, 1) \
- F(SetCode, 2) \
- \
- F(CreateApiFunction, 1) \
- F(IsTemplate, 1) \
- F(GetTemplateField, 2) \
- F(DisableAccessChecks, 1) \
- F(EnableAccessChecks, 1) \
+ F(FunctionSetInstanceClassName, 2, 1) \
+ F(FunctionSetLength, 2, 1) \
+ F(FunctionSetPrototype, 2, 1) \
+ F(FunctionGetName, 1, 1) \
+ F(FunctionSetName, 2, 1) \
+ F(FunctionGetSourceCode, 1, 1) \
+ F(FunctionGetScript, 1, 1) \
+ F(FunctionGetScriptSourcePosition, 1, 1) \
+ F(FunctionGetPositionForOffset, 2, 1) \
+ F(FunctionIsAPIFunction, 1, 1) \
+ F(GetScript, 1, 1) \
+ F(CollectStackTrace, 2, 1) \
+ \
+ F(ClassOf, 1, 1) \
+ F(SetCode, 2, 1) \
+ \
+ F(CreateApiFunction, 1, 1) \
+ F(IsTemplate, 1, 1) \
+ F(GetTemplateField, 2, 1) \
+ F(DisableAccessChecks, 1, 1) \
+ F(EnableAccessChecks, 1, 1) \
\
/* Dates */ \
- F(DateCurrentTime, 0) \
- F(DateParseString, 2) \
- F(DateLocalTimezone, 1) \
- F(DateLocalTimeOffset, 0) \
- F(DateDaylightSavingsOffset, 1) \
+ F(DateCurrentTime, 0, 1) \
+ F(DateParseString, 2, 1) \
+ F(DateLocalTimezone, 1, 1) \
+ F(DateLocalTimeOffset, 0, 1) \
+ F(DateDaylightSavingsOffset, 1, 1) \
\
/* Numbers */ \
- F(NumberIsFinite, 1) \
+ F(NumberIsFinite, 1, 1) \
\
/* Globals */ \
- F(CompileString, 2) \
- F(GlobalPrint, 1) \
+ F(CompileString, 2, 1) \
+ F(GlobalPrint, 1, 1) \
\
/* Eval */ \
- F(GlobalReceiver, 1) \
- F(ResolvePossiblyDirectEval, 2) \
+ F(GlobalReceiver, 1, 1) \
+ F(ResolvePossiblyDirectEval, 2, 1) \
\
- F(SetProperty, -1 /* 3 or 4 */) \
- F(IgnoreAttributesAndSetProperty, -1 /* 3 or 4 */) \
+ F(SetProperty, -1 /* 3 or 4 */, 1) \
+ F(IgnoreAttributesAndSetProperty, -1 /* 3 or 4 */, 1) \
\
/* Arrays */ \
- F(RemoveArrayHoles, 2) \
- F(GetArrayKeys, 2) \
- F(MoveArrayContents, 2) \
- F(EstimateNumberOfElements, 1) \
+ F(RemoveArrayHoles, 2, 1) \
+ F(GetArrayKeys, 2, 1) \
+ F(MoveArrayContents, 2, 1) \
+ F(EstimateNumberOfElements, 1, 1) \
\
/* Getters and Setters */ \
- F(DefineAccessor, -1 /* 4 or 5 */) \
- F(LookupAccessor, 3) \
+ F(DefineAccessor, -1 /* 4 or 5 */, 1) \
+ F(LookupAccessor, 3, 1) \
\
/* Literals */ \
- F(MaterializeRegExpLiteral, 4)\
- F(CreateArrayLiteralBoilerplate, 3) \
- F(CreateObjectLiteralBoilerplate, 3) \
- F(CloneLiteralBoilerplate, 1) \
- F(CloneShallowLiteralBoilerplate, 1) \
+ F(MaterializeRegExpLiteral, 4, 1)\
+ F(CreateArrayLiteralBoilerplate, 3, 1) \
+ F(CreateObjectLiteralBoilerplate, 3, 1) \
+ F(CloneLiteralBoilerplate, 1, 1) \
+ F(CloneShallowLiteralBoilerplate, 1, 1) \
\
/* Catch context extension objects */ \
- F(CreateCatchExtensionObject, 2) \
+ F(CreateCatchExtensionObject, 2, 1) \
\
/* Statements */ \
- F(NewClosure, 2) \
- F(NewObject, 1) \
- F(Throw, 1) \
- F(ReThrow, 1) \
- F(ThrowReferenceError, 1) \
- F(StackGuard, 1) \
+ F(NewClosure, 2, 1) \
+ F(NewObject, 1, 1) \
+ F(Throw, 1, 1) \
+ F(ReThrow, 1, 1) \
+ F(ThrowReferenceError, 1, 1) \
+ F(StackGuard, 1, 1) \
\
/* Contexts */ \
- F(NewContext, 1) \
- F(PushContext, 1) \
- F(PushCatchContext, 1) \
- F(LookupContext, 2) \
- F(LoadContextSlot, 2) \
- F(LoadContextSlotNoReferenceError, 2) \
- F(StoreContextSlot, 3) \
+ F(NewContext, 1, 1) \
+ F(PushContext, 1, 1) \
+ F(PushCatchContext, 1, 1) \
+ F(LookupContext, 2, 1) \
+ F(LoadContextSlot, 2, 2) \
+ F(LoadContextSlotNoReferenceError, 2, 2) \
+ F(StoreContextSlot, 3, 1) \
\
/* Declarations and initialization */ \
- F(DeclareGlobals, 3) \
- F(DeclareContextSlot, 4) \
- F(InitializeVarGlobal, -1 /* 1 or 2 */) \
- F(InitializeConstGlobal, 2) \
- F(InitializeConstContextSlot, 3) \
- F(OptimizeObjectForAddingMultipleProperties, 2) \
- F(TransformToFastProperties, 1) \
+ F(DeclareGlobals, 3, 1) \
+ F(DeclareContextSlot, 4, 1) \
+ F(InitializeVarGlobal, -1 /* 1 or 2 */, 1) \
+ F(InitializeConstGlobal, 2, 1) \
+ F(InitializeConstContextSlot, 3, 1) \
+ F(OptimizeObjectForAddingMultipleProperties, 2, 1) \
+ F(TransformToFastProperties, 1, 1) \
\
/* Debugging */ \
- F(DebugPrint, 1) \
- F(DebugTrace, 0) \
- F(TraceEnter, 0) \
- F(TraceExit, 1) \
- F(Abort, 2) \
+ F(DebugPrint, 1, 1) \
+ F(DebugTrace, 0, 1) \
+ F(TraceEnter, 0, 1) \
+ F(TraceExit, 1, 1) \
+ F(Abort, 2, 1) \
/* Logging */ \
- F(Log, 2) \
+ F(Log, 2, 1) \
\
/* Pseudo functions - handled as macros by parser */ \
- F(IS_VAR, 1)
+ F(IS_VAR, 1, 1)
#ifdef ENABLE_DEBUGGER_SUPPORT
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \
/* Debugger support*/ \
- F(DebugBreak, 0) \
- F(SetDebugEventListener, 2) \
- F(Break, 0) \
- F(DebugGetPropertyDetails, 2) \
- F(DebugGetProperty, 2) \
- F(DebugLocalPropertyNames, 1) \
- F(DebugLocalElementNames, 1) \
- F(DebugPropertyTypeFromDetails, 1) \
- F(DebugPropertyAttributesFromDetails, 1) \
- F(DebugPropertyIndexFromDetails, 1) \
- F(DebugInterceptorInfo, 1) \
- F(DebugNamedInterceptorPropertyNames, 1) \
- F(DebugIndexedInterceptorElementNames, 1) \
- F(DebugNamedInterceptorPropertyValue, 2) \
- F(DebugIndexedInterceptorElementValue, 2) \
- F(CheckExecutionState, 1) \
- F(GetFrameCount, 1) \
- F(GetFrameDetails, 2) \
- F(GetScopeCount, 2) \
- F(GetScopeDetails, 3) \
- F(DebugPrintScopes, 0) \
- F(GetCFrames, 1) \
- F(GetThreadCount, 1) \
- F(GetThreadDetails, 2) \
- F(GetBreakLocations, 1) \
- F(SetFunctionBreakPoint, 3) \
- F(SetScriptBreakPoint, 3) \
- F(ClearBreakPoint, 1) \
- F(ChangeBreakOnException, 2) \
- F(PrepareStep, 3) \
- F(ClearStepping, 0) \
- F(DebugEvaluate, 4) \
- F(DebugEvaluateGlobal, 3) \
- F(DebugGetLoadedScripts, 0) \
- F(DebugReferencedBy, 3) \
- F(DebugConstructedBy, 2) \
- F(DebugGetPrototype, 1) \
- F(SystemBreak, 0) \
- F(DebugDisassembleFunction, 1) \
- F(DebugDisassembleConstructor, 1) \
- F(FunctionGetInferredName, 1)
+ F(DebugBreak, 0, 1) \
+ F(SetDebugEventListener, 2, 1) \
+ F(Break, 0, 1) \
+ F(DebugGetPropertyDetails, 2, 1) \
+ F(DebugGetProperty, 2, 1) \
+ F(DebugLocalPropertyNames, 1, 1) \
+ F(DebugLocalElementNames, 1, 1) \
+ F(DebugPropertyTypeFromDetails, 1, 1) \
+ F(DebugPropertyAttributesFromDetails, 1, 1) \
+ F(DebugPropertyIndexFromDetails, 1, 1) \
+ F(DebugInterceptorInfo, 1, 1) \
+ F(DebugNamedInterceptorPropertyNames, 1, 1) \
+ F(DebugIndexedInterceptorElementNames, 1, 1) \
+ F(DebugNamedInterceptorPropertyValue, 2, 1) \
+ F(DebugIndexedInterceptorElementValue, 2, 1) \
+ F(CheckExecutionState, 1, 1) \
+ F(GetFrameCount, 1, 1) \
+ F(GetFrameDetails, 2, 1) \
+ F(GetScopeCount, 2, 1) \
+ F(GetScopeDetails, 3, 1) \
+ F(DebugPrintScopes, 0, 1) \
+ F(GetCFrames, 1, 1) \
+ F(GetThreadCount, 1, 1) \
+ F(GetThreadDetails, 2, 1) \
+ F(GetBreakLocations, 1, 1) \
+ F(SetFunctionBreakPoint, 3, 1) \
+ F(SetScriptBreakPoint, 3, 1) \
+ F(ClearBreakPoint, 1, 1) \
+ F(ChangeBreakOnException, 2, 1) \
+ F(PrepareStep, 3, 1) \
+ F(ClearStepping, 0, 1) \
+ F(DebugEvaluate, 4, 1) \
+ F(DebugEvaluateGlobal, 3, 1) \
+ F(DebugGetLoadedScripts, 0, 1) \
+ F(DebugReferencedBy, 3, 1) \
+ F(DebugConstructedBy, 2, 1) \
+ F(DebugGetPrototype, 1, 1) \
+ F(SystemBreak, 0, 1) \
+ F(DebugDisassembleFunction, 1, 1) \
+ F(DebugDisassembleConstructor, 1, 1) \
+ F(FunctionGetInferredName, 1, 1)
#else
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
#endif
@@ -313,7 +313,7 @@ namespace internal {
#ifdef DEBUG
#define RUNTIME_FUNCTION_LIST_DEBUG(F) \
/* Testing */ \
- F(ListNatives, 0)
+ F(ListNatives, 0, 1)
#else
#define RUNTIME_FUNCTION_LIST_DEBUG(F)
#endif
@@ -336,7 +336,7 @@ namespace internal {
class Runtime : public AllStatic {
public:
enum FunctionId {
-#define F(name, nargs) k##name,
+#define F(name, nargs, ressize) k##name,
RUNTIME_FUNCTION_LIST(F)
kNofFunctions
#undef F
@@ -357,6 +357,9 @@ class Runtime : public AllStatic {
// arguments.
int nargs;
int stub_id;
+ // Size of result, if complex (larger than a single pointer),
+ // otherwise zero.
+ int result_size;
};
// Get the runtime function with the given function id.
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index a16032ad3d..c89476241a 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -70,7 +70,7 @@ const int kPageAndOffsetMask = (1 << kPageAndOffsetBits) - 1;
// These values are special allocation space tags used for
// serialization.
-// Mar the pages executable on platforms that support it.
+// Mark the pages executable on platforms that support it.
const int kLargeCode = LAST_SPACE + 1;
// Allocate extra remembered-set bits.
const int kLargeFixedArray = LAST_SPACE + 2;
@@ -541,7 +541,7 @@ void ExternalReferenceTable::PopulateTable() {
#undef DEF_ENTRY_A
// Runtime functions
-#define RUNTIME_ENTRY(name, nargs) \
+#define RUNTIME_ENTRY(name, nargs, ressize) \
{ RUNTIME_FUNCTION, \
Runtime::k##name, \
"Runtime::" #name },
@@ -1201,19 +1201,25 @@ void Serializer::PutGlobalHandleStack(const List<Handle<Object> >& stack) {
void Serializer::PutContextStack() {
- List<Handle<Object> > contexts(2);
+ List<Context*> contexts(2);
while (HandleScopeImplementer::instance()->HasSavedContexts()) {
- Handle<Object> context =
+ Context* context =
HandleScopeImplementer::instance()->RestoreContext();
contexts.Add(context);
}
for (int i = contexts.length() - 1; i >= 0; i--) {
HandleScopeImplementer::instance()->SaveContext(contexts[i]);
}
- PutGlobalHandleStack(contexts);
+ writer_->PutC('C');
+ writer_->PutC('[');
+ writer_->PutInt(contexts.length());
+ if (!contexts.is_empty()) {
+ Object** start = reinterpret_cast<Object**>(&contexts.first());
+ VisitPointers(start, start + contexts.length());
+ }
+ writer_->PutC(']');
}
-
void Serializer::PutEncodedAddress(Address addr) {
writer_->PutC('P');
writer_->PutAddress(addr);
@@ -1541,9 +1547,16 @@ void Deserializer::GetGlobalHandleStack(List<Handle<Object> >* stack) {
void Deserializer::GetContextStack() {
- List<Handle<Object> > entered_contexts(2);
- GetGlobalHandleStack(&entered_contexts);
- for (int i = 0; i < entered_contexts.length(); i++) {
+ reader_.ExpectC('C');
+ CHECK_EQ(reader_.GetC(), '[');
+ int count = reader_.GetInt();
+ List<Context*> entered_contexts(count);
+ if (count > 0) {
+ Object** start = reinterpret_cast<Object**>(&entered_contexts.first());
+ VisitPointers(start, start + count);
+ }
+ reader_.ExpectC(']');
+ for (int i = 0; i < count; i++) {
HandleScopeImplementer::instance()->SaveContext(entered_contexts[i]);
}
}
diff --git a/deps/v8/src/serialize.h b/deps/v8/src/serialize.h
index 1b24065cf7..f5780ae35b 100644
--- a/deps/v8/src/serialize.h
+++ b/deps/v8/src/serialize.h
@@ -197,7 +197,7 @@ class Serializer: public ObjectVisitor {
int flags_end_; // The position right after the flags.
- // An array of per-space SimulatedHeapSpacees used as memory allocators.
+ // An array of per-space SimulatedHeapSpaces used as memory allocators.
SimulatedHeapSpace* allocator_[LAST_SPACE+1];
// A list of global handles at serialization time.
List<Object**> global_handles_;
diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc
index 45e82f4513..de9b233637 100644
--- a/deps/v8/src/spaces.cc
+++ b/deps/v8/src/spaces.cc
@@ -2561,10 +2561,12 @@ void LargeObjectSpace::Verify() {
ASSERT(map->IsMap());
ASSERT(Heap::map_space()->Contains(map));
- // We have only code, sequential strings, fixed arrays, and byte arrays
- // in large object space.
- ASSERT(object->IsCode() || object->IsSeqString()
- || object->IsFixedArray() || object->IsByteArray());
+ // We have only code, sequential strings, external strings
+ // (sequential strings that have been morphed into external
+ // strings), fixed arrays, and byte arrays in large object space.
+ ASSERT(object->IsCode() || object->IsSeqString() ||
+ object->IsExternalString() || object->IsFixedArray() ||
+ object->IsByteArray());
// The object itself should look OK.
object->Verify();
diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h
index 98663db025..babdd3ff27 100644
--- a/deps/v8/src/spaces.h
+++ b/deps/v8/src/spaces.h
@@ -445,12 +445,12 @@ class MemoryAllocator : public AllStatic {
// Due to encoding limitation, we can only have 8K chunks.
static const int kMaxNofChunks = 1 << Page::kPageSizeBits;
- // If a chunk has at least 32 pages, the maximum heap size is about
- // 8 * 1024 * 32 * 8K = 2G bytes.
-#if defined(ANDROID)
- static const int kPagesPerChunk = 16;
+ // If a chunk has at least 16 pages, the maximum heap size is about
+ // 8K * 8K * 16 = 1G bytes.
+#ifdef V8_TARGET_ARCH_X64
+ static const int kPagesPerChunk = 32;
#else
- static const int kPagesPerChunk = 64;
+ static const int kPagesPerChunk = 16;
#endif
static const int kChunkSize = kPagesPerChunk * Page::kPageSize;
diff --git a/deps/v8/src/v8threads.cc b/deps/v8/src/v8threads.cc
index 8e0a8be5a5..3022a7e9a6 100644
--- a/deps/v8/src/v8threads.cc
+++ b/deps/v8/src/v8threads.cc
@@ -241,7 +241,10 @@ ThreadState* ThreadState::Next() {
}
-int ThreadManager::next_id_ = 0;
+// Thread ids must start with 1, because in TLS having thread id 0 can't
+// be distinguished from not having a thread id at all (since NULL is
+// defined as 0.)
+int ThreadManager::last_id_ = 0;
Mutex* ThreadManager::mutex_ = OS::CreateMutex();
ThreadHandle ThreadManager::mutex_owner_(ThreadHandle::INVALID);
ThreadHandle ThreadManager::lazily_archived_thread_(ThreadHandle::INVALID);
@@ -250,7 +253,7 @@ ThreadState* ThreadManager::lazily_archived_thread_state_ = NULL;
void ThreadManager::ArchiveThread() {
ASSERT(!lazily_archived_thread_.IsValid());
- ASSERT(Thread::GetThreadLocal(thread_state_key) == NULL);
+ ASSERT(!IsArchived());
ThreadState* state = ThreadState::GetFree();
state->Unlink();
Thread::SetThreadLocal(thread_state_key, reinterpret_cast<void*>(state));
@@ -281,6 +284,11 @@ void ThreadManager::EagerlyArchiveThread() {
}
+bool ThreadManager::IsArchived() {
+ return Thread::HasThreadLocal(thread_state_key);
+}
+
+
void ThreadManager::Iterate(ObjectVisitor* v) {
// Expecting no threads during serialization/deserialization
for (ThreadState* state = ThreadState::FirstInUse();
@@ -321,15 +329,21 @@ int ThreadManager::CurrentId() {
void ThreadManager::AssignId() {
- if (!Thread::HasThreadLocal(thread_id_key)) {
+ if (!HasId()) {
ASSERT(Locker::IsLocked());
- int thread_id = next_id_++;
+ int thread_id = ++last_id_;
+ ASSERT(thread_id > 0); // see the comment near last_id_ definition.
Thread::SetThreadLocalInt(thread_id_key, thread_id);
Top::set_thread_id(thread_id);
}
}
+bool ThreadManager::HasId() {
+ return Thread::HasThreadLocal(thread_id_key);
+}
+
+
void ThreadManager::TerminateExecution(int thread_id) {
for (ThreadState* state = ThreadState::FirstInUse();
state != NULL;
diff --git a/deps/v8/src/v8threads.h b/deps/v8/src/v8threads.h
index 3f81f5706f..f808e54fed 100644
--- a/deps/v8/src/v8threads.h
+++ b/deps/v8/src/v8threads.h
@@ -86,6 +86,7 @@ class ThreadManager : public AllStatic {
static void ArchiveThread();
static bool RestoreThread();
+ static bool IsArchived();
static void Iterate(ObjectVisitor* v);
static void MarkCompactPrologue(bool is_compacting);
@@ -94,6 +95,7 @@ class ThreadManager : public AllStatic {
static int CurrentId();
static void AssignId();
+ static bool HasId();
static void TerminateExecution(int thread_id);
@@ -101,7 +103,7 @@ class ThreadManager : public AllStatic {
private:
static void EagerlyArchiveThread();
- static int next_id_; // V8 threads are identified through an integer.
+ static int last_id_; // V8 threads are identified through an integer.
static Mutex* mutex_;
static ThreadHandle mutex_owner_;
static ThreadHandle lazily_archived_thread_;
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index 39d18fd4da..df5058afee 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1
#define MINOR_VERSION 3
-#define BUILD_NUMBER 9
+#define BUILD_NUMBER 10
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc
index 6304324da0..af98ef9e05 100644
--- a/deps/v8/src/x64/assembler-x64.cc
+++ b/deps/v8/src/x64/assembler-x64.cc
@@ -174,6 +174,7 @@ void CpuFeatures::Probe() {
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
// Call instruction takes up 13 bytes and int3 takes up one byte.
+ static const int kCallInstructionSize = 13;
Address patch_site = pc_;
Memory::uint16_at(patch_site) = 0xBA49u; // movq r10, imm64
// Write "0x00, call r10" starting at last byte of address. We overwrite
@@ -183,8 +184,11 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
// Add the requested number of int3 instructions after the call.
for (int i = 0; i < guard_bytes; i++) {
- *(patch_site + 13 + i) = 0xCC; // int3
+ *(patch_site + kCallInstructionSize + i) = 0xCC; // int3
}
+
+ // Indicate that code has changed.
+ CPU::FlushICache(patch_site, kCallInstructionSize + guard_bytes);
}
@@ -275,7 +279,7 @@ Assembler::Assembler(void* buffer, int buffer_size) {
// Clear the buffer in debug mode unless it was provided by the
// caller in which case we can't be sure it's okay to overwrite
- // existing code in it; see CodePatcher::CodePatcher(...).
+ // existing code in it.
#ifdef DEBUG
if (own_buffer_) {
memset(buffer_, 0xCC, buffer_size); // int3
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index 44d8b46c06..1fea61ef53 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -44,7 +44,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
// rax, but JumpToBuiltin expects rax to contain the number of
// arguments including the receiver.
__ incq(rax);
- __ JumpToBuiltin(ExternalReference(id));
+ __ JumpToBuiltin(ExternalReference(id), 1);
}
@@ -536,11 +536,12 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
__ shl(rdi, Immediate(kPointerSizeLog2));
// rdi: size of new object
- // Make sure that the maximum heap object size will never cause us
- // problem here, because it is always greater than the maximum
- // instance size that can be represented in a byte.
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >= (1 << kBitsPerByte));
- __ AllocateObjectInNewSpace(rdi, rbx, rdi, no_reg, &rt_call, false);
+ __ AllocateObjectInNewSpace(rdi,
+ rbx,
+ rdi,
+ no_reg,
+ &rt_call,
+ NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields.
// rax: initial map
// rbx: JSObject (not HeapObject tagged - the actual address).
@@ -595,8 +596,6 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// rbx: JSObject
// rdi: start of next object (will be start of FixedArray)
// rdx: number of elements in properties array
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >
- (FixedArray::kHeaderSize + 255*kPointerSize));
__ AllocateObjectInNewSpace(FixedArray::kHeaderSize,
times_pointer_size,
rdx,
@@ -604,7 +603,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
rax,
no_reg,
&undo_allocation,
- true);
+ RESULT_CONTAINS_TOP);
// Initialize the FixedArray.
// rbx: JSObject
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index 8d313c951c..31f55aeed6 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -6576,7 +6576,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// One operand is a smi.
// Check whether the non-smi is a heap number.
- ASSERT_EQ(static_cast<intptr_t>(1), kSmiTagMask);
+ ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
// rcx still holds rax & kSmiTag, which is either zero or one.
__ decq(rcx); // If rax is a smi, all 1s, else all 0s.
__ movq(rbx, rdx);
@@ -6829,7 +6829,8 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
- __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
+ Runtime::Function* f = Runtime::FunctionForId(Runtime::kNewArgumentsFast);
+ __ TailCallRuntime(ExternalReference(f), 3, f->result_size);
}
@@ -6891,7 +6892,9 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
__ pop(rbx); // Return address.
__ push(rdx);
__ push(rbx);
- __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
+ Runtime::Function* f =
+ Runtime::FunctionForId(Runtime::kGetArgumentsProperty);
+ __ TailCallRuntime(ExternalReference(f), 1, f->result_size);
}
@@ -6915,6 +6918,23 @@ void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
}
+int CEntryStub::MinorKey() {
+ ASSERT(result_size_ <= 2);
+#ifdef _WIN64
+ // Simple results returned in rax (using default code).
+ // Complex results must be written to address passed as first argument.
+ // Use even numbers for minor keys, reserving the odd numbers for
+ // CEntryDebugBreakStub.
+ return (result_size_ < 2) ? 0 : result_size_ * 2;
+#else
+ // Single results returned in rax (both AMD64 and Win64 calling conventions)
+ // and a struct of two pointers in rax+rdx (AMD64 calling convention only)
+ // by default.
+ return 0;
+#endif
+}
+
+
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// Check that stack should contain next handler, frame pointer, state and
// return address in that order.
@@ -6986,8 +7006,18 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
__ movq(Operand(rsp, 4 * kPointerSize), r14); // argc.
__ movq(Operand(rsp, 5 * kPointerSize), r15); // argv.
- // Pass a pointer to the Arguments object as the first argument.
- __ lea(rcx, Operand(rsp, 4 * kPointerSize));
+ if (result_size_ < 2) {
+ // Pass a pointer to the Arguments object as the first argument.
+ // Return result in single register (rax).
+ __ lea(rcx, Operand(rsp, 4 * kPointerSize));
+ } else {
+ ASSERT_EQ(2, result_size_);
+ // Pass a pointer to the result location as the first argument.
+ __ lea(rcx, Operand(rsp, 6 * kPointerSize));
+ // Pass a pointer to the Arguments object as the second argument.
+ __ lea(rdx, Operand(rsp, 4 * kPointerSize));
+ }
+
#else // ! defined(_WIN64)
// GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
__ movq(rdi, r14); // argc.
@@ -7010,7 +7040,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ j(zero, &failure_returned);
// Exit the JavaScript to C++ exit frame.
- __ LeaveExitFrame(frame_type);
+ __ LeaveExitFrame(frame_type, result_size_);
__ ret(0);
// Handling of failure.
@@ -7146,7 +7176,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
StackFrame::EXIT;
// Enter the exit frame that transitions from JavaScript to C++.
- __ EnterExitFrame(frame_type);
+ __ EnterExitFrame(frame_type, result_size_);
// rax: Holds the context at this point, but should not be used.
// On entry to code generated by GenerateCore, it must hold
@@ -7333,7 +7363,8 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
__ push(rax);
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
+ Runtime::Function* f = Runtime::FunctionForId(Runtime::kStackGuard);
+ __ TailCallRuntime(ExternalReference(f), 1, f->result_size);
}
@@ -7347,10 +7378,9 @@ void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm,
scratch,
no_reg,
need_gc,
- false);
+ TAG_OBJECT);
// Set the map and tag the result.
- __ addq(result, Immediate(kHeapObjectTag));
__ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
__ movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
diff --git a/deps/v8/src/x64/debug-x64.cc b/deps/v8/src/x64/debug-x64.cc
index f2bb62bc69..10092c55ca 100644
--- a/deps/v8/src/x64/debug-x64.cc
+++ b/deps/v8/src/x64/debug-x64.cc
@@ -160,18 +160,6 @@ void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateReturnDebugBreakEntry(MacroAssembler* masm) {
- // OK to clobber rbx as we are returning from a JS function through the code
- // generated by CodeGenerator::GenerateReturnSequence()
- ExternalReference debug_break_return =
- ExternalReference(Debug_Address::DebugBreakReturn());
- __ movq(rbx, debug_break_return);
- __ movq(rbx, Operand(rbx, 0));
- __ addq(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
- __ jmp(rbx);
-}
-
-
void Debug::GenerateStoreICDebugBreak(MacroAssembler* masm) {
// REgister state for IC store call (from ic-x64.cc).
// ----------- S t a t e -------------
@@ -207,7 +195,7 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
void BreakLocationIterator::SetDebugBreakAtReturn() {
ASSERT(Debug::kX64JSReturnSequenceLength >= Debug::kX64CallInstructionLength);
- rinfo()->PatchCodeWithCall(Debug::debug_break_return_entry()->entry(),
+ rinfo()->PatchCodeWithCall(Debug::debug_break_return()->entry(),
Debug::kX64JSReturnSequenceLength - Debug::kX64CallInstructionLength);
}
diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc
index e2f7c30d27..bf415d925b 100644
--- a/deps/v8/src/x64/ic-x64.cc
+++ b/deps/v8/src/x64/ic-x64.cc
@@ -236,7 +236,7 @@ void KeyedLoadIC::Generate(MacroAssembler* masm,
__ push(rbx); // return address
// Perform tail call to the entry.
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -373,7 +373,7 @@ void KeyedStoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
__ push(rcx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
@@ -394,7 +394,7 @@ void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Do tail-call to runtime routine.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
@@ -460,7 +460,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ push(rax);
__ push(rcx);
// Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+ __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
// Extra capacity case: Check if there is extra capacity to
@@ -532,7 +532,7 @@ void CallIC::Generate(MacroAssembler* masm,
__ push(rbx);
// Call the entry.
- CEntryStub stub;
+ CEntryStub stub(1);
__ movq(rax, Immediate(2));
__ movq(rbx, f);
__ CallStub(&stub);
@@ -763,7 +763,7 @@ void LoadIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
__ push(rbx); // return address
// Perform tail call to the entry.
- __ TailCallRuntime(f, 2);
+ __ TailCallRuntime(f, 2, 1);
}
@@ -940,7 +940,7 @@ void StoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
__ push(rbx); // return address
// Perform tail call to the entry.
- __ TailCallRuntime(f, 3);
+ __ TailCallRuntime(f, 3, 1);
}
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
@@ -959,7 +959,7 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// Perform tail call to the entry.
__ TailCallRuntime(
- ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index ae45eaba54..104ccb8c15 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -318,7 +318,8 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
- int num_arguments) {
+ int num_arguments,
+ int result_size) {
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : argument num_arguments - 1
@@ -331,14 +332,15 @@ void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
// should remove this need and make the runtime routine entry code
// smarter.
movq(rax, Immediate(num_arguments));
- JumpToBuiltin(ext);
+ JumpToBuiltin(ext, result_size);
}
-void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
+void MacroAssembler::JumpToBuiltin(const ExternalReference& ext,
+ int result_size) {
// Set the entry point and jump to the C entry runtime stub.
movq(rbx, ext);
- CEntryStub ces;
+ CEntryStub ces(result_size);
movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
jmp(kScratchRegister);
}
@@ -971,7 +973,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
-void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
+void MacroAssembler::EnterExitFrame(StackFrame::Type type, int result_size) {
ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);
// Setup the frame structure on the stack.
@@ -1016,6 +1018,21 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
}
#endif
+#ifdef _WIN64
+ // Reserve space on stack for result and argument structures, if necessary.
+ int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
+ // Reserve space for the Arguments object. The Windows 64-bit ABI
+ // requires us to pass this structure as a pointer to its location on
+ // the stack. The structure contains 2 values.
+ int argument_stack_space = 2 * kPointerSize;
+ // We also need backing space for 4 parameters, even though
+  // we only pass one or two parameters, and they are in registers.
+ int argument_mirror_space = 4 * kPointerSize;
+ int total_stack_space =
+ argument_mirror_space + argument_stack_space + result_stack_space;
+ subq(rsp, Immediate(total_stack_space));
+#endif
+
// Get the required frame alignment for the OS.
static const int kFrameAlignment = OS::ActivationFrameAlignment();
if (kFrameAlignment > 0) {
@@ -1024,30 +1041,19 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
and_(rsp, kScratchRegister);
}
-#ifdef _WIN64
- // Reserve space for the Arguments object. The Windows 64-bit ABI
- // requires us to pass this structure as a pointer to its location on
- // the stack. The structure contains 2 pointers.
- // The structure on the stack must be 16-byte aligned.
- // We also need backing space for 4 parameters, even though
- // we only pass one parameter, and it is in a register.
- subq(rsp, Immediate(6 * kPointerSize));
- ASSERT(kFrameAlignment == 2 * kPointerSize); // Change the padding if needed.
-#endif
-
// Patch the saved entry sp.
movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
-void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
+void MacroAssembler::LeaveExitFrame(StackFrame::Type type, int result_size) {
// Registers:
// r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
// Restore the memory copy of the registers by digging them out from
// the stack. This is needed to allow nested break points.
if (type == StackFrame::EXIT_DEBUG) {
- // It's okay to clobber register ebx below because we don't need
+ // It's okay to clobber register rbx below because we don't need
// the function pointer after this.
const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
@@ -1060,7 +1066,18 @@ void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
movq(rcx, Operand(rbp, 1 * kPointerSize));
movq(rbp, Operand(rbp, 0 * kPointerSize));
- // Pop the arguments and the receiver from the caller stack.
+#ifdef _WIN64
+ // If return value is on the stack, pop it to registers.
+ if (result_size > 1) {
+ ASSERT_EQ(2, result_size);
+ // Position above 4 argument mirrors and arguments object.
+ movq(rax, Operand(rsp, 6 * kPointerSize));
+ movq(rdx, Operand(rsp, 7 * kPointerSize));
+ }
+#endif
+
+ // Pop everything up to and including the arguments and the receiver
+ // from the caller stack.
lea(rsp, Operand(r15, 1 * kPointerSize));
// Restore current context from top and clear it in debug mode.
@@ -1231,18 +1248,23 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
-void MacroAssembler::LoadAllocationTopHelper(
- Register result,
- Register result_end,
- Register scratch,
- bool result_contains_top_on_entry) {
+void MacroAssembler::LoadAllocationTopHelper(Register result,
+ Register result_end,
+ Register scratch,
+ AllocationFlags flags) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
// Just return if allocation top is already known.
- if (result_contains_top_on_entry) {
+ if ((flags & RESULT_CONTAINS_TOP) != 0) {
// No use of scratch if allocation top is provided.
ASSERT(scratch.is(no_reg));
+#ifdef DEBUG
+ // Assert that result actually contains top on entry.
+ movq(kScratchRegister, new_space_allocation_top);
+ cmpq(result, Operand(kScratchRegister, 0));
+ Check(equal, "Unexpected allocation top");
+#endif
return;
}
@@ -1279,20 +1301,16 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
}
-void MacroAssembler::AllocateObjectInNewSpace(
- int object_size,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+void MacroAssembler::AllocateObjectInNewSpace(int object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -1304,25 +1322,26 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
+
+ // Tag the result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ addq(result, Immediate(kHeapObjectTag));
+ }
}
-void MacroAssembler::AllocateObjectInNewSpace(
- int header_size,
- ScaleFactor element_size,
- Register element_count,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+void MacroAssembler::AllocateObjectInNewSpace(int header_size,
+ ScaleFactor element_size,
+ Register element_count,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -1334,23 +1353,22 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
-}
+ // Tag the result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ addq(result, Immediate(kHeapObjectTag));
+ }
+}
-void MacroAssembler::AllocateObjectInNewSpace(
- Register object_size,
- Register result,
- Register result_end,
- Register scratch,
- Label* gc_required,
- bool result_contains_top_on_entry) {
+void MacroAssembler::AllocateObjectInNewSpace(Register object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ AllocationFlags flags) {
// Load address of new object into result.
- LoadAllocationTopHelper(result,
- result_end,
- scratch,
- result_contains_top_on_entry);
-
+ LoadAllocationTopHelper(result, result_end, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -1365,6 +1383,11 @@ void MacroAssembler::AllocateObjectInNewSpace(
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
+
+ // Tag the result if requested.
+ if ((flags & TAG_OBJECT) != 0) {
+ addq(result, Immediate(kHeapObjectTag));
+ }
}
diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h
index 6f5c32c2b4..9da2676a99 100644
--- a/deps/v8/src/x64/macro-assembler-x64.h
+++ b/deps/v8/src/x64/macro-assembler-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,25 +42,6 @@ static const Register kScratchRegister = r10;
class JumpTarget;
-// Helper types to make flags easier to read at call sites.
-enum InvokeFlag {
- CALL_FUNCTION,
- JUMP_FUNCTION
-};
-
-enum CodeLocation {
- IN_JAVASCRIPT,
- IN_JS_ENTRY,
- IN_C_ENTRY
-};
-
-enum HandlerType {
- TRY_CATCH_HANDLER,
- TRY_FINALLY_HANDLER,
- JS_ENTRY_HANDLER
-};
-
-
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
public:
@@ -106,15 +87,15 @@ class MacroAssembler: public Assembler {
void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
// Enter specific kind of exit frame; either EXIT or
- // EXIT_DEBUG. Expects the number of arguments in register eax and
- // sets up the number of arguments in register edi and the pointer
- // to the first argument in register esi.
- void EnterExitFrame(StackFrame::Type type);
+ // EXIT_DEBUG. Expects the number of arguments in register rax and
+ // sets up the number of arguments in register rdi and the pointer
+ // to the first argument in register rsi.
+ void EnterExitFrame(StackFrame::Type type, int result_size = 1);
- // Leave the current exit frame. Expects the return value in
- // register eax:edx (untouched) and the pointer to the first
- // argument in register esi.
- void LeaveExitFrame(StackFrame::Type type);
+ // Leave the current exit frame. Expects/provides the return value in
+ // register rax:rdx (untouched) and the pointer to the first
+ // argument in register rsi.
+ void LeaveExitFrame(StackFrame::Type type, int result_size = 1);
// ---------------------------------------------------------------------------
@@ -244,7 +225,7 @@ class MacroAssembler: public Assembler {
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void AllocateObjectInNewSpace(int header_size,
ScaleFactor element_size,
@@ -253,14 +234,14 @@ class MacroAssembler: public Assembler {
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void AllocateObjectInNewSpace(Register object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
// Undo allocation in new space. The object passed and objects allocated after
// it will no longer be allocated. Make sure that no pointers are left to the
@@ -317,10 +298,12 @@ class MacroAssembler: public Assembler {
// Tail call of a runtime routine (jump).
// Like JumpToBuiltin, but also takes care of passing the number
// of arguments.
- void TailCallRuntime(const ExternalReference& ext, int num_arguments);
+ void TailCallRuntime(const ExternalReference& ext,
+ int num_arguments,
+ int result_size);
// Jump to the builtin routine.
- void JumpToBuiltin(const ExternalReference& ext);
+ void JumpToBuiltin(const ExternalReference& ext, int result_size);
// ---------------------------------------------------------------------------
@@ -392,31 +375,11 @@ class MacroAssembler: public Assembler {
void LoadAllocationTopHelper(Register result,
Register result_end,
Register scratch,
- bool result_contains_top_on_entry);
+ AllocationFlags flags);
void UpdateAllocationTopHelper(Register result_end, Register scratch);
};
-// The code patcher is used to patch (typically) small parts of code e.g. for
-// debugging and other types of instrumentation. When using the code patcher
-// the exact number of bytes specified must be emitted. Is not legal to emit
-// relocation information. If any of these constraints are violated it causes
-// an assertion.
-class CodePatcher {
- public:
- CodePatcher(byte* address, int size);
- virtual ~CodePatcher();
-
- // Macro assembler to emit code.
- MacroAssembler* masm() { return &masm_; }
-
- private:
- byte* address_; // The address of the code being patched.
- int size_; // Number of bytes of the expected patch size.
- MacroAssembler masm_; // Macro assembler used to generate the code.
-};
-
-
// -----------------------------------------------------------------------------
// Static helper functions.
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
index 373f359496..5d17a2d2af 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -612,7 +612,7 @@ Handle<Object> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// MSVC passes arguments in rcx, rdx, r8, r9, with backing stack slots.
// Store register parameters in pre-allocated stack slots,
__ movq(Operand(rbp, kInputString), rcx);
- __ movq(Operand(rbp, kStartIndex), rdx);
+ __ movq(Operand(rbp, kStartIndex), rdx); // Passed as int32 in edx.
__ movq(Operand(rbp, kInputStart), r8);
__ movq(Operand(rbp, kInputEnd), r9);
// Callee-save on Win64.
@@ -711,7 +711,7 @@ Handle<Object> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ Move(code_object_pointer(), masm_->CodeObject());
// Load previous char as initial value of current-character.
Label at_start;
- __ cmpq(Operand(rbp, kAtStart), Immediate(0));
+ __ cmpb(Operand(rbp, kAtStart), Immediate(0));
__ j(not_equal, &at_start);
LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
__ jmp(&start_label_);
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.h b/deps/v8/src/x64/regexp-macro-assembler-x64.h
index ab9647704b..3e6720dc0d 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.h
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.h
@@ -129,16 +129,18 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kReturn_eip = kFramePointer + kPointerSize;
static const int kFrameAlign = kReturn_eip + kPointerSize;
-#ifdef __MSVC__
+#ifdef _WIN64
// Parameters (first four passed as registers, but with room on stack).
// In Microsoft 64-bit Calling Convention, there is room on the callers
// stack (before the return address) to spill parameter registers. We
// use this space to store the register passed parameters.
static const int kInputString = kFrameAlign;
+ // StartIndex is passed as 32 bit int.
static const int kStartIndex = kInputString + kPointerSize;
static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize;
+ // AtStart is passed as 32 bit int (values 0 or 1).
static const int kAtStart = kRegisterOutput + kPointerSize;
static const int kStackHighEnd = kAtStart + kPointerSize;
#else
@@ -154,7 +156,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kStackHighEnd = kFrameAlign;
#endif
-#ifdef __MSVC__
+#ifdef _WIN64
// Microsoft calling convention has three callee-saved registers
// (that we are using). We push these after the frame pointer.
static const int kBackup_rsi = kFramePointer - kPointerSize;
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index fcddfc4e7e..fcb2092df8 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -354,7 +354,7 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
__ movq(rax, Immediate(5));
__ movq(rbx, ref);
- CEntryStub stub;
+ CEntryStub stub(1);
__ CallStub(&stub);
}
@@ -489,7 +489,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(ref, 5);
+ __ TailCallRuntime(ref, 5, 1);
__ bind(&cleanup);
__ pop(scratch1);
@@ -511,7 +511,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
- __ TailCallRuntime(ref, 5);
+ __ TailCallRuntime(ref, 5, 1);
}
private:
@@ -661,7 +661,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ movq(rax, Immediate(5));
__ movq(rbx, ref);
- CEntryStub stub;
+ CEntryStub stub(1);
__ CallStub(&stub);
__ LeaveInternalFrame();
@@ -1362,7 +1362,7 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
- __ TailCallRuntime(store_callback_property, 4);
+ __ TailCallRuntime(store_callback_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -1450,7 +1450,7 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallRuntime(store_ic_property, 3);
+ __ TailCallRuntime(store_ic_property, 3, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -1652,7 +1652,7 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
// Do tail-call to the runtime system.
ExternalReference load_callback_property =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(load_callback_property, 5);
+ __ TailCallRuntime(load_callback_property, 5, 1);
}
@@ -1784,10 +1784,12 @@ Object* ConstructStubCompiler::CompileConstructStub(
// rbx: initial map
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ shl(rcx, Immediate(kPointerSizeLog2));
- // Make sure that the maximum heap object size will never cause us
- // problems here.
- ASSERT(Heap::MaxObjectSizeInPagedSpace() >= JSObject::kMaxInstanceSize);
- __ AllocateObjectInNewSpace(rcx, rdx, rcx, no_reg, &generic_stub_call, false);
+ __ AllocateObjectInNewSpace(rcx,
+ rdx,
+ rcx,
+ no_reg,
+ &generic_stub_call,
+ NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
// rbx: initial map
diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status
index 67e9d8a45b..68aabb5165 100644
--- a/deps/v8/test/cctest/cctest.status
+++ b/deps/v8/test/cctest/cctest.status
@@ -50,10 +50,6 @@ test-api/RegExpInterruption: SKIP
test-api/OutOfMemory: SKIP
test-api/OutOfMemoryNested: SKIP
-# BUG(432): Fail on ARM hardware.
-test-regexp/MacroAssemblerNativeSimple: PASS || FAIL
-test-regexp/MacroAssemblerNativeSimpleUC16: PASS || FAIL
-
# BUG(355): Test crashes on ARM.
test-log/ProfLazyMode: SKIP
diff --git a/deps/v8/test/cctest/test-assembler-arm.cc b/deps/v8/test/cctest/test-assembler-arm.cc
index 34f16395e2..f6e4d046ba 100644
--- a/deps/v8/test/cctest/test-assembler-arm.cc
+++ b/deps/v8/test/cctest/test-assembler-arm.cc
@@ -37,9 +37,9 @@ using namespace v8::internal;
// Define these function prototypes to match JSEntryFunction in execution.cc.
-typedef int (*F1)(int x, int p1, int p2, int p3, int p4);
-typedef int (*F2)(int x, int y, int p2, int p3, int p4);
-typedef int (*F3)(void* p, int p1, int p2, int p3, int p4);
+typedef Object* (*F1)(int x, int p1, int p2, int p3, int p4);
+typedef Object* (*F2)(int x, int y, int p2, int p3, int p4);
+typedef Object* (*F3)(void* p, int p1, int p2, int p3, int p4);
static v8::Persistent<v8::Context> env;
diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc
index bd09d0d55b..0cae26cfb0 100644
--- a/deps/v8/test/cctest/test-debug.cc
+++ b/deps/v8/test/cctest/test-debug.cc
@@ -4503,14 +4503,16 @@ TEST(DebuggerHostDispatch) {
TEST(DebuggerAgent) {
- // Make sure this port is not used by other tests to allow tests to run in
+  // Make sure these ports are not used by other tests to allow tests to run in
// parallel.
- const int kPort = 5858;
+ const int kPort1 = 5858;
+ const int kPort2 = 5857;
+ const int kPort3 = 5856;
- // Make a string with the port number.
+ // Make a string with the port2 number.
const int kPortBufferLen = 6;
- char port_str[kPortBufferLen];
- OS::SNPrintF(i::Vector<char>(port_str, kPortBufferLen), "%d", kPort);
+ char port2_str[kPortBufferLen];
+ OS::SNPrintF(i::Vector<char>(port2_str, kPortBufferLen), "%d", kPort2);
bool ok;
@@ -4518,15 +4520,15 @@ TEST(DebuggerAgent) {
i::Socket::Setup();
// Test starting and stopping the agent without any client connection.
- i::Debugger::StartAgent("test", kPort);
+ i::Debugger::StartAgent("test", kPort1);
i::Debugger::StopAgent();
// Test starting the agent, connecting a client and shutting down the agent
// with the client connected.
- ok = i::Debugger::StartAgent("test", kPort);
+ ok = i::Debugger::StartAgent("test", kPort2);
CHECK(ok);
i::Socket* client = i::OS::CreateSocket();
- ok = client->Connect("localhost", port_str);
+ ok = client->Connect("localhost", port2_str);
CHECK(ok);
i::Debugger::StopAgent();
delete client;
@@ -4534,9 +4536,9 @@ TEST(DebuggerAgent) {
// Test starting and stopping the agent with the required port already
// occoupied.
i::Socket* server = i::OS::CreateSocket();
- server->Bind(kPort);
+ server->Bind(kPort3);
- i::Debugger::StartAgent("test", kPort);
+ i::Debugger::StartAgent("test", kPort3);
i::Debugger::StopAgent();
delete server;
diff --git a/deps/v8/test/cctest/test-log.cc b/deps/v8/test/cctest/test-log.cc
index 5884a419a9..dafd3aaad5 100644
--- a/deps/v8/test/cctest/test-log.cc
+++ b/deps/v8/test/cctest/test-log.cc
@@ -5,12 +5,15 @@
#ifdef ENABLE_LOGGING_AND_PROFILING
#ifdef __linux__
+#include <math.h>
+#include <pthread.h>
#include <signal.h>
#include <unistd.h>
-#endif
+#endif // __linux__
#include "v8.h"
#include "log.h"
+#include "v8threads.h"
#include "cctest.h"
using v8::internal::Address;
@@ -155,9 +158,10 @@ static bool was_sigprof_received = true;
#ifdef __linux__
struct sigaction old_sigprof_handler;
+pthread_t our_thread;
static void SigProfSignalHandler(int signal, siginfo_t* info, void* context) {
- if (signal != SIGPROF) return;
+ if (signal != SIGPROF || !pthread_equal(pthread_self(), our_thread)) return;
was_sigprof_received = true;
old_sigprof_handler.sa_sigaction(signal, info, context);
}
@@ -185,6 +189,7 @@ static int CheckThatProfilerWorks(int log_pos) {
// Intercept SIGPROF handler to make sure that the test process
// had received it. Under load, system can defer it causing test failure.
// It is important to execute this after 'ResumeProfiler'.
+ our_thread = pthread_self();
was_sigprof_received = false;
struct sigaction sa;
sa.sa_sigaction = SigProfSignalHandler;
@@ -280,6 +285,158 @@ TEST(ProfLazyMode) {
}
+// Profiling multiple threads that use V8 is currently only available on Linux.
+#ifdef __linux__
+
+namespace {
+
+class LoopingThread : public v8::internal::Thread {
+ public:
+ LoopingThread()
+ : v8::internal::Thread(),
+ semaphore_(v8::internal::OS::CreateSemaphore(0)),
+ run_(true) {
+ }
+
+ virtual ~LoopingThread() { delete semaphore_; }
+
+ void Run() {
+ self_ = pthread_self();
+ RunLoop();
+ }
+
+ void SendSigProf() { pthread_kill(self_, SIGPROF); }
+
+ void Stop() { run_ = false; }
+
+ bool WaitForRunning() { return semaphore_->Wait(1000000); }
+
+ protected:
+ bool IsRunning() { return run_; }
+
+ virtual void RunLoop() = 0;
+
+ void SetV8ThreadId() {
+ v8_thread_id_ = v8::V8::GetCurrentThreadId();
+ }
+
+ void SignalRunning() { semaphore_->Signal(); }
+
+ private:
+ v8::internal::Semaphore* semaphore_;
+ bool run_;
+ pthread_t self_;
+ int v8_thread_id_;
+};
+
+
+class LoopingJsThread : public LoopingThread {
+ public:
+ void RunLoop() {
+ {
+ v8::Locker locker;
+ CHECK(v8::internal::ThreadManager::HasId());
+ SetV8ThreadId();
+ }
+ while (IsRunning()) {
+ v8::Locker locker;
+ v8::HandleScope scope;
+ v8::Persistent<v8::Context> context = v8::Context::New();
+ v8::Context::Scope context_scope(context);
+ SignalRunning();
+ CompileAndRunScript(
+ "var j; for (var i=0; i<10000; ++i) { j = Math.sin(i); }");
+ context.Dispose();
+ i::OS::Sleep(1);
+ }
+ }
+};
+
+
+class LoopingNonJsThread : public LoopingThread {
+ public:
+ void RunLoop() {
+ v8::Locker locker;
+ v8::Unlocker unlocker;
+ // Now thread has V8's id, but will not run VM code.
+ CHECK(v8::internal::ThreadManager::HasId());
+ double i = 10;
+ SignalRunning();
+ while (IsRunning()) {
+ i = sin(i);
+ i::OS::Sleep(1);
+ }
+ }
+};
+
+
+class TestSampler : public v8::internal::Sampler {
+ public:
+ TestSampler()
+ : Sampler(0, true),
+ semaphore_(v8::internal::OS::CreateSemaphore(0)),
+ was_sample_stack_called_(false) {
+ }
+
+ ~TestSampler() { delete semaphore_; }
+
+ void SampleStack(v8::internal::TickSample*) {
+ was_sample_stack_called_ = true;
+ }
+
+ void Tick(v8::internal::TickSample*) { semaphore_->Signal(); }
+
+ bool WaitForTick() { return semaphore_->Wait(1000000); }
+
+ void Reset() { was_sample_stack_called_ = false; }
+
+ bool WasSampleStackCalled() { return was_sample_stack_called_; }
+
+ private:
+ v8::internal::Semaphore* semaphore_;
+ bool was_sample_stack_called_;
+};
+
+
+} // namespace
+
+TEST(ProfMultipleThreads) {
+ // V8 needs to be initialized before the first Locker
+ // instantiation. Otherwise, Top::Initialize will reset
+ // thread_id_ in ThreadTopLocal.
+ v8::HandleScope scope;
+ v8::Handle<v8::Context> env = v8::Context::New();
+ env->Enter();
+
+ LoopingJsThread jsThread;
+ jsThread.Start();
+ LoopingNonJsThread nonJsThread;
+ nonJsThread.Start();
+
+ TestSampler sampler;
+ sampler.Start();
+ CHECK(!sampler.WasSampleStackCalled());
+ jsThread.WaitForRunning();
+ jsThread.SendSigProf();
+ CHECK(sampler.WaitForTick());
+ CHECK(sampler.WasSampleStackCalled());
+ sampler.Reset();
+ CHECK(!sampler.WasSampleStackCalled());
+ nonJsThread.WaitForRunning();
+ nonJsThread.SendSigProf();
+ CHECK(sampler.WaitForTick());
+ CHECK(!sampler.WasSampleStackCalled());
+ sampler.Stop();
+
+ jsThread.Stop();
+ nonJsThread.Stop();
+ jsThread.Join();
+ nonJsThread.Join();
+}
+
+#endif // __linux__
+
+
static inline bool IsStringEqualTo(const char* r, const char* s) {
return strncmp(r, s, strlen(r)) == 0;
}
diff --git a/deps/v8/test/cctest/testcfg.py b/deps/v8/test/cctest/testcfg.py
index 75377dbc20..c2427c8dc5 100644
--- a/deps/v8/test/cctest/testcfg.py
+++ b/deps/v8/test/cctest/testcfg.py
@@ -31,7 +31,7 @@ from os.path import join, dirname, exists
import platform
import utils
-DEBUG_FLAGS = ['--enable-slow-asserts', '--debug-code', '--verify-heap']
+CCTEST_DEBUG_FLAGS = ['--enable-slow-asserts', '--debug-code', '--verify-heap']
class CcTestCase(test.TestCase):
@@ -55,7 +55,7 @@ class CcTestCase(test.TestCase):
serialization_option = '--testing_serialization_file=' + serialization_file
result = [ self.executable, name, serialization_option ]
if self.mode == 'debug':
- result += DEBUG_FLAGS
+ result += CCTEST_DEBUG_FLAGS
return result
def GetCommand(self):
diff --git a/deps/v8/test/mjsunit/debug-scopes.js b/deps/v8/test/mjsunit/debug-scopes.js
index 7b477e1c6e..e87cbb7c22 100644
--- a/deps/v8/test/mjsunit/debug-scopes.js
+++ b/deps/v8/test/mjsunit/debug-scopes.js
@@ -140,6 +140,11 @@ function CheckScopeContent(content, number, exec_state) {
if (!scope.scopeObject().property('arguments').isUndefined()) {
scope_size--;
}
+ // Also ignore synthetic variable from catch block.
+ if (!scope.scopeObject().property('.catch-var').isUndefined()) {
+ scope_size--;
+ }
+
if (count != scope_size) {
print('Names found in scope:');
var names = scope.scopeObject().propertyNames();
@@ -656,5 +661,101 @@ listener_delegate = function(exec_state) {
debugger;
EndTest();
+
+BeginTest("Catch block 1");
+function catch_block_1() {
+ try {
+ throw 'Exception';
+ } catch (e) {
+ debugger;
+ }
+};
+
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Catch,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({e:'Exception'}, 0, exec_state);
+}
+catch_block_1()
+EndTest();
+
+
+BeginTest("Catch block 2");
+function catch_block_2() {
+ try {
+ throw 'Exception';
+ } catch (e) {
+ with({n:10}) {
+ debugger;
+ }
+ }
+};
+
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.With,
+ debug.ScopeType.Catch,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({n:10}, 0, exec_state);
+ CheckScopeContent({e:'Exception'}, 1, exec_state);
+}
+catch_block_2()
+EndTest();
+
+
+BeginTest("Catch block 3");
+function catch_block_3() {
+  // Do eval to dynamically declare a local variable so that the context's
+  // extension slot is initialized with JSContextExtensionObject.
+  eval("var y = 78;");
+  try {
+    throw 'Exception';
+  } catch (e) {
+    debugger;
+  }
+};
+
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Catch,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({e:'Exception'}, 0, exec_state);
+  CheckScopeContent({y:78}, 1, exec_state);
+}
+catch_block_3()
+EndTest();
+
+
+BeginTest("Catch block 4");
+function catch_block_4() {
+  // Do eval to dynamically declare a local variable so that the context's
+  // extension slot is initialized with JSContextExtensionObject.
+  eval("var y = 98;");
+  try {
+    throw 'Exception';
+  } catch (e) {
+    with({n:10}) {
+      debugger;
+    }
+  }
+};
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.With,
+                   debug.ScopeType.Catch,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({n:10}, 0, exec_state);
+  CheckScopeContent({e:'Exception'}, 1, exec_state);
+  CheckScopeContent({y:98}, 2, exec_state);
+}
+catch_block_4()
+EndTest();
+
+
assertEquals(begin_test_count, break_count, 'one or more tests did not enter the debugger');
assertEquals(begin_test_count, end_test_count, 'one or more tests did not have its result checked');
diff --git a/deps/v8/test/mjsunit/debug-step-stub-callfunction.js b/deps/v8/test/mjsunit/debug-step-stub-callfunction.js
index fbb8078621..50d095b532 100644
--- a/deps/v8/test/mjsunit/debug-step-stub-callfunction.js
+++ b/deps/v8/test/mjsunit/debug-step-stub-callfunction.js
@@ -54,7 +54,7 @@ function f() {
break_break_point_hit_count = 0;
f();
-assertEquals(5, break_break_point_hit_count);
+assertEquals(6, break_break_point_hit_count);
// Use an inner function to ensure that the function call is through CodeStub
// CallFunction see Ia32CodeGenerator::VisitCall and
@@ -67,7 +67,21 @@ function g() {
break_break_point_hit_count = 0;
g();
-assertEquals(4, break_break_point_hit_count);
+assertEquals(5, break_break_point_hit_count);
+
+
+// Use an inner function to ensure that the function call is through CodeStub
+// CallFunction.
+function testCallInExpression() {
+  function h() {}
+  debugger;
+  var x = 's' + h(10, 20);
+};
+
+break_break_point_hit_count = 0;
+testCallInExpression();
+assertEquals(5, break_break_point_hit_count);
+
// Get rid of the debug event listener.
Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js b/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js
new file mode 100644
index 0000000000..12f51429b0
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js
@@ -0,0 +1,115 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var state = 0;
+var expected_function_name = null;
+var expected_source_line_text = null;
+var expected_caller_source_line = null;
+var step_in_count = 2;
+
+// Simple debug event handler which first time will cause 'step in' action
+// to get into g.call and then check that execution is paused inside
+// function 'g'.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (state == 0) {
+ // Step into f().
+ exec_state.prepareStep(Debug.StepAction.StepIn, step_in_count);
+ state = 2;
+ } else if (state == 2) {
+ assertEquals(expected_source_line_text,
+ event_data.sourceLineText());
+ assertEquals(expected_function_name, event_data.func().name());
+ state = 3;
+ }
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+function g() {
+ return "s"; // expected line
+}
+
+function testFunction() {
+ var f = g;
+ var s = 1 +f(10);
+}
+
+function g2() {
+ return "s2"; // expected line
+}
+
+function testFunction2() {
+ var f = g2;
+ var s = 1 +f(10, 20);
+}
+
+// Run three times. First time the function will be compiled lazily,
+// second and third times the cached version will be used.
+for (var i = 0; i < 3; i++) {
+ state = 0;
+ expected_function_name = 'g';
+ expected_source_line_text = ' return "s"; // expected line';
+ step_in_count = 2;
+ // Set a break point and call to invoke the debug event listener.
+ Debug.setBreakPoint(testFunction, 1, 0);
+ testFunction();
+ assertNull(exception);
+ assertEquals(3, state);
+}
+
+// Test stepping into function call when a breakpoint is set at the place
+// of call. Use different pair of functions so that g2 is compiled lazily.
+// Run three times: first time the function will be compiled lazily, the
+// second and third times the cached version will be used.
+for (var i = 0; i < 3; i++) {
+ state = 0;
+ expected_function_name = 'g2';
+ expected_source_line_text = ' return "s2"; // expected line';
+ step_in_count = 1;
+ // Set a break point and call to invoke the debug event listener.
+ Debug.setBreakPoint(testFunction2, 2, 0);
+ testFunction2();
+ assertNull(exception);
+ assertEquals(3, state);
+}
+
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepout-recursive-function.js b/deps/v8/test/mjsunit/debug-stepout-recursive-function.js
new file mode 100644
index 0000000000..2f8780c950
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-stepout-recursive-function.js
@@ -0,0 +1,106 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var step_out_count = 1;
+
+// Simple debug event handler which counts the number of breaks hit and steps.
+var break_point_hit_count = 0;
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ break_point_hit_count++;
+ // Continue stepping until returned to bottom frame.
+ if (exec_state.frameCount() > 1) {
+ exec_state.prepareStep(Debug.StepAction.StepOut, step_out_count);
+ }
+
+ }
+ } catch(e) {
+ exception = e;
+ }
+
+};
+
+function BeginTest(name) {
+ test_name = name;
+ break_point_hit_count = 0;
+ exception = null;
+}
+
+function EndTest(expected_break_point_hit_count) {
+ assertEquals(expected_break_point_hit_count, break_point_hit_count, test_name);
+ assertNull(exception, test_name);
+ test_name = null;
+}
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+var shouldBreak = null;
+function fact(x) {
+ if (shouldBreak(x)) {
+ debugger;
+ }
+ if (x < 2) {
+ return 1;
+ } else {
+ return x*fact(x-1);
+ }
+}
+
+BeginTest('Test 1');
+shouldBreak = function(x) { return x == 3; };
+step_out_count = 1;
+fact(3);
+EndTest(2);
+
+BeginTest('Test 2');
+shouldBreak = function(x) { return x == 2; };
+step_out_count = 1;
+fact(3);
+EndTest(3);
+
+BeginTest('Test 3');
+shouldBreak = function(x) { return x == 1; };
+step_out_count = 2;
+fact(3);
+EndTest(2);
+
+BeginTest('Test 4');
+shouldBreak = function(x) { print(x); return x == 1 || x == 3; };
+step_out_count = 2;
+fact(3);
+EndTest(3);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepout-to-builtin.js b/deps/v8/test/mjsunit/debug-stepout-to-builtin.js
new file mode 100644
index 0000000000..486eee0e4d
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-stepout-to-builtin.js
@@ -0,0 +1,84 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var state = 1;
+var expected_source_line_text = null;
+var expected_function_name = null;
+
+// Simple debug event handler which first time will cause 'step out' action
+// and then check that execution is paused inside function
+// expected_function_name.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (state == 1) {
+ exec_state.prepareStep(Debug.StepAction.StepOut, 2);
+ state = 2;
+ } else if (state == 2) {
+ assertEquals(expected_function_name, event_data.func().name());
+ assertEquals(expected_source_line_text,
+ event_data.sourceLineText());
+ state = 3;
+ }
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+var obj = {key:10};
+
+function replacer(key, value) {
+ if (key == 'key') {
+ debugger;
+ }
+ return value;
+}
+
+// Test stepping out from a function called by a builtin (JSON.stringify).
+function testStepOutToBuiltIn() {
+ expected_function_name = 'testStepOutToBuiltIn';
+ expected_source_line_text = '} // expected line';
+ JSON.stringify(obj, replacer);
+} // expected line
+
+state = 1;
+testStepOutToBuiltIn();
+assertNull(exception);
+assertEquals(3, state);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/function-prototype.js b/deps/v8/test/mjsunit/function-prototype.js
index 371311e49a..c5a5487dd0 100644
--- a/deps/v8/test/mjsunit/function-prototype.js
+++ b/deps/v8/test/mjsunit/function-prototype.js
@@ -90,8 +90,9 @@ assertEquals(F.prototype, GetPrototypeOf(F));
// in GetPrototypeOf and go to a monomorphic IC load instead.
assertEquals(87, GetPrototypeOf({prototype:87}));
-// Check the prototype is enumerable as specified in ECMA262, 15.3.5.2
+// Check the prototype is not enumerable, for compatibility with
+// safari. This is deliberately incompatible with ECMA262, 15.3.5.2.
var foo = new Function("return x");
var result = ""
for (var n in foo) result += n;
-assertEquals(result, "prototype");
+assertEquals(result, "");
diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status
index 6ac4938281..3b89154dc7 100644
--- a/deps/v8/test/mjsunit/mjsunit.status
+++ b/deps/v8/test/mjsunit/mjsunit.status
@@ -60,8 +60,11 @@ debug-setbreakpoint: CRASH || FAIL || PASS
debug-step-stub-callfunction: SKIP
debug-stepin-accessor: CRASH || FAIL
debug-stepin-builtin: CRASH || FAIL
+debug-stepin-call-function-stub: CRASH || FAIL
debug-stepin-constructor: CRASH, FAIL
debug-stepin-function-call: CRASH || FAIL
+debug-stepout-recursive-function: CRASH || FAIL
+debug-stepout-to-builtin: CRASH || FAIL
debug-step: SKIP
debug-breakpoints: PASS || FAIL
debug-handle: CRASH || FAIL || PASS
diff --git a/deps/v8/test/mjsunit/regress/regress-246.js b/deps/v8/test/mjsunit/regress/regress-246.js
index 4324b54041..4324b54041 100755..100644
--- a/deps/v8/test/mjsunit/regress/regress-246.js
+++ b/deps/v8/test/mjsunit/regress/regress-246.js
diff --git a/deps/v8/test/mjsunit/regress/regress-254.js b/deps/v8/test/mjsunit/regress/regress-254.js
index ec4b40ac67..ec4b40ac67 100755..100644
--- a/deps/v8/test/mjsunit/regress/regress-254.js
+++ b/deps/v8/test/mjsunit/regress/regress-254.js
diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-18639.js b/deps/v8/test/mjsunit/regress/regress-crbug-18639.js
new file mode 100644
index 0000000000..23e225a4f4
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-crbug-18639.js
@@ -0,0 +1,34 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See http://crbug.com/18639
+
+toString = toString;
+__defineGetter__("z", (0).toLocaleString);
+z;
+z;
+((0).toLocaleString)();
diff --git a/deps/v8/test/mjsunit/testcfg.py b/deps/v8/test/mjsunit/testcfg.py
index 96840f5cf8..97924c8de0 100644
--- a/deps/v8/test/mjsunit/testcfg.py
+++ b/deps/v8/test/mjsunit/testcfg.py
@@ -31,7 +31,7 @@ from os.path import join, dirname, exists
import re
import tempfile
-
+MJSUNIT_DEBUG_FLAGS = ['--enable-slow-asserts', '--debug-code', '--verify-heap']
FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)")
FILES_PATTERN = re.compile(r"//\s+Files:(.*)")
SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: TEST_FILE_NAME")
@@ -58,6 +58,8 @@ class MjsunitTestCase(test.TestCase):
flags_match = FLAGS_PATTERN.search(source)
if flags_match:
result += flags_match.group(1).strip().split()
+ if self.mode == 'debug':
+ result += MJSUNIT_DEBUG_FLAGS
additional_files = []
files_match = FILES_PATTERN.search(source);
# Accept several lines of 'Files:'
diff --git a/deps/v8/test/mozilla/mozilla.status b/deps/v8/test/mozilla/mozilla.status
index 41395b3ed6..9793dc8318 100644
--- a/deps/v8/test/mozilla/mozilla.status
+++ b/deps/v8/test/mozilla/mozilla.status
@@ -124,6 +124,10 @@ ecma/Date/15.9.5.28-1: PASS || ($ARM && FAIL)
ecma/Array/15.4.4.5-3: PASS || ($ARM && FAIL)
ecma/Date/15.9.5.22-2: PASS || ($ARM && FAIL)
+# Flaky test that fails due to what appears to be a bug in the test.
+# Occurs depending on the current time.
+ecma/Date/15.9.5.8: PASS || FAIL
+
# Severely brain-damaged test. Access to local variables must not
# be more than 2.5 times faster than access to global variables? WTF?
js1_5/Regress/regress-169559: PASS || FAIL
diff --git a/deps/v8/tools/tickprocessor.js b/deps/v8/tools/tickprocessor.js
index 72b3059dca..84f0eea580 100644
--- a/deps/v8/tools/tickprocessor.js
+++ b/deps/v8/tools/tickprocessor.js
@@ -476,7 +476,7 @@ UnixCppEntriesProvider.prototype.parseNextLine = function() {
function MacCppEntriesProvider(nmExec) {
UnixCppEntriesProvider.call(this, nmExec);
// Note an empty group. It is required, as UnixCppEntriesProvider expects 3 groups.
- this.FUNC_RE = /^([0-9a-fA-F]{8}) ()[iItT] (.*)$/;
+ this.FUNC_RE = /^([0-9a-fA-F]{8,16}) ()[iItT] (.*)$/;
};
inherits(MacCppEntriesProvider, UnixCppEntriesProvider);
diff --git a/deps/v8/tools/visual_studio/arm.vsprops b/deps/v8/tools/visual_studio/arm.vsprops
index 3aa937448e..0d6a88875f 100644
--- a/deps/v8/tools/visual_studio/arm.vsprops
+++ b/deps/v8/tools/visual_studio/arm.vsprops
@@ -2,11 +2,13 @@
<VisualStudioPropertySheet
ProjectType="Visual C++"
Version="8.00"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)Arm"
+ IntermediateDirectory="$(SolutionDir)$(ConfigurationName)Arm\obj\$(ProjectName)"
Name="arm"
>
<Tool
Name="VCCLCompilerTool"
- PreprocessorDefinitions="V8_TARGET_ARCH_ARM"
+ PreprocessorDefinitions="_USE_32BIT_TIME_T;V8_TARGET_ARCH_ARM;V8_NATIVE_REGEXP"
DisableSpecificWarnings="4996"
/>
</VisualStudioPropertySheet>
diff --git a/deps/v8/tools/visual_studio/common.vsprops b/deps/v8/tools/visual_studio/common.vsprops
index d23e4fcfe8..238dd97f9c 100644
--- a/deps/v8/tools/visual_studio/common.vsprops
+++ b/deps/v8/tools/visual_studio/common.vsprops
@@ -3,8 +3,6 @@
ProjectType="Visual C++"
Version="8.00"
Name="essential"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(SolutionDir)$(ConfigurationName)\obj\$(ProjectName)"
CharacterSet="1"
>
<Tool
diff --git a/deps/v8/tools/visual_studio/d8_arm.vcproj b/deps/v8/tools/visual_studio/d8_arm.vcproj
new file mode 100644
index 0000000000..fbebdb35c0
--- /dev/null
+++ b/deps/v8/tools/visual_studio/d8_arm.vcproj
@@ -0,0 +1,199 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="d8"
+ ProjectGUID="{7E4C7D2D-A4B9-40B9-8192-22654E626F6C}"
+ RootNamespace="d8"
+ Keyword="Win32Proj"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\debug.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\release.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <File
+ RelativePath="..\..\src\d8.cc"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\d8.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\d8-debug.cc"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\d8-debug.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\d8-windows.cc"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\d8.js"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Processing js files..."
+ CommandLine=".\d8js2c.cmd ..\..\src &quot;$(IntDir)\DerivedSources&quot;"
+ Outputs="$(IntDir)\DerivedSources\natives.cc;$(IntDir)\DerivedSources\natives-empty.cc"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Processing js files..."
+ CommandLine=".\d8js2c.cmd ..\..\src &quot;$(IntDir)\DerivedSources&quot;"
+ Outputs="$(IntDir)\DerivedSources\natives.cc;$(IntDir)\DerivedSources\natives-empty.cc"
+ />
+ </FileConfiguration>
+ </File>
+ <Filter
+ Name="generated files"
+ >
+ <File
+ RelativePath="$(IntDir)\DerivedSources\natives.cc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/deps/v8/tools/visual_studio/ia32.vsprops b/deps/v8/tools/visual_studio/ia32.vsprops
index f48e808d84..0399bbbe6e 100644
--- a/deps/v8/tools/visual_studio/ia32.vsprops
+++ b/deps/v8/tools/visual_studio/ia32.vsprops
@@ -2,6 +2,8 @@
<VisualStudioPropertySheet
ProjectType="Visual C++"
Version="8.00"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(SolutionDir)$(ConfigurationName)\obj\$(ProjectName)"
Name="ia32"
>
<Tool
diff --git a/deps/v8/tools/visual_studio/v8_arm.sln b/deps/v8/tools/visual_studio/v8_arm.sln
index 2dc6cf5bfd..069ff32f67 100644
--- a/deps/v8/tools/visual_studio/v8_arm.sln
+++ b/deps/v8/tools/visual_studio/v8_arm.sln
@@ -1,11 +1,11 @@
Microsoft Visual Studio Solution File, Format Version 9.00
# Visual Studio 2005
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8", "v8.vcproj", "{21E22961-22BF-4493-BD3A-868F93DA5179}"
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8", "v8_arm.vcproj", "{21E22961-22BF-4493-BD3A-868F93DA5179}"
ProjectSection(ProjectDependencies) = postProject
{EC8B7909-62AF-470D-A75D-E1D89C837142} = {EC8B7909-62AF-470D-A75D-E1D89C837142}
EndProjectSection
EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8_shell_sample", "v8_shell_sample.vcproj", "{2DE20FFA-6F5E-48D9-84D8-09B044A5B119}"
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8_shell_sample", "v8_shell_sample_arm.vcproj", "{2DE20FFA-6F5E-48D9-84D8-09B044A5B119}"
ProjectSection(ProjectDependencies) = postProject
{EC8B7909-62AF-470D-A75D-E1D89C837142} = {EC8B7909-62AF-470D-A75D-E1D89C837142}
{21E22961-22BF-4493-BD3A-868F93DA5179} = {21E22961-22BF-4493-BD3A-868F93DA5179}
@@ -13,14 +13,14 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8_shell_sample", "v8_shell
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{E131F77D-B713-48F3-B86D-097ECDCC4C3A}"
EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8_process_sample", "v8_process_sample.vcproj", "{EF019874-D38A-40E3-B17C-DB5923F0A79C}"
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "v8_process_sample", "v8_process_sample_arm.vcproj", "{EF019874-D38A-40E3-B17C-DB5923F0A79C}"
ProjectSection(ProjectDependencies) = postProject
{21E22961-22BF-4493-BD3A-868F93DA5179} = {21E22961-22BF-4493-BD3A-868F93DA5179}
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{AD933CE2-1303-448E-89C8-60B1FDD18EC3}"
EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "d8", "d8.vcproj", "{7E4C7D2D-A4B9-40B9-8192-22654E626F6C}"
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "d8", "d8_arm.vcproj", "{7E4C7D2D-A4B9-40B9-8192-22654E626F6C}"
ProjectSection(ProjectDependencies) = postProject
{21E22961-22BF-4493-BD3A-868F93DA5179} = {21E22961-22BF-4493-BD3A-868F93DA5179}
EndProjectSection
diff --git a/deps/v8/tools/visual_studio/v8_arm.vcproj b/deps/v8/tools/visual_studio/v8_arm.vcproj
new file mode 100644
index 0000000000..f8cbcc4c24
--- /dev/null
+++ b/deps/v8/tools/visual_studio/v8_arm.vcproj
@@ -0,0 +1,223 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="v8"
+ ProjectGUID="{21E22961-22BF-4493-BD3A-868F93DA5179}"
+ RootNamespace="v8"
+ Keyword="Win32Proj"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ ConfigurationType="4"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\debug.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLibrarianTool"
+ LinkLibraryDependencies="true"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ ConfigurationType="4"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\release.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLibrarianTool"
+ LinkLibraryDependencies="true"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="js"
+ >
+ <File
+ RelativePath="..\..\src\apinatives.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\array.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\date-delay.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\debug-delay.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\macros.py"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\math.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\messages.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\mirror-delay.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\regexp-delay.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\json-delay.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\runtime.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\string.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\uri.js"
+ >
+ </File>
+ <File
+ RelativePath="..\..\src\v8natives.js"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Processing js files..."
+ CommandLine=".\js2c.cmd ..\..\src &quot;$(IntDir)\DerivedSources&quot;"
+ AdditionalDependencies="..\..\src\macros.py;..\..\src\runtime.js;..\..\src\v8natives.js;..\..\src\array.js;..\..\src\string.js;..\..\src\uri.js;..\..\src\math.js;..\..\src\messages.js;..\..\src\apinatives.js;..\..\src\debug-delay.js;..\..\src\mirror-delay.js;..\..\src\date-delay.js;..\..\src\regexp-delay.js;..\..\src\json-delay.js"
+ Outputs="$(IntDir)\DerivedSources\natives.cc;$(IntDir)\DerivedSources\natives-empty.cc"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Processing js files..."
+ CommandLine=".\js2c.cmd ..\..\src &quot;$(IntDir)\DerivedSources&quot;"
+ AdditionalDependencies="..\..\src\macros.py;..\..\src\runtime.js;..\..\src\v8natives.js;..\..\src\array.js;..\..\src\string.js;..\..\src\uri.js;..\..\src\math.js;..\..\src\messages.js;..\..\src\apinatives.js;..\..\src\debug-delay.js;..\..\src\mirror-delay.js;..\..\src\date-delay.js;..\..\src\regexp-delay.js;..\..\src\json-delay.js"
+ Outputs="$(IntDir)\DerivedSources\natives.cc;$(IntDir)\DerivedSources\natives-empty.cc"
+ />
+ </FileConfiguration>
+ </File>
+ </Filter>
+ <Filter
+ Name="generated files"
+ >
+ <File
+ RelativePath="$(IntDir)\DerivedSources\natives.cc"
+ >
+ </File>
+ </Filter>
+ <File
+ RelativePath="..\..\src\snapshot-empty.cc"
+ >
+ </File>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/deps/v8/tools/visual_studio/v8_base_arm.vcproj b/deps/v8/tools/visual_studio/v8_base_arm.vcproj
index 8fe54afb39..ee8e339920 100644
--- a/deps/v8/tools/visual_studio/v8_base_arm.vcproj
+++ b/deps/v8/tools/visual_studio/v8_base_arm.vcproj
@@ -313,6 +313,10 @@
>
</File>
<File
+ RelativePath="..\..\src\arm\constants-arm.cc"
+ >
+ </File>
+ <File
RelativePath="..\..\src\arm\constants-arm.h"
>
</File>
diff --git a/deps/v8/tools/visual_studio/v8_cctest_arm.vcproj b/deps/v8/tools/visual_studio/v8_cctest_arm.vcproj
index a027a84145..bd49f3b062 100644
--- a/deps/v8/tools/visual_studio/v8_cctest_arm.vcproj
+++ b/deps/v8/tools/visual_studio/v8_cctest_arm.vcproj
@@ -49,6 +49,7 @@
/>
<Tool
Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
/>
<Tool
Name="VCALinkTool"
@@ -109,6 +110,7 @@
/>
<Tool
Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
/>
<Tool
Name="VCALinkTool"
diff --git a/deps/v8/tools/visual_studio/v8_process_sample_arm.vcproj b/deps/v8/tools/visual_studio/v8_process_sample_arm.vcproj
new file mode 100644
index 0000000000..7320231cf0
--- /dev/null
+++ b/deps/v8/tools/visual_studio/v8_process_sample_arm.vcproj
@@ -0,0 +1,151 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="v8_process_sample"
+ ProjectGUID="{EF019874-D38A-40E3-B17C-DB5923F0A79C}"
+ RootNamespace="v8_process_sample"
+ Keyword="Win32Proj"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\debug.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\release.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <File
+ RelativePath="..\..\samples\process.cc"
+ >
+ </File>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/deps/v8/tools/visual_studio/v8_shell_sample_arm.vcproj b/deps/v8/tools/visual_studio/v8_shell_sample_arm.vcproj
new file mode 100644
index 0000000000..ba7e0e0554
--- /dev/null
+++ b/deps/v8/tools/visual_studio/v8_shell_sample_arm.vcproj
@@ -0,0 +1,151 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="v8_shell_sample"
+ ProjectGUID="{2DE20FFA-6F5E-48D9-84D8-09B044A5B119}"
+ RootNamespace="v8_shell_sample"
+ Keyword="Win32Proj"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\debug.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ ConfigurationType="1"
+ InheritedPropertySheets=".\common.vsprops;.\arm.vsprops;.\release.vsprops"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="winmm.lib Ws2_32.lib"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <File
+ RelativePath="..\..\samples\shell.cc"
+ >
+ </File>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/deps/v8/tools/visual_studio/x64.vsprops b/deps/v8/tools/visual_studio/x64.vsprops
index af0e47c4b6..7587acfe95 100644
--- a/deps/v8/tools/visual_studio/x64.vsprops
+++ b/deps/v8/tools/visual_studio/x64.vsprops
@@ -2,6 +2,8 @@
<VisualStudioPropertySheet
ProjectType="Visual C++"
Version="8.00"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)64"
+ IntermediateDirectory="$(SolutionDir)$(ConfigurationName)64\obj\$(ProjectName)"
Name="x64"
>
<Tool