author | Ryan Dahl <ry@tinyclouds.org> | 2010-08-12 10:11:03 -0700
---|---|---
committer | Ryan Dahl <ry@tinyclouds.org> | 2010-08-12 10:11:03 -0700
commit | 083ee0f8b7a8d98d51bf2debf47d68117f13087f (patch) |
tree | a82e78989562eaf41da7963fe0c80d8f1af94c6d |
parent | 9acd76ed6e020fa540f64eabf4bdb33513795924 (diff) |
download | node-new-083ee0f8b7a8d98d51bf2debf47d68117f13087f.tar.gz |
Upgrade V8 to 2.3.7
71 files changed, 3528 insertions, 2463 deletions
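
The V8 ChangeLog entries in this diff include one embedder-visible addition, `v8::Value::IsRegExp()`. The following is a minimal, hypothetical sketch of how a pre-Isolate 2.3.x binding could use the new predicate; it is not part of this commit, and the `PatternOf` function, its error message, and the `source` property lookup are illustrative assumptions.

```cpp
#include <v8.h>

// Hypothetical binding that returns the source pattern of a RegExp
// argument, throwing if the caller passed anything else. It relies on the
// IsRegExp() predicate introduced by this V8 upgrade.
static v8::Handle<v8::Value> PatternOf(const v8::Arguments& args) {
  v8::HandleScope scope;
  if (args.Length() < 1 || !args[0]->IsRegExp()) {
    return v8::ThrowException(v8::String::New("expected a RegExp"));
  }
  // 2.3.7 only adds the predicate, not a dedicated RegExp handle type,
  // so the value is still inspected as a plain object here.
  v8::Local<v8::Object> re = args[0]->ToObject();
  return scope.Close(re->Get(v8::String::New("source")));
}
```

Before this predicate existed an embedder typically fell back on script-level checks (for example comparing constructor names), so the validation can now stay entirely in C++.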
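
The `deps/v8/include/v8.h` hunk below also introduces an opt-in indexed property query callback signature, guarded by `USE_NEW_QUERY_CALLBACKS` and disabled by default. Below is a hedged sketch of what such a callback might look like when built with that define; the interceptor name, the index threshold, and the attribute choice are assumptions rather than code from this commit.

```cpp
#include <v8.h>

// Hypothetical indexed query interceptor compiled with
// -DUSE_NEW_QUERY_CALLBACKS: instead of a Boolean, it returns an Integer
// whose value encodes v8::PropertyAttribute bits for the queried index.
static v8::Handle<v8::Integer> IndexedQuery(uint32_t index,
                                            const v8::AccessorInfo& info) {
  if (index < 16) {
    // Pretend the first 16 indices exist and are read-only, non-deletable.
    return v8::Integer::New(v8::ReadOnly | v8::DontDelete);
  }
  // Empty handle: the request is not intercepted for other indices.
  return v8::Handle<v8::Integer>();
}
```

It would be registered through `ObjectTemplate::SetIndexedPropertyHandler()` just like the old Boolean-returning callback; the `reinterpret_cast` plumbing added in the hunk lets both signatures funnel into the same internal `SetIndexedPropertyHandlerImpl()` setter.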
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index 4d87db3be0..65b8965f1a 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -29,4 +29,5 @@ Rodolph Perfetta <rodolph.perfetta@arm.com> Ryan Dahl <coldredlemur@gmail.com> Subrato K De <subratokde@codeaurora.org> Burcu Dogan <burcujdogan@gmail.com> +Vlad Burlik <vladbph@gmail.com> diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index b5663744d3..caa63feba1 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,11 +1,33 @@ +2010-08-11: Version 2.3.7 + + Reduced size of heap snapshots produced by heap profiler (issue 783). + + Introduced v8::Value::IsRegExp method. + + Fixed CPU profiler crash in start / stop sequence when non-existent + name is passed (issue http://crbug.com/51594). + + Introduced new indexed property query callbacks API (issue 816). This + API is guarded by USE_NEW_QUERY_CALLBACK define and is disabled + by default. + + Removed support for object literal get/set with number/string + property name. + + Fixed handling of JSObject::elements in CalculateNetworkSize + (issue 822). + + Allow compiling with strict aliasing enabled on GCC 4.4 (issue 463). + + 2010-08-09: Version 2.3.6 - RegExp literals create a new object every time they are evaluated - (issue 704). + RegExp literals create a new object every time they are evaluated + (issue 704). - Object.seal and Object.freeze return the modified object (issue 809). + Object.seal and Object.freeze return the modified object (issue 809). - Fix building using GCC 4.4.4. + Fix building using GCC 4.4.4. 2010-08-04: Version 2.3.5 diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct index 00b8fb7219..8fc192637c 100644 --- a/deps/v8/SConstruct +++ b/deps/v8/SConstruct @@ -58,7 +58,7 @@ else: # on linux we need these compiler flags to avoid crashes in the v8 test suite # and avoid dtoa.c strict aliasing issues if os.environ.get('GCC_VERSION') == '44': - GCC_EXTRA_CCFLAGS = ['-fno-tree-vrp', '-fno-strict-aliasing'] + GCC_EXTRA_CCFLAGS = ['-fno-tree-vrp'] GCC_DTOA_EXTRA_CCFLAGS = [] else: GCC_EXTRA_CCFLAGS = [] @@ -80,7 +80,6 @@ ANDROID_FLAGS = ['-march=armv7-a', '-frerun-cse-after-loop', '-frename-registers', '-fomit-frame-pointer', - '-fno-strict-aliasing', '-finline-limit=64', '-DCAN_USE_VFP_INSTRUCTIONS=1', '-DCAN_USE_ARMV7_INSTRUCTIONS=1', diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index c99eb0d9f8..9e3cb873c6 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -194,10 +194,10 @@ class HeapGraphNode; class V8EXPORT HeapGraphEdge { public: enum Type { - CONTEXT_VARIABLE = 0, // A variable from a function context. - ELEMENT = 1, // An element of an array. - PROPERTY = 2, // A named object property. - INTERNAL = 3 // A link that can't be accessed from JS, + kContextVariable = 0, // A variable from a function context. + kElement = 1, // An element of an array. + kProperty = 2, // A named object property. + kInternal = 3 // A link that can't be accessed from JS, // thus, its name isn't a real property name. }; @@ -240,12 +240,12 @@ class V8EXPORT HeapGraphPath { class V8EXPORT HeapGraphNode { public: enum Type { - INTERNAL = 0, // Internal node, a virtual one, for housekeeping. - ARRAY = 1, // An array of elements. - STRING = 2, // A string. - OBJECT = 3, // A JS object (except for arrays and strings). - CODE = 4, // Compiled code. - CLOSURE = 5 // Function closure. + kInternal = 0, // Internal node, a virtual one, for housekeeping. + kArray = 1, // An array of elements. + kString = 2, // A string. 
+ kObject = 3, // A JS object (except for arrays and strings). + kCode = 4, // Compiled code. + kClosure = 5 // Function closure. }; /** Returns node type (see HeapGraphNode::Type). */ @@ -268,13 +268,15 @@ class V8EXPORT HeapGraphNode { int GetSelfSize() const; /** Returns node's network (self + reachable nodes) size, in bytes. */ - int GetTotalSize() const; + int GetReachableSize() const; /** - * Returns node's private size, in bytes. That is, the size of memory - * that will be reclaimed having this node collected. + * Returns node's retained size, in bytes. That is, self + sizes of + * the objects that are reachable only from this object. In other + * words, the size of memory that will be reclaimed having this node + * collected. */ - int GetPrivateSize() const; + int GetRetainedSize() const; /** Returns child nodes count of the node. */ int GetChildrenCount() const; diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index 3ac10ab917..20cef79a7f 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -919,6 +919,11 @@ class Value : public Data { */ V8EXPORT bool IsDate() const; + /** + * Returns true if this value is a RegExp. + */ + V8EXPORT bool IsRegExp() const; + V8EXPORT Local<Boolean> ToBoolean() const; V8EXPORT Local<Number> ToNumber() const; V8EXPORT Local<String> ToString() const; @@ -1819,10 +1824,19 @@ typedef Handle<Value> (*IndexedPropertySetter)(uint32_t index, /** * Returns a non-empty handle if the interceptor intercepts the request. - * The result is true if the property exists and false otherwise. + * The result is true if either a boolean (true if property exists and false + * otherwise) or an integer encoding property attributes. */ +#ifdef USE_NEW_QUERY_CALLBACKS +typedef Handle<Integer> (*IndexedPropertyQuery)(uint32_t index, + const AccessorInfo& info); +#else typedef Handle<Boolean> (*IndexedPropertyQuery)(uint32_t index, const AccessorInfo& info); +#endif + +typedef Handle<Value> (*IndexedPropertyQueryImpl)(uint32_t index, + const AccessorInfo& info); /** * Returns a non-empty handle if the deleter intercepts the request. 
@@ -2040,7 +2054,23 @@ class V8EXPORT FunctionTemplate : public Template { IndexedPropertyQuery query, IndexedPropertyDeleter remover, IndexedPropertyEnumerator enumerator, - Handle<Value> data); + Handle<Value> data) { + IndexedPropertyQueryImpl casted = + reinterpret_cast<IndexedPropertyQueryImpl>(query); + SetIndexedInstancePropertyHandlerImpl(getter, + setter, + casted, + remover, + enumerator, + data); + } + void SetIndexedInstancePropertyHandlerImpl( + IndexedPropertyGetter getter, + IndexedPropertySetter setter, + IndexedPropertyQueryImpl query, + IndexedPropertyDeleter remover, + IndexedPropertyEnumerator enumerator, + Handle<Value> data); void SetInstanceCallAsFunctionHandler(InvocationCallback callback, Handle<Value> data); @@ -2139,7 +2169,25 @@ class V8EXPORT ObjectTemplate : public Template { IndexedPropertyQuery query = 0, IndexedPropertyDeleter deleter = 0, IndexedPropertyEnumerator enumerator = 0, - Handle<Value> data = Handle<Value>()); + Handle<Value> data = Handle<Value>()) { + IndexedPropertyQueryImpl casted = + reinterpret_cast<IndexedPropertyQueryImpl>(query); + SetIndexedPropertyHandlerImpl(getter, + setter, + casted, + deleter, + enumerator, + data); + } + private: + void SetIndexedPropertyHandlerImpl(IndexedPropertyGetter getter, + IndexedPropertySetter setter, + IndexedPropertyQueryImpl query, + IndexedPropertyDeleter deleter, + IndexedPropertyEnumerator enumerator, + Handle<Value> data); + public: + /** * Sets the callback to be used when calling instances created from * this template as a function. If no callback is set, instances diff --git a/deps/v8/src/accessors.cc b/deps/v8/src/accessors.cc index ed0bbd7a1c..3c49846601 100644 --- a/deps/v8/src/accessors.cc +++ b/deps/v8/src/accessors.cc @@ -488,7 +488,7 @@ Object* Accessors::FunctionGetLength(Object* object, void*) { JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it); if (!found_it) return Smi::FromInt(0); // Check if already compiled. - if (!function->is_compiled()) { + if (!function->shared()->is_compiled()) { // If the function isn't compiled yet, the length is not computed // correctly yet. Compile it now and return the right length. 
HandleScope scope; diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 4fdc95f5ee..b3164dd01e 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -886,10 +886,10 @@ void FunctionTemplate::SetNamedInstancePropertyHandler( } -void FunctionTemplate::SetIndexedInstancePropertyHandler( +void FunctionTemplate::SetIndexedInstancePropertyHandlerImpl( IndexedPropertyGetter getter, IndexedPropertySetter setter, - IndexedPropertyQuery query, + IndexedPropertyQueryImpl query, IndexedPropertyDeleter remover, IndexedPropertyEnumerator enumerator, Handle<Value> data) { @@ -1054,10 +1054,10 @@ void ObjectTemplate::SetAccessCheckCallbacks( } -void ObjectTemplate::SetIndexedPropertyHandler( +void ObjectTemplate::SetIndexedPropertyHandlerImpl( IndexedPropertyGetter getter, IndexedPropertySetter setter, - IndexedPropertyQuery query, + IndexedPropertyQueryImpl query, IndexedPropertyDeleter remover, IndexedPropertyEnumerator enumerator, Handle<Value> data) { @@ -1068,12 +1068,12 @@ void ObjectTemplate::SetIndexedPropertyHandler( i::FunctionTemplateInfo* constructor = i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor()); i::Handle<i::FunctionTemplateInfo> cons(constructor); - Utils::ToLocal(cons)->SetIndexedInstancePropertyHandler(getter, - setter, - query, - remover, - enumerator, - data); + Utils::ToLocal(cons)->SetIndexedInstancePropertyHandlerImpl(getter, + setter, + query, + remover, + enumerator, + data); } @@ -1792,6 +1792,13 @@ bool Value::IsDate() const { } +bool Value::IsRegExp() const { + if (IsDeadCheck("v8::Value::IsRegExp()")) return false; + i::Handle<i::Object> obj = Utils::OpenHandle(this); + return obj->IsJSRegExp(); +} + + Local<String> Value::ToString() const { if (IsDeadCheck("v8::Value::ToString()")) return Local<String>(); LOG_API("ToString"); @@ -4491,24 +4498,27 @@ const CpuProfile* CpuProfiler::StopProfiling(Handle<String> title, } +static i::HeapGraphEdge* ToInternal(const HeapGraphEdge* edge) { + return const_cast<i::HeapGraphEdge*>( + reinterpret_cast<const i::HeapGraphEdge*>(edge)); +} + HeapGraphEdge::Type HeapGraphEdge::GetType() const { IsDeadCheck("v8::HeapGraphEdge::GetType"); - return static_cast<HeapGraphEdge::Type>( - reinterpret_cast<const i::HeapGraphEdge*>(this)->type()); + return static_cast<HeapGraphEdge::Type>(ToInternal(this)->type()); } Handle<Value> HeapGraphEdge::GetName() const { IsDeadCheck("v8::HeapGraphEdge::GetName"); - const i::HeapGraphEdge* edge = - reinterpret_cast<const i::HeapGraphEdge*>(this); + i::HeapGraphEdge* edge = ToInternal(this); switch (edge->type()) { - case i::HeapGraphEdge::CONTEXT_VARIABLE: - case i::HeapGraphEdge::INTERNAL: - case i::HeapGraphEdge::PROPERTY: + case i::HeapGraphEdge::kContextVariable: + case i::HeapGraphEdge::kInternal: + case i::HeapGraphEdge::kProperty: return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol( edge->name()))); - case i::HeapGraphEdge::ELEMENT: + case i::HeapGraphEdge::kElement: return Handle<Number>(ToApi<Number>(i::Factory::NewNumberFromInt( edge->index()))); default: UNREACHABLE(); @@ -4519,28 +4529,32 @@ Handle<Value> HeapGraphEdge::GetName() const { const HeapGraphNode* HeapGraphEdge::GetFromNode() const { IsDeadCheck("v8::HeapGraphEdge::GetFromNode"); - const i::HeapEntry* from = - reinterpret_cast<const i::HeapGraphEdge*>(this)->from(); + const i::HeapEntry* from = ToInternal(this)->From(); return reinterpret_cast<const HeapGraphNode*>(from); } const HeapGraphNode* HeapGraphEdge::GetToNode() const { IsDeadCheck("v8::HeapGraphEdge::GetToNode"); - const 
i::HeapEntry* to = - reinterpret_cast<const i::HeapGraphEdge*>(this)->to(); + const i::HeapEntry* to = ToInternal(this)->to(); return reinterpret_cast<const HeapGraphNode*>(to); } +static i::HeapGraphPath* ToInternal(const HeapGraphPath* path) { + return const_cast<i::HeapGraphPath*>( + reinterpret_cast<const i::HeapGraphPath*>(path)); +} + + int HeapGraphPath::GetEdgesCount() const { - return reinterpret_cast<const i::HeapGraphPath*>(this)->path()->length(); + return ToInternal(this)->path()->length(); } const HeapGraphEdge* HeapGraphPath::GetEdge(int index) const { return reinterpret_cast<const HeapGraphEdge*>( - reinterpret_cast<const i::HeapGraphPath*>(this)->path()->at(index)); + ToInternal(this)->path()->at(index)); } @@ -4555,137 +4569,136 @@ const HeapGraphNode* HeapGraphPath::GetToNode() const { } +static i::HeapEntry* ToInternal(const HeapGraphNode* entry) { + return const_cast<i::HeapEntry*>( + reinterpret_cast<const i::HeapEntry*>(entry)); +} + + HeapGraphNode::Type HeapGraphNode::GetType() const { IsDeadCheck("v8::HeapGraphNode::GetType"); - return static_cast<HeapGraphNode::Type>( - reinterpret_cast<const i::HeapEntry*>(this)->type()); + return static_cast<HeapGraphNode::Type>(ToInternal(this)->type()); } Handle<String> HeapGraphNode::GetName() const { IsDeadCheck("v8::HeapGraphNode::GetName"); return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol( - reinterpret_cast<const i::HeapEntry*>(this)->name()))); + ToInternal(this)->name()))); } uint64_t HeapGraphNode::GetId() const { IsDeadCheck("v8::HeapGraphNode::GetId"); - return reinterpret_cast<const i::HeapEntry*>(this)->id(); + return ToInternal(this)->id(); } int HeapGraphNode::GetSelfSize() const { IsDeadCheck("v8::HeapGraphNode::GetSelfSize"); - return reinterpret_cast<const i::HeapEntry*>(this)->self_size(); + return ToInternal(this)->self_size(); } -int HeapGraphNode::GetTotalSize() const { - IsDeadCheck("v8::HeapSnapshot::GetHead"); - return const_cast<i::HeapEntry*>( - reinterpret_cast<const i::HeapEntry*>(this))->TotalSize(); +int HeapGraphNode::GetReachableSize() const { + IsDeadCheck("v8::HeapSnapshot::GetReachableSize"); + return ToInternal(this)->ReachableSize(); } -int HeapGraphNode::GetPrivateSize() const { - IsDeadCheck("v8::HeapSnapshot::GetPrivateSize"); - return const_cast<i::HeapEntry*>( - reinterpret_cast<const i::HeapEntry*>(this))->NonSharedTotalSize(); +int HeapGraphNode::GetRetainedSize() const { + IsDeadCheck("v8::HeapSnapshot::GetRetainedSize"); + return ToInternal(this)->RetainedSize(); } int HeapGraphNode::GetChildrenCount() const { IsDeadCheck("v8::HeapSnapshot::GetChildrenCount"); - return reinterpret_cast<const i::HeapEntry*>(this)->children()->length(); + return ToInternal(this)->children().length(); } const HeapGraphEdge* HeapGraphNode::GetChild(int index) const { IsDeadCheck("v8::HeapSnapshot::GetChild"); return reinterpret_cast<const HeapGraphEdge*>( - reinterpret_cast<const i::HeapEntry*>(this)->children()->at(index)); + &ToInternal(this)->children()[index]); } int HeapGraphNode::GetRetainersCount() const { IsDeadCheck("v8::HeapSnapshot::GetRetainersCount"); - return reinterpret_cast<const i::HeapEntry*>(this)->retainers()->length(); + return ToInternal(this)->retainers().length(); } const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const { IsDeadCheck("v8::HeapSnapshot::GetRetainer"); return reinterpret_cast<const HeapGraphEdge*>( - reinterpret_cast<const i::HeapEntry*>(this)->retainers()->at(index)); + ToInternal(this)->retainers()[index]); } int 
HeapGraphNode::GetRetainingPathsCount() const { IsDeadCheck("v8::HeapSnapshot::GetRetainingPathsCount"); - return const_cast<i::HeapEntry*>( - reinterpret_cast<const i::HeapEntry*>( - this))->GetRetainingPaths()->length(); + return ToInternal(this)->GetRetainingPaths()->length(); } const HeapGraphPath* HeapGraphNode::GetRetainingPath(int index) const { IsDeadCheck("v8::HeapSnapshot::GetRetainingPath"); return reinterpret_cast<const HeapGraphPath*>( - const_cast<i::HeapEntry*>( - reinterpret_cast<const i::HeapEntry*>( - this))->GetRetainingPaths()->at(index)); + ToInternal(this)->GetRetainingPaths()->at(index)); } const HeapGraphNode* HeapSnapshotsDiff::GetAdditionsRoot() const { IsDeadCheck("v8::HeapSnapshotsDiff::GetAdditionsRoot"); - const i::HeapSnapshotsDiff* diff = - reinterpret_cast<const i::HeapSnapshotsDiff*>(this); + i::HeapSnapshotsDiff* diff = + const_cast<i::HeapSnapshotsDiff*>( + reinterpret_cast<const i::HeapSnapshotsDiff*>(this)); return reinterpret_cast<const HeapGraphNode*>(diff->additions_root()); } const HeapGraphNode* HeapSnapshotsDiff::GetDeletionsRoot() const { IsDeadCheck("v8::HeapSnapshotsDiff::GetDeletionsRoot"); - const i::HeapSnapshotsDiff* diff = - reinterpret_cast<const i::HeapSnapshotsDiff*>(this); + i::HeapSnapshotsDiff* diff = + const_cast<i::HeapSnapshotsDiff*>( + reinterpret_cast<const i::HeapSnapshotsDiff*>(this)); return reinterpret_cast<const HeapGraphNode*>(diff->deletions_root()); } +static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) { + return const_cast<i::HeapSnapshot*>( + reinterpret_cast<const i::HeapSnapshot*>(snapshot)); +} + + unsigned HeapSnapshot::GetUid() const { IsDeadCheck("v8::HeapSnapshot::GetUid"); - return reinterpret_cast<const i::HeapSnapshot*>(this)->uid(); + return ToInternal(this)->uid(); } Handle<String> HeapSnapshot::GetTitle() const { IsDeadCheck("v8::HeapSnapshot::GetTitle"); - const i::HeapSnapshot* snapshot = - reinterpret_cast<const i::HeapSnapshot*>(this); return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol( - snapshot->title()))); + ToInternal(this)->title()))); } const HeapGraphNode* HeapSnapshot::GetRoot() const { IsDeadCheck("v8::HeapSnapshot::GetHead"); - const i::HeapSnapshot* snapshot = - reinterpret_cast<const i::HeapSnapshot*>(this); - return reinterpret_cast<const HeapGraphNode*>(snapshot->const_root()); + return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->root()); } const HeapSnapshotsDiff* HeapSnapshot::CompareWith( const HeapSnapshot* snapshot) const { IsDeadCheck("v8::HeapSnapshot::CompareWith"); - i::HeapSnapshot* snapshot1 = const_cast<i::HeapSnapshot*>( - reinterpret_cast<const i::HeapSnapshot*>(this)); - i::HeapSnapshot* snapshot2 = const_cast<i::HeapSnapshot*>( - reinterpret_cast<const i::HeapSnapshot*>(snapshot)); return reinterpret_cast<const HeapSnapshotsDiff*>( - snapshot1->CompareWith(snapshot2)); + ToInternal(this)->CompareWith(ToInternal(snapshot))); } diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc index b1f29ba381..37768e8d5f 100644 --- a/deps/v8/src/arm/builtins-arm.cc +++ b/deps/v8/src/arm/builtins-arm.cc @@ -1050,7 +1050,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ ldr(r2, FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); __ mov(r2, Operand(r2, ASR, kSmiTagSize)); - __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); + __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeOffset)); __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); __ cmp(r2, r0); // Check 
formal and actual parameter counts. __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc index 4bcf1a07df..aec80d7315 100644 --- a/deps/v8/src/arm/codegen-arm.cc +++ b/deps/v8/src/arm/codegen-arm.cc @@ -1532,9 +1532,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand, __ BranchOnSmi(r0, &build_args); __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); __ b(ne, &build_args); - __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); - __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset)); + __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeOffset)); __ cmp(r1, Operand(apply_code)); __ b(ne, &build_args); @@ -4176,21 +4175,21 @@ void CodeGenerator::VisitCallNew(CallNew* node) { void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); - ASSERT(args->length() == 1); JumpTarget leave, null, function, non_function_constructor; + Register scratch = VirtualFrame::scratch0(); - // Load the object into r0. + // Load the object into register. + ASSERT(args->length() == 1); Load(args->at(0)); - frame_->EmitPop(r0); + Register tos = frame_->PopToRegister(); // If the object is a smi, we return null. - __ tst(r0, Operand(kSmiTagMask)); + __ tst(tos, Operand(kSmiTagMask)); null.Branch(eq); // Check that the object is a JS object but take special care of JS // functions to make sure they have 'Function' as their class. - __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); + __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE); null.Branch(lt); // As long as JS_FUNCTION_TYPE is the last instance type and it is @@ -4198,37 +4197,38 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { // LAST_JS_OBJECT_TYPE. STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); - __ cmp(r1, Operand(JS_FUNCTION_TYPE)); + __ cmp(scratch, Operand(JS_FUNCTION_TYPE)); function.Branch(eq); // Check if the constructor in the map is a function. - __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); + __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset)); + __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE); non_function_constructor.Branch(ne); - // The r0 register now contains the constructor function. Grab the + // The tos register now contains the constructor function. Grab the // instance class name from there. - __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); - __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); - frame_->EmitPush(r0); + __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(tos, + FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset)); + frame_->EmitPush(tos); leave.Jump(); // Functions have class 'Function'. function.Bind(); - __ mov(r0, Operand(Factory::function_class_symbol())); - frame_->EmitPush(r0); + __ mov(tos, Operand(Factory::function_class_symbol())); + frame_->EmitPush(tos); leave.Jump(); // Objects with a non-function constructor have class 'Object'. non_function_constructor.Bind(); - __ mov(r0, Operand(Factory::Object_symbol())); - frame_->EmitPush(r0); + __ mov(tos, Operand(Factory::Object_symbol())); + frame_->EmitPush(tos); leave.Jump(); // Non-JS objects have class null. 
null.Bind(); - __ LoadRoot(r0, Heap::kNullValueRootIndex); - frame_->EmitPush(r0); + __ LoadRoot(tos, Heap::kNullValueRootIndex); + frame_->EmitPush(tos); // All done. leave.Bind(); @@ -4236,45 +4236,51 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); - ASSERT(args->length() == 1); + Register scratch = VirtualFrame::scratch0(); JumpTarget leave; + + ASSERT(args->length() == 1); Load(args->at(0)); - frame_->EmitPop(r0); // r0 contains object. + Register tos = frame_->PopToRegister(); // tos contains object. // if (object->IsSmi()) return the object. - __ tst(r0, Operand(kSmiTagMask)); + __ tst(tos, Operand(kSmiTagMask)); leave.Branch(eq); // It is a heap object - get map. If (!object->IsJSValue()) return the object. - __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); + __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE); leave.Branch(ne); // Load the value. - __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); + __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset)); leave.Bind(); - frame_->EmitPush(r0); + frame_->EmitPush(tos); } void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); - ASSERT(args->length() == 2); + Register scratch1 = VirtualFrame::scratch0(); + Register scratch2 = VirtualFrame::scratch1(); JumpTarget leave; + + ASSERT(args->length() == 2); Load(args->at(0)); // Load the object. Load(args->at(1)); // Load the value. - frame_->EmitPop(r0); // r0 contains value - frame_->EmitPop(r1); // r1 contains object + Register value = frame_->PopToRegister(); + Register object = frame_->PopToRegister(value); // if (object->IsSmi()) return object. - __ tst(r1, Operand(kSmiTagMask)); + __ tst(object, Operand(kSmiTagMask)); leave.Branch(eq); // It is a heap object - get map. If (!object->IsJSValue()) return the object. - __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); + __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE); leave.Branch(ne); // Store the value. - __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); + __ str(value, FieldMemOperand(object, JSValue::kValueOffset)); // Update the write barrier. - __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3); + __ RecordWrite(object, + Operand(JSValue::kValueOffset - kHeapObjectTag), + scratch1, + scratch2); // Leave. leave.Bind(); - frame_->EmitPush(r0); + frame_->EmitPush(value); } @@ -4558,22 +4564,18 @@ class DeferredStringCharCodeAt : public DeferredCode { // This generates code that performs a String.prototype.charCodeAt() call // or returns a smi in order to trigger conversion. void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); Comment(masm_, "[ GenerateStringCharCodeAt"); ASSERT(args->length() == 2); Load(args->at(0)); Load(args->at(1)); - Register index = r1; - Register object = r2; - - frame_->EmitPop(r1); - frame_->EmitPop(r2); + Register index = frame_->PopToRegister(); + Register object = frame_->PopToRegister(index); // We need two extra registers. 
- Register scratch = r3; - Register result = r0; + Register scratch = VirtualFrame::scratch0(); + Register result = VirtualFrame::scratch1(); DeferredStringCharCodeAt* deferred = new DeferredStringCharCodeAt(object, @@ -4608,16 +4610,13 @@ class DeferredStringCharFromCode : public DeferredCode { // Generates code for creating a one-char string from a char code. void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); Comment(masm_, "[ GenerateStringCharFromCode"); ASSERT(args->length() == 1); Load(args->at(0)); - Register code = r1; - Register result = r0; - - frame_->EmitPop(code); + Register result = frame_->GetTOSRegister(); + Register code = frame_->PopToRegister(result); DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode( code, result); @@ -4679,23 +4678,20 @@ class DeferredStringCharAt : public DeferredCode { // This generates code that performs a String.prototype.charAt() call // or returns a smi in order to trigger conversion. void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); Comment(masm_, "[ GenerateStringCharAt"); ASSERT(args->length() == 2); Load(args->at(0)); Load(args->at(1)); - Register index = r1; - Register object = r2; - - frame_->EmitPop(r1); - frame_->EmitPop(r2); + Register index = frame_->PopToRegister(); + Register object = frame_->PopToRegister(index); // We need three extra registers. - Register scratch1 = r3; - Register scratch2 = r4; - Register result = r0; + Register scratch1 = VirtualFrame::scratch0(); + Register scratch2 = VirtualFrame::scratch1(); + // Use r6 without notifying the virtual frame. + Register result = r6; DeferredStringCharAt* deferred = new DeferredStringCharAt(object, @@ -4874,13 +4870,13 @@ void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); ASSERT(args->length() == 1); // Satisfy contract with ArgumentsAccessStub: // Load the key into r1 and the formal parameters count into r0. Load(args->at(0)); - frame_->EmitPop(r1); + frame_->PopToR1(); + frame_->SpillAll(); __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); // Call the shared stub to get to arguments[key]. @@ -5108,9 +5104,7 @@ class DeferredSearchCache: public DeferredCode { void DeferredSearchCache::Generate() { __ Push(cache_, key_); __ CallRuntime(Runtime::kGetFromCache, 2); - if (!dst_.is(r0)) { - __ mov(dst_, r0); - } + __ Move(dst_, r0); } @@ -5130,33 +5124,42 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { Load(args->at(1)); - VirtualFrame::SpilledScope spilled_scope(frame_); - - frame_->EmitPop(r2); + frame_->PopToR1(); + frame_->SpillAll(); + Register key = r1; // Just poped to r1 + Register result = r0; // Free, as frame has just been spilled. 
+ Register scratch1 = VirtualFrame::scratch0(); + Register scratch2 = VirtualFrame::scratch1(); - __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX)); - __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset)); - __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX)); - __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id))); + __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX)); + __ ldr(scratch1, + FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset)); + __ ldr(scratch1, + ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX)); + __ ldr(scratch1, + FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id))); - DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2); + DeferredSearchCache* deferred = + new DeferredSearchCache(result, scratch1, key); const int kFingerOffset = FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex); STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); - __ ldr(r0, FieldMemOperand(r1, kFingerOffset)); - // r0 now holds finger offset as a smi. - __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - // r3 now points to the start of fixed array elements. - __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex)); - // Note side effect of PreIndex: r3 now points to the key of the pair. - __ cmp(r2, r0); + __ ldr(result, FieldMemOperand(scratch1, kFingerOffset)); + // result now holds finger offset as a smi. + __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + // scratch2 now points to the start of fixed array elements. + __ ldr(result, + MemOperand( + scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex)); + // Note side effect of PreIndex: scratch2 now points to the key of the pair. + __ cmp(key, result); deferred->Branch(ne); - __ ldr(r0, MemOperand(r3, kPointerSize)); + __ ldr(result, MemOperand(scratch2, kPointerSize)); deferred->BindExit(); - frame_->EmitPush(r0); + frame_->EmitPush(result); } @@ -6851,6 +6854,11 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) { __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); + // Initialize the code pointer in the function to be the one + // found in the shared function info object. + __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); + __ str(r3, FieldMemOperand(r0, JSFunction::kCodeOffset)); + // Return result. The argument function info has been popped already. __ Ret(); @@ -10444,11 +10452,9 @@ void StringCharCodeAtGenerator::GenerateSlow( // NumberToSmi discards numbers that are not exact integers. __ CallRuntime(Runtime::kNumberToSmi, 1); } - if (!scratch_.is(r0)) { - // Save the conversion result before the pop instructions below - // have a chance to overwrite it. - __ mov(scratch_, r0); - } + // Save the conversion result before the pop instructions below + // have a chance to overwrite it. + __ Move(scratch_, r0); __ pop(index_); __ pop(object_); // Reload the instance type. 
@@ -10467,9 +10473,7 @@ void StringCharCodeAtGenerator::GenerateSlow( call_helper.BeforeCall(masm); __ Push(object_, index_); __ CallRuntime(Runtime::kStringCharCodeAt, 2); - if (!result_.is(r0)) { - __ mov(result_, r0); - } + __ Move(result_, r0); call_helper.AfterCall(masm); __ jmp(&exit_); @@ -10510,9 +10514,7 @@ void StringCharFromCodeGenerator::GenerateSlow( call_helper.BeforeCall(masm); __ push(code_); __ CallRuntime(Runtime::kCharFromCode, 1); - if (!result_.is(r0)) { - __ mov(result_, r0); - } + __ Move(result_, r0); call_helper.AfterCall(masm); __ jmp(&exit_); diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index 9c25ccde2f..7a03641a2f 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -757,7 +757,7 @@ void MacroAssembler::InvokeFunction(Register fun, SharedFunctionInfo::kFormalParameterCountOffset)); mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize)); ldr(code_reg, - MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag)); + MemOperand(r1, JSFunction::kCodeOffset - kHeapObjectTag)); add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); ParameterCount expected(expected_reg); @@ -1508,8 +1508,7 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { // Make sure the code objects in the builtins object and in the // builtin function are the same. push(r1); - ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); - ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset)); + ldr(r1, FieldMemOperand(r1, JSFunction::kCodeOffset)); cmp(r1, target); Assert(eq, "Builtin code object changed"); pop(r1); diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index e1d4489d44..0d59505c09 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -56,7 +56,7 @@ class SourceCodeCache BASE_EMBEDDED { } void Iterate(ObjectVisitor* v) { - v->VisitPointer(BitCast<Object**, FixedArray**>(&cache_)); + v->VisitPointer(BitCast<Object**>(&cache_)); } @@ -470,6 +470,7 @@ Handle<JSFunction> Genesis::CreateEmptyFunction() { Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::EmptyFunction)); empty_function->set_code(*code); + empty_function->shared()->set_code(*code); Handle<String> source = Factory::NewStringFromAscii(CStrVector("() {}")); Handle<Script> script = Factory::NewScript(source); script->set_type(Smi::FromInt(Script::TYPE_NATIVE)); @@ -1545,6 +1546,8 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) { Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo>(function->shared()); if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false; + // Set the code object on the function object. + function->set_code(function->shared()->code()); builtins->set_javascript_builtin_code(id, shared->code()); } return true; diff --git a/deps/v8/src/checks.h b/deps/v8/src/checks.h index 13374d86dd..5ea59920ac 100644 --- a/deps/v8/src/checks.h +++ b/deps/v8/src/checks.h @@ -280,14 +280,13 @@ template <int> class StaticAssertionHelper { }; // The ASSERT macro is equivalent to CHECK except that it only -// generates code in debug builds. Ditto STATIC_ASSERT. +// generates code in debug builds. 
#ifdef DEBUG #define ASSERT_RESULT(expr) CHECK(expr) #define ASSERT(condition) CHECK(condition) #define ASSERT_EQ(v1, v2) CHECK_EQ(v1, v2) #define ASSERT_NE(v1, v2) CHECK_NE(v1, v2) #define ASSERT_GE(v1, v2) CHECK_GE(v1, v2) -#define STATIC_ASSERT(test) STATIC_CHECK(test) #define SLOW_ASSERT(condition) if (FLAG_enable_slow_asserts) CHECK(condition) #else #define ASSERT_RESULT(expr) (expr) @@ -295,9 +294,14 @@ template <int> class StaticAssertionHelper { }; #define ASSERT_EQ(v1, v2) ((void) 0) #define ASSERT_NE(v1, v2) ((void) 0) #define ASSERT_GE(v1, v2) ((void) 0) -#define STATIC_ASSERT(test) ((void) 0) #define SLOW_ASSERT(condition) ((void) 0) #endif +// Static asserts has no impact on runtime performance, so they can be +// safely enabled in release mode. Moreover, the ((void) 0) expression +// obeys different syntax rules than typedef's, e.g. it can't appear +// inside class declaration, this leads to inconsistency between debug +// and release compilation modes behaviour. +#define STATIC_ASSERT(test) STATIC_CHECK(test) #define ASSERT_TAG_ALIGNED(address) \ diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc index 444698c536..a9fab43f39 100644 --- a/deps/v8/src/codegen.cc +++ b/deps/v8/src/codegen.cc @@ -77,14 +77,23 @@ void CodeGenerator::ProcessDeferred() { // Generate the code. Comment cmnt(masm_, code->comment()); masm_->bind(code->entry_label()); - code->SaveRegisters(); + if (code->AutoSaveAndRestore()) { + code->SaveRegisters(); + } code->Generate(); - code->RestoreRegisters(); - masm_->jmp(code->exit_label()); + if (code->AutoSaveAndRestore()) { + code->RestoreRegisters(); + code->Exit(); + } } } +void DeferredCode::Exit() { + masm_->jmp(exit_label()); +} + + void CodeGenerator::SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers) { RegisterFile saved_counts; diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h index 2a6ad6435b..588468f31a 100644 --- a/deps/v8/src/codegen.h +++ b/deps/v8/src/codegen.h @@ -319,6 +319,15 @@ class DeferredCode: public ZoneObject { void SaveRegisters(); void RestoreRegisters(); + void Exit(); + + // If this returns true then all registers will be saved for the duration + // of the Generate() call. Otherwise the registers are not saved and the + // Generate() call must bracket runtime any runtime calls with calls to + // SaveRegisters() and RestoreRegisters(). In this case the Generate + // method must also call Exit() in order to return to the non-deferred + // code. 
+ virtual bool AutoSaveAndRestore() { return true; } protected: MacroAssembler* masm_; diff --git a/deps/v8/src/cpu-profiler.cc b/deps/v8/src/cpu-profiler.cc index c8d29f8cf5..3e554ccebd 100644 --- a/deps/v8/src/cpu-profiler.cc +++ b/deps/v8/src/cpu-profiler.cc @@ -476,7 +476,7 @@ void CpuProfiler::StartProcessorIfNotStarted() { CpuProfile* CpuProfiler::StopCollectingProfile(const char* title) { const double actual_sampling_rate = generator_->actual_sampling_rate(); - StopProcessorIfLastProfile(); + StopProcessorIfLastProfile(title); CpuProfile* result = profiles_->StopProfiling(TokenEnumerator::kNoSecurityToken, title, @@ -491,14 +491,15 @@ CpuProfile* CpuProfiler::StopCollectingProfile(const char* title) { CpuProfile* CpuProfiler::StopCollectingProfile(Object* security_token, String* title) { const double actual_sampling_rate = generator_->actual_sampling_rate(); - StopProcessorIfLastProfile(); + const char* profile_title = profiles_->GetName(title); + StopProcessorIfLastProfile(profile_title); int token = token_enumerator_->GetTokenId(security_token); - return profiles_->StopProfiling(token, title, actual_sampling_rate); + return profiles_->StopProfiling(token, profile_title, actual_sampling_rate); } -void CpuProfiler::StopProcessorIfLastProfile() { - if (profiles_->is_last_profile()) { +void CpuProfiler::StopProcessorIfLastProfile(const char* title) { + if (profiles_->IsLastProfile(title)) { reinterpret_cast<Sampler*>(Logger::ticker_)->Stop(); processor_->Stop(); processor_->Join(); diff --git a/deps/v8/src/cpu-profiler.h b/deps/v8/src/cpu-profiler.h index 03b8176485..4d5559e4fe 100644 --- a/deps/v8/src/cpu-profiler.h +++ b/deps/v8/src/cpu-profiler.h @@ -260,7 +260,7 @@ class CpuProfiler { void StartProcessorIfNotStarted(); CpuProfile* StopCollectingProfile(const char* title); CpuProfile* StopCollectingProfile(Object* security_token, String* title); - void StopProcessorIfLastProfile(); + void StopProcessorIfLastProfile(const char* title); CpuProfilesCollection* profiles_; unsigned next_profile_uid_; diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc index 5d386cc04b..dbf9df9fda 100644 --- a/deps/v8/src/debug.cc +++ b/deps/v8/src/debug.cc @@ -852,8 +852,8 @@ void Debug::PreemptionWhileInDebugger() { void Debug::Iterate(ObjectVisitor* v) { - v->VisitPointer(BitCast<Object**, Code**>(&(debug_break_return_))); - v->VisitPointer(BitCast<Object**, Code**>(&(debug_break_slot_))); + v->VisitPointer(BitCast<Object**>(&(debug_break_return_))); + v->VisitPointer(BitCast<Object**>(&(debug_break_slot_))); } diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc index d65338385e..14042e85c4 100644 --- a/deps/v8/src/factory.cc +++ b/deps/v8/src/factory.cc @@ -486,6 +486,10 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name, bool force_initial_map) { // Allocate the function Handle<JSFunction> function = NewFunction(name, the_hole_value()); + + // Setup the code pointer in both the shared function info and in + // the function itself. + function->shared()->set_code(*code); function->set_code(*code); if (force_initial_map || @@ -511,9 +515,12 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, Handle<JSObject> prototype, Handle<Code> code, bool force_initial_map) { - // Allocate the function + // Allocate the function. Handle<JSFunction> function = NewFunction(name, prototype); + // Setup the code pointer in both the shared function info and in + // the function itself. 
+ function->shared()->set_code(*code); function->set_code(*code); if (force_initial_map || @@ -535,6 +542,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name, Handle<JSFunction> Factory::NewFunctionWithoutPrototype(Handle<String> name, Handle<Code> code) { Handle<JSFunction> function = NewFunctionWithoutPrototype(name); + function->shared()->set_code(*code); function->set_code(*code); ASSERT(!function->has_initial_map()); ASSERT(!function->has_prototype()); diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h index 22511121cb..c014986f4e 100644 --- a/deps/v8/src/factory.h +++ b/deps/v8/src/factory.h @@ -329,7 +329,7 @@ class Factory : public AllStatic { #define ROOT_ACCESSOR(type, name, camel_name) \ static inline Handle<type> name() { \ - return Handle<type>(BitCast<type**, Object**>( \ + return Handle<type>(BitCast<type**>( \ &Heap::roots_[Heap::k##camel_name##RootIndex])); \ } ROOT_LIST(ROOT_ACCESSOR) @@ -337,7 +337,7 @@ class Factory : public AllStatic { #define SYMBOL_ACCESSOR(name, str) \ static inline Handle<String> name() { \ - return Handle<String>(BitCast<String**, Object**>( \ + return Handle<String>(BitCast<String**>( \ &Heap::roots_[Heap::k##name##RootIndex])); \ } SYMBOL_LIST(SYMBOL_ACCESSOR) diff --git a/deps/v8/src/handles-inl.h b/deps/v8/src/handles-inl.h index 8478bb5cd9..bf19f5f864 100644 --- a/deps/v8/src/handles-inl.h +++ b/deps/v8/src/handles-inl.h @@ -47,7 +47,7 @@ template <class T> inline T* Handle<T>::operator*() const { ASSERT(location_ != NULL); ASSERT(reinterpret_cast<Address>(*location_) != kHandleZapValue); - return *location_; + return *BitCast<T**>(location_); } diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc index 0d218cb972..927cfd9128 100644 --- a/deps/v8/src/handles.cc +++ b/deps/v8/src/handles.cc @@ -771,20 +771,30 @@ bool CompileLazyShared(Handle<SharedFunctionInfo> shared, bool CompileLazy(Handle<JSFunction> function, Handle<Object> receiver, ClearExceptionFlag flag) { - CompilationInfo info(function, 0, receiver); - bool result = CompileLazyHelper(&info, flag); - PROFILE(FunctionCreateEvent(*function)); - return result; + if (function->shared()->is_compiled()) { + function->set_code(function->shared()->code()); + return true; + } else { + CompilationInfo info(function, 0, receiver); + bool result = CompileLazyHelper(&info, flag); + PROFILE(FunctionCreateEvent(*function)); + return result; + } } bool CompileLazyInLoop(Handle<JSFunction> function, Handle<Object> receiver, ClearExceptionFlag flag) { - CompilationInfo info(function, 1, receiver); - bool result = CompileLazyHelper(&info, flag); - PROFILE(FunctionCreateEvent(*function)); - return result; + if (function->shared()->is_compiled()) { + function->set_code(function->shared()->code()); + return true; + } else { + CompilationInfo info(function, 1, receiver); + bool result = CompileLazyHelper(&info, flag); + PROFILE(FunctionCreateEvent(*function)); + return result; + } } diff --git a/deps/v8/src/heap-profiler.cc b/deps/v8/src/heap-profiler.cc index 92ded7b347..7668bbc150 100644 --- a/deps/v8/src/heap-profiler.cc +++ b/deps/v8/src/heap-profiler.cc @@ -111,10 +111,10 @@ int Clusterizer::CalculateNetworkSize(JSObject* obj) { int size = obj->Size(); // If 'properties' and 'elements' are non-empty (thus, non-shared), // take their size into account. 
- if (FixedArray::cast(obj->properties())->length() != 0) { + if (obj->properties() != Heap::empty_fixed_array()) { size += obj->properties()->Size(); } - if (FixedArray::cast(obj->elements())->length() != 0) { + if (obj->elements() != Heap::empty_fixed_array()) { size += obj->elements()->Size(); } // For functions, also account non-empty context and literals sizes. @@ -360,7 +360,7 @@ HeapSnapshot* HeapProfiler::TakeSnapshot(String* name) { HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name) { - Heap::CollectAllGarbage(false); + Heap::CollectAllGarbage(true); HeapSnapshot* result = snapshots_->NewSnapshot(name, next_snapshot_uid_++); HeapSnapshotGenerator generator(result); generator.GenerateSnapshot(); diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc index c4d0439e0d..dfc18cc74a 100644 --- a/deps/v8/src/heap.cc +++ b/deps/v8/src/heap.cc @@ -2452,39 +2452,62 @@ class FlushingStackVisitor : public ThreadVisitor { }; -static void FlushCodeForFunction(SharedFunctionInfo* function_info) { +static bool CodeIsActive(Code* code) { + // Make sure we are not referencing the code from the stack. + for (StackFrameIterator it; !it.done(); it.Advance()) { + if (code->contains(it.frame()->pc())) return true; + } + // Iterate the archived stacks in all threads to check if + // the code is referenced. + FlushingStackVisitor threadvisitor(code); + ThreadManager::IterateArchivedThreads(&threadvisitor); + if (threadvisitor.FoundCode()) return true; + return false; +} + + +static void FlushCodeForFunction(JSFunction* function) { + SharedFunctionInfo* shared_info = function->shared(); + + // Special handling if the function and shared info objects + // have different code objects. + if (function->code() != shared_info->code()) { + // If the shared function has been flushed but the function has not, + // we flush the function if possible. + if (!shared_info->is_compiled() && function->is_compiled() && + !CodeIsActive(function->code())) { + function->set_code(shared_info->code()); + } + return; + } + // The function must be compiled and have the source code available, // to be able to recompile it in case we need the function again. - if (!(function_info->is_compiled() && function_info->HasSourceCode())) return; + if (!(shared_info->is_compiled() && shared_info->HasSourceCode())) return; // We never flush code for Api functions. - if (function_info->IsApiFunction()) return; + if (shared_info->IsApiFunction()) return; // Only flush code for functions. - if (!function_info->code()->kind() == Code::FUNCTION) return; + if (!shared_info->code()->kind() == Code::FUNCTION) return; // Function must be lazy compilable. - if (!function_info->allows_lazy_compilation()) return; + if (!shared_info->allows_lazy_compilation()) return; // If this is a full script wrapped in a function we do no flush the code. - if (function_info->is_toplevel()) return; + if (shared_info->is_toplevel()) return; // If this function is in the compilation cache we do not flush the code. - if (CompilationCache::HasFunction(function_info)) return; + if (CompilationCache::HasFunction(shared_info)) return; - // Make sure we are not referencing the code from the stack. - for (StackFrameIterator it; !it.done(); it.Advance()) { - if (function_info->code()->contains(it.frame()->pc())) return; - } - // Iterate the archived stacks in all threads to check if - // the code is referenced. 
- FlushingStackVisitor threadvisitor(function_info->code()); - ThreadManager::IterateArchivedThreads(&threadvisitor); - if (threadvisitor.FoundCode()) return; + // Check stack and archived threads for the code. + if (CodeIsActive(shared_info->code())) return; // Compute the lazy compilable version of the code. HandleScope scope; - function_info->set_code(*ComputeLazyCompile(function_info->length())); + Code* code = *ComputeLazyCompile(shared_info->length()); + shared_info->set_code(code); + function->set_code(code); } @@ -2496,12 +2519,12 @@ void Heap::FlushCode() { HeapObjectIterator it(old_pointer_space()); for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { if (obj->IsJSFunction()) { - JSFunction* jsfunction = JSFunction::cast(obj); + JSFunction* function = JSFunction::cast(obj); // The function must have a valid context and not be a builtin. - if (jsfunction->unchecked_context()->IsContext() && - !jsfunction->IsBuiltin()) { - FlushCodeForFunction(jsfunction->shared()); + if (function->unchecked_context()->IsContext() && + !function->IsBuiltin()) { + FlushCodeForFunction(function); } } } @@ -2651,6 +2674,7 @@ Object* Heap::InitializeFunction(JSFunction* function, function->initialize_properties(); function->initialize_elements(); function->set_shared(shared); + function->set_code(shared->code()); function->set_prototype_or_initial_map(prototype); function->set_context(undefined_value()); function->set_literals(empty_fixed_array()); @@ -4000,7 +4024,7 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); v->Synchronize("strong_root_list"); - v->VisitPointer(BitCast<Object**, String**>(&hidden_symbol_)); + v->VisitPointer(BitCast<Object**>(&hidden_symbol_)); v->Synchronize("symbol"); Bootstrapper::Iterate(v); diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc index 6c830cba16..2565acb532 100644 --- a/deps/v8/src/ia32/assembler-ia32.cc +++ b/deps/v8/src/ia32/assembler-ia32.cc @@ -1142,6 +1142,21 @@ void Assembler::rcl(Register dst, uint8_t imm8) { } +void Assembler::rcr(Register dst, uint8_t imm8) { + EnsureSpace ensure_space(this); + last_pc_ = pc_; + ASSERT(is_uint5(imm8)); // illegal shift count + if (imm8 == 1) { + EMIT(0xD1); + EMIT(0xD8 | dst.code()); + } else { + EMIT(0xC1); + EMIT(0xD8 | dst.code()); + EMIT(imm8); + } +} + + void Assembler::sar(Register dst, uint8_t imm8) { EnsureSpace ensure_space(this); last_pc_ = pc_; diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h index c76c55cf53..8a5a4c5f51 100644 --- a/deps/v8/src/ia32/assembler-ia32.h +++ b/deps/v8/src/ia32/assembler-ia32.h @@ -625,6 +625,7 @@ class Assembler : public Malloced { void or_(const Operand& dst, const Immediate& x); void rcl(Register dst, uint8_t imm8); + void rcr(Register dst, uint8_t imm8); void sar(Register dst, uint8_t imm8); void sar_cl(Register dst); diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc index 3adb014b14..31f50418d1 100644 --- a/deps/v8/src/ia32/builtins-ia32.cc +++ b/deps/v8/src/ia32/builtins-ia32.cc @@ -548,7 +548,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); __ SmiUntag(ebx); - __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); + __ mov(edx, FieldOperand(edi, JSFunction::kCodeOffset)); __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); __ cmp(eax, Operand(ebx)); __ j(not_equal, 
Handle<Code>(builtin(ArgumentsAdaptorTrampoline))); diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc index ba7785b2d0..3c22defac9 100644 --- a/deps/v8/src/ia32/codegen-ia32.cc +++ b/deps/v8/src/ia32/codegen-ia32.cc @@ -1038,7 +1038,11 @@ const char* GenericBinaryOpStub::GetName() { } -// Call the specialized stub for a binary operation. +// Perform or call the specialized stub for a binary operation. Requires the +// three registers left, right and dst to be distinct and spilled. This +// deferred operation has up to three entry points: The main one calls the +// runtime system. The second is for when the result is a non-Smi. The +// third is for when at least one of the inputs is non-Smi and we have SSE2. class DeferredInlineBinaryOperation: public DeferredCode { public: DeferredInlineBinaryOperation(Token::Value op, @@ -1051,11 +1055,23 @@ class DeferredInlineBinaryOperation: public DeferredCode { : op_(op), dst_(dst), left_(left), right_(right), left_info_(left_info), right_info_(right_info), mode_(mode) { set_comment("[ DeferredInlineBinaryOperation"); + ASSERT(!left.is(right)); } virtual void Generate(); + // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and + // Exit(). + virtual bool AutoSaveAndRestore() { return false; } + + void JumpToAnswerOutOfRange(Condition cond); + void JumpToConstantRhs(Condition cond, Smi* smi_value); + Label* NonSmiInputLabel(); + private: + void GenerateAnswerOutOfRange(); + void GenerateNonSmiInput(); + Token::Value op_; Register dst_; Register left_; @@ -1063,15 +1079,42 @@ class DeferredInlineBinaryOperation: public DeferredCode { TypeInfo left_info_; TypeInfo right_info_; OverwriteMode mode_; + Label answer_out_of_range_; + Label non_smi_input_; + Label constant_rhs_; + Smi* smi_value_; }; +Label* DeferredInlineBinaryOperation::NonSmiInputLabel() { + if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) { + return &non_smi_input_; + } else { + return entry_label(); + } +} + + +void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) { + __ j(cond, &answer_out_of_range_); +} + + +void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond, + Smi* smi_value) { + smi_value_ = smi_value; + __ j(cond, &constant_rhs_); +} + + void DeferredInlineBinaryOperation::Generate() { - Label done; - if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) || - (op_ ==Token::SUB) || - (op_ == Token::MUL) || - (op_ == Token::DIV))) { + // Registers are not saved implicitly for this stub, so we should not + // tread on the registers that were not passed to us. + if (CpuFeatures::IsSupported(SSE2) && + ((op_ == Token::ADD) || + (op_ == Token::SUB) || + (op_ == Token::MUL) || + (op_ == Token::DIV))) { CpuFeatures::Scope use_sse2(SSE2); Label call_runtime, after_alloc_failure; Label left_smi, right_smi, load_right, do_op; @@ -1131,7 +1174,6 @@ void DeferredInlineBinaryOperation::Generate() { __ cvtsi2sd(xmm1, Operand(right_)); __ SmiTag(right_); if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) { - Label alloc_failure; __ push(left_); __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); __ pop(left_); @@ -1146,19 +1188,200 @@ void DeferredInlineBinaryOperation::Generate() { default: UNREACHABLE(); } __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0); - __ jmp(&done); + Exit(); + __ bind(&after_alloc_failure); __ pop(left_); __ bind(&call_runtime); } + // Register spilling is not done implicitly for this stub. + // We can't postpone it any more now though. 
+ SaveRegisters(); + GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB, TypeInfo::Combine(left_info_, right_info_)); stub.GenerateCall(masm_, left_, right_); if (!dst_.is(eax)) __ mov(dst_, eax); - __ bind(&done); + RestoreRegisters(); + Exit(); + + if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) { + GenerateNonSmiInput(); + } + if (answer_out_of_range_.is_linked()) { + GenerateAnswerOutOfRange(); + } +} + + +void DeferredInlineBinaryOperation::GenerateNonSmiInput() { + // We know at least one of the inputs was not a Smi. + // This is a third entry point into the deferred code. + // We may not overwrite left_ because we want to be able + // to call the handling code for non-smi answer and it + // might want to overwrite the heap number in left_. + ASSERT(!right_.is(dst_)); + ASSERT(!left_.is(dst_)); + ASSERT(!left_.is(right_)); + // This entry point is used for bit ops where the right hand side + // is a constant Smi and the left hand side is a heap object. It + // is also used for bit ops where both sides are unknown, but where + // at least one of them is a heap object. + bool rhs_is_constant = constant_rhs_.is_linked(); + // We can't generate code for both cases. + ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked()); + + if (FLAG_debug_code) { + __ int3(); // We don't fall through into this code. + } + + __ bind(&non_smi_input_); + + if (rhs_is_constant) { + __ bind(&constant_rhs_); + // In this case the input is a heap object and it is in the dst_ register. + // The left_ and right_ registers have not been initialized yet. + __ mov(right_, Immediate(smi_value_)); + __ mov(left_, Operand(dst_)); + if (!CpuFeatures::IsSupported(SSE2)) { + __ jmp(entry_label()); + return; + } else { + CpuFeatures::Scope use_sse2(SSE2); + __ JumpIfNotNumber(dst_, left_info_, entry_label()); + __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label()); + __ SmiUntag(right_); + } + } else { + // We know we have SSE2 here because otherwise the label is not linked (see + // NonSmiInputLabel). + CpuFeatures::Scope use_sse2(SSE2); + // Handle the non-constant right hand side situation: + if (left_info_.IsSmi()) { + // Right is a heap object. + __ JumpIfNotNumber(right_, right_info_, entry_label()); + __ ConvertToInt32(right_, right_, dst_, left_info_, entry_label()); + __ mov(dst_, Operand(left_)); + __ SmiUntag(dst_); + } else if (right_info_.IsSmi()) { + // Left is a heap object. + __ JumpIfNotNumber(left_, left_info_, entry_label()); + __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label()); + __ SmiUntag(right_); + } else { + // Here we don't know if it's one or both that is a heap object. + Label only_right_is_heap_object, got_both; + __ mov(dst_, Operand(left_)); + __ SmiUntag(dst_, &only_right_is_heap_object); + // Left was a heap object. + __ JumpIfNotNumber(left_, left_info_, entry_label()); + __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label()); + __ SmiUntag(right_, &got_both); + // Both were heap objects. + __ rcl(right_, 1); // Put tag back. 
+ __ JumpIfNotNumber(right_, right_info_, entry_label()); + __ ConvertToInt32(right_, right_, no_reg, left_info_, entry_label()); + __ jmp(&got_both); + __ bind(&only_right_is_heap_object); + __ JumpIfNotNumber(right_, right_info_, entry_label()); + __ ConvertToInt32(right_, right_, no_reg, left_info_, entry_label()); + __ bind(&got_both); + } + } + ASSERT(op_ == Token::BIT_AND || + op_ == Token::BIT_OR || + op_ == Token::BIT_XOR || + right_.is(ecx)); + switch (op_) { + case Token::BIT_AND: __ and_(dst_, Operand(right_)); break; + case Token::BIT_OR: __ or_(dst_, Operand(right_)); break; + case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break; + case Token::SHR: __ shr_cl(dst_); break; + case Token::SAR: __ sar_cl(dst_); break; + case Token::SHL: __ shl_cl(dst_); break; + default: UNREACHABLE(); + } + if (op_ == Token::SHR) { + // Check that the *unsigned* result fits in a smi. Neither of + // the two high-order bits can be set: + // * 0x80000000: high bit would be lost when smi tagging. + // * 0x40000000: this number would convert to negative when smi + // tagging. + __ test(dst_, Immediate(0xc0000000)); + __ j(not_zero, &answer_out_of_range_); + } else { + // Check that the *signed* result fits in a smi. + __ cmp(dst_, 0xc0000000); + __ j(negative, &answer_out_of_range_); + } + __ SmiTag(dst_); + Exit(); +} + + +void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() { + Label after_alloc_failure2; + Label allocation_ok; + __ bind(&after_alloc_failure2); + // We have to allocate a number, causing a GC, while keeping hold of + // the answer in dst_. The answer is not a Smi. We can't just call the + // runtime shift function here because we already threw away the inputs. + __ xor_(left_, Operand(left_)); + __ shl(dst_, 1); // Put top bit in carry flag and Smi tag the low bits. + __ rcr(left_, 1); // Rotate with carry. + __ push(dst_); // Smi tagged low 31 bits. + __ push(left_); // 0 or 0x80000000, which is Smi tagged in both cases. + __ CallRuntime(Runtime::kNumberAlloc, 0); + if (!left_.is(eax)) { + __ mov(left_, eax); + } + __ pop(right_); // High bit. + __ pop(dst_); // Low 31 bits. + __ shr(dst_, 1); // Put 0 in top bit. + __ or_(dst_, Operand(right_)); + __ jmp(&allocation_ok); + + // This is the second entry point to the deferred code. It is used only by + // the bit operations. + // The dst_ register has the answer. It is not Smi tagged. If mode_ is + // OVERWRITE_LEFT then left_ must contain either an overwritable heap number + // or a Smi. + // Put a heap number pointer in left_. + __ bind(&answer_out_of_range_); + SaveRegisters(); + if (mode_ == OVERWRITE_LEFT) { + __ test(left_, Immediate(kSmiTagMask)); + __ j(not_zero, &allocation_ok); + } + // This trashes right_. + __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2); + __ bind(&allocation_ok); + if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) { + CpuFeatures::Scope use_sse2(SSE2); + ASSERT(Token::IsBitOp(op_)); + // Signed conversion. + __ cvtsi2sd(xmm0, Operand(dst_)); + __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0); + } else { + if (op_ == Token::SHR) { + __ push(Immediate(0)); // High word of unsigned value. + __ push(dst_); + __ fild_d(Operand(esp, 0)); + __ Drop(2); + } else { + ASSERT(Token::IsBitOp(op_)); + __ push(dst_); + __ fild_s(Operand(esp, 0)); // Signed conversion. 
+ __ pop(dst_); + } + __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset)); + } + __ mov(dst_, left_); + RestoreRegisters(); + Exit(); } @@ -1499,10 +1722,25 @@ void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, TypeInfo left_info, TypeInfo right_info, DeferredCode* deferred) { + JumpIfNotBothSmiUsingTypeInfo(left, + right, + scratch, + left_info, + right_info, + deferred->entry_label()); +} + + +void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, + Register right, + Register scratch, + TypeInfo left_info, + TypeInfo right_info, + Label* on_not_smi) { if (left.is(right)) { if (!left_info.IsSmi()) { __ test(left, Immediate(kSmiTagMask)); - deferred->Branch(not_zero); + __ j(not_zero, on_not_smi); } else { if (FLAG_debug_code) __ AbortIfNotSmi(left); } @@ -1511,17 +1749,17 @@ void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, __ mov(scratch, left); __ or_(scratch, Operand(right)); __ test(scratch, Immediate(kSmiTagMask)); - deferred->Branch(not_zero); + __ j(not_zero, on_not_smi); } else { __ test(left, Immediate(kSmiTagMask)); - deferred->Branch(not_zero); + __ j(not_zero, on_not_smi); if (FLAG_debug_code) __ AbortIfNotSmi(right); } } else { if (FLAG_debug_code) __ AbortIfNotSmi(left); if (!right_info.IsSmi()) { __ test(right, Immediate(kSmiTagMask)); - deferred->Branch(not_zero); + __ j(not_zero, on_not_smi); } else { if (FLAG_debug_code) __ AbortIfNotSmi(right); } @@ -1606,13 +1844,16 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, right->ToRegister(); frame_->Spill(eax); frame_->Spill(edx); + // DeferredInlineBinaryOperation requires all the registers that it is + // told about to be spilled and distinct. + Result distinct_right = frame_->MakeDistinctAndSpilled(left, right); // Check that left and right are smi tagged. DeferredInlineBinaryOperation* deferred = new DeferredInlineBinaryOperation(op, (op == Token::DIV) ? eax : edx, left->reg(), - right->reg(), + distinct_right.reg(), left_type_info, right_type_info, overwrite_mode); @@ -1695,15 +1936,23 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, left->ToRegister(); ASSERT(left->is_register() && !left->reg().is(ecx)); ASSERT(right->is_register() && right->reg().is(ecx)); + if (left_type_info.IsSmi()) { + if (FLAG_debug_code) __ AbortIfNotSmi(left->reg()); + } + if (right_type_info.IsSmi()) { + if (FLAG_debug_code) __ AbortIfNotSmi(right->reg()); + } // We will modify right, it must be spilled. frame_->Spill(ecx); + // DeferredInlineBinaryOperation requires all the registers that it is told + // about to be spilled and distinct. We know that right is ecx and left is + // not ecx. + frame_->Spill(left->reg()); // Use a fresh answer register to avoid spilling the left operand. answer = allocator_->Allocate(); ASSERT(answer.is_valid()); - // Check that both operands are smis using the answer register as a - // temporary. DeferredInlineBinaryOperation* deferred = new DeferredInlineBinaryOperation(op, answer.reg(), @@ -1712,55 +1961,28 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, left_type_info, right_type_info, overwrite_mode); + JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), + left_type_info, right_type_info, + deferred->NonSmiInputLabel()); - Label do_op, left_nonsmi; - // If right is a smi we make a fast case if left is either a smi - // or a heapnumber. 
- if (CpuFeatures::IsSupported(SSE2) && right_type_info.IsSmi()) { - CpuFeatures::Scope use_sse2(SSE2); - __ mov(answer.reg(), left->reg()); - // Fast case - both are actually smis. - if (!left_type_info.IsSmi()) { - __ test(answer.reg(), Immediate(kSmiTagMask)); - __ j(not_zero, &left_nonsmi); - } else { - if (FLAG_debug_code) __ AbortIfNotSmi(left->reg()); - } - if (FLAG_debug_code) __ AbortIfNotSmi(right->reg()); - __ SmiUntag(answer.reg()); - __ jmp(&do_op); + // Untag both operands. + __ mov(answer.reg(), left->reg()); + __ SmiUntag(answer.reg()); + __ SmiUntag(right->reg()); // Right is ecx. - __ bind(&left_nonsmi); - // Branch if not a heapnumber. - __ cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset), - Factory::heap_number_map()); - deferred->Branch(not_equal); - - // Load integer value into answer register using truncation. - __ cvttsd2si(answer.reg(), - FieldOperand(answer.reg(), HeapNumber::kValueOffset)); - // Branch if we do not fit in a smi. - __ cmp(answer.reg(), 0xc0000000); - deferred->Branch(negative); - } else { - JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), - left_type_info, right_type_info, deferred); - - // Untag both operands. - __ mov(answer.reg(), left->reg()); - __ SmiUntag(answer.reg()); - } - - __ bind(&do_op); - __ SmiUntag(ecx); // Perform the operation. + ASSERT(right->reg().is(ecx)); switch (op) { - case Token::SAR: + case Token::SAR: { __ sar_cl(answer.reg()); - // No checks of result necessary + if (!left_type_info.IsSmi()) { + // Check that the *signed* result fits in a smi. + __ cmp(answer.reg(), 0xc0000000); + deferred->JumpToAnswerOutOfRange(negative); + } break; + } case Token::SHR: { - Label result_ok; __ shr_cl(answer.reg()); // Check that the *unsigned* result fits in a smi. Neither of // the two high-order bits can be set: @@ -1773,21 +1995,14 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, // case. The low bit of the left argument may be lost, but only // in a case where it is dropped anyway. __ test(answer.reg(), Immediate(0xc0000000)); - __ j(zero, &result_ok); - __ SmiTag(ecx); - deferred->Jump(); - __ bind(&result_ok); + deferred->JumpToAnswerOutOfRange(not_zero); break; } case Token::SHL: { - Label result_ok; __ shl_cl(answer.reg()); // Check that the *signed* result fits in a smi. __ cmp(answer.reg(), 0xc0000000); - __ j(positive, &result_ok); - __ SmiTag(ecx); - deferred->Jump(); - __ bind(&result_ok); + deferred->JumpToAnswerOutOfRange(negative); break; } default: @@ -1805,6 +2020,9 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, // Handle the other binary operations. left->ToRegister(); right->ToRegister(); + // DeferredInlineBinaryOperation requires all the registers that it is told + // about to be spilled. + Result distinct_right = frame_->MakeDistinctAndSpilled(left, right); // A newly allocated register answer is used to hold the answer. The // registers containing left and right are not modified so they don't // need to be spilled in the fast case. 
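The shift hunks above route out-of-range results to the deferred code's answer_out_of_range_ entry instead of retagging and jumping to the generic stub; the range tests themselves encode the ia32 smi representation (a 31-bit signed integer shifted left by the one-bit tag, per the SmiUntag assertions later in this patch). A standalone sketch of what the 0xc0000000 checks accept and reject; the helper names below are illustrative, not taken from the patch:

// Sketch of the smi range checks used above (illustrative names).
#include <cassert>
#include <cstdint>

// Signed results (SAR, SHL): mirrors `cmp reg, 0xc0000000; j(negative, ...)`.
// Only values in [-2^30, 2^30 - 1] survive smi tagging.
static bool SignedResultFitsInSmi(int32_t value) {
  return value >= -(INT32_C(1) << 30) && value < (INT32_C(1) << 30);
}

// Unsigned results (SHR): mirrors `test reg, 0xc0000000; j(not_zero, ...)`.
// Bit 31 would be lost when tagging; bit 30 would convert to a negative smi.
static bool UnsignedResultFitsInSmi(uint32_t value) {
  return (value & 0xc0000000u) == 0;
}

int main() {
  assert(SignedResultFitsInSmi(0x3fffffff));
  assert(!SignedResultFitsInSmi(0x40000000));
  assert(SignedResultFitsInSmi(-0x40000000));
  assert(!SignedResultFitsInSmi(-0x40000001));
  assert(UnsignedResultFitsInSmi(0x3fffffffu));
  assert(!UnsignedResultFitsInSmi(0x80000000u));
  return 0;
}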
@@ -1816,12 +2034,16 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, new DeferredInlineBinaryOperation(op, answer.reg(), left->reg(), - right->reg(), + distinct_right.reg(), left_type_info, right_type_info, overwrite_mode); - JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), - left_type_info, right_type_info, deferred); + Label non_smi_bit_op; + if (op != Token::BIT_OR) { + JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), + left_type_info, right_type_info, + deferred->NonSmiInputLabel()); + } __ mov(answer.reg(), left->reg()); switch (op) { @@ -1864,6 +2086,8 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, case Token::BIT_OR: __ or_(answer.reg(), Operand(right->reg())); + __ test(answer.reg(), Immediate(kSmiTagMask)); + __ j(not_zero, deferred->NonSmiInputLabel()); break; case Token::BIT_AND: @@ -1878,6 +2102,7 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, UNREACHABLE(); break; } + deferred->BindExit(); left->Unuse(); right->Unuse(); @@ -2363,27 +2588,25 @@ Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr, case Token::BIT_XOR: case Token::BIT_AND: { operand->ToRegister(); + // DeferredInlineBinaryOperation requires all the registers that it is + // told about to be spilled. frame_->Spill(operand->reg()); - DeferredCode* deferred = NULL; - if (reversed) { - deferred = - new DeferredInlineSmiOperationReversed(op, - operand->reg(), - smi_value, - operand->reg(), - operand->type_info(), - overwrite_mode); - } else { - deferred = new DeferredInlineSmiOperation(op, - operand->reg(), - operand->reg(), - operand->type_info(), - smi_value, - overwrite_mode); - } + DeferredInlineBinaryOperation* deferred = NULL; if (!operand->type_info().IsSmi()) { + Result left = allocator()->Allocate(); + ASSERT(left.is_valid()); + Result right = allocator()->Allocate(); + ASSERT(right.is_valid()); + deferred = new DeferredInlineBinaryOperation( + op, + operand->reg(), + left.reg(), + right.reg(), + operand->type_info(), + TypeInfo::Smi(), + overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT); __ test(operand->reg(), Immediate(kSmiTagMask)); - deferred->Branch(not_zero); + deferred->JumpToConstantRhs(not_zero, smi_value); } else if (FLAG_debug_code) { __ AbortIfNotSmi(operand->reg()); } @@ -2399,7 +2622,7 @@ Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr, __ or_(Operand(operand->reg()), Immediate(value)); } } - deferred->BindExit(); + if (deferred != NULL) deferred->BindExit(); answer = *operand; break; } @@ -3212,10 +3435,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand, __ j(zero, &build_args); __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx); __ j(not_equal, &build_args); - __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset)); Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); - __ cmp(FieldOperand(ecx, SharedFunctionInfo::kCodeOffset), - Immediate(apply_code)); + __ cmp(FieldOperand(eax, JSFunction::kCodeOffset), Immediate(apply_code)); __ j(not_equal, &build_args); // Check that applicand is a function. @@ -9467,6 +9688,11 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) { __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi); __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx); + // Initialize the code pointer in the function to be the one + // found in the shared function info object. 
+ __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); + __ mov(FieldOperand(eax, JSFunction::kCodeOffset), edx); + // Return and remove the on-stack parameter. __ ret(1 * kPointerSize); diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h index 2368b23c90..81a5da1bc5 100644 --- a/deps/v8/src/ia32/codegen-ia32.h +++ b/deps/v8/src/ia32/codegen-ia32.h @@ -530,7 +530,7 @@ class CodeGenerator: public AstVisitor { // Emits code sequence that jumps to deferred code if the inputs // are not both smis. Cannot be in MacroAssembler because it takes - // advantage of TypeInfo to skip unneeded checks. + // a deferred code object. void JumpIfNotBothSmiUsingTypeInfo(Register left, Register right, Register scratch, @@ -538,6 +538,15 @@ class CodeGenerator: public AstVisitor { TypeInfo right_info, DeferredCode* deferred); + // Emits code sequence that jumps to the label if the inputs + // are not both smis. + void JumpIfNotBothSmiUsingTypeInfo(Register left, + Register right, + Register scratch, + TypeInfo left_info, + TypeInfo right_info, + Label* on_non_smi); + // If possible, combine two constant smi values using op to produce // a smi result, and push it on the virtual frame, all at compile time. // Returns true if it succeeds. Otherwise it has no effect. diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index d0eeb7734c..37b6436cfb 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -377,6 +377,12 @@ void MacroAssembler::AbortIfNotSmi(Register object) { } +void MacroAssembler::AbortIfSmi(Register object) { + test(object, Immediate(kSmiTagMask)); + Assert(not_equal, "Operand a smi"); +} + + void MacroAssembler::EnterFrame(StackFrame::Type type) { push(ebp); mov(ebp, Operand(esp)); @@ -1292,7 +1298,7 @@ void MacroAssembler::InvokeFunction(Register fun, mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); SmiUntag(ebx); - mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); + mov(edx, FieldOperand(edi, JSFunction::kCodeOffset)); lea(edx, FieldOperand(edx, Code::kHeaderSize)); ParameterCount expected(ebx); @@ -1344,8 +1350,7 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { // Make sure the code objects in the builtins object and in the // builtin function are the same. 
push(target); - mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); - mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset)); + mov(target, FieldOperand(edi, JSFunction::kCodeOffset)); cmp(target, Operand(esp, 0)); Assert(equal, "Builtin code object changed"); pop(target); @@ -1510,6 +1515,61 @@ void MacroAssembler::Abort(const char* msg) { } +void MacroAssembler::JumpIfNotNumber(Register reg, + TypeInfo info, + Label* on_not_number) { + if (FLAG_debug_code) AbortIfSmi(reg); + if (!info.IsNumber()) { + cmp(FieldOperand(reg, HeapObject::kMapOffset), + Factory::heap_number_map()); + j(not_equal, on_not_number); + } +} + + +void MacroAssembler::ConvertToInt32(Register dst, + Register source, + Register scratch, + TypeInfo info, + Label* on_not_int32) { + if (FLAG_debug_code) { + AbortIfSmi(source); + AbortIfNotNumber(source); + } + if (info.IsInteger32()) { + cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset)); + } else { + Label done; + bool push_pop = (scratch.is(no_reg) && dst.is(source)); + ASSERT(!scratch.is(source)); + if (push_pop) { + push(dst); + scratch = dst; + } + if (scratch.is(no_reg)) scratch = dst; + cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset)); + cmp(scratch, 0x80000000u); + if (push_pop || dst.is(source)) { + j(not_equal, &done); + if (push_pop) { + pop(dst); + jmp(on_not_int32); + } + } else { + j(equal, on_not_int32); + } + + bind(&done); + if (push_pop) { + add(Operand(esp), Immediate(kPointerSize)); // Pop. + } + if (!scratch.is(dst)) { + mov(dst, scratch); + } + } +} + + void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( Register instance_type, Register scratch, diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h index a17a2b4c1b..0b16f0d40b 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/ia32/macro-assembler-ia32.h @@ -29,6 +29,7 @@ #define V8_IA32_MACRO_ASSEMBLER_IA32_H_ #include "assembler.h" +#include "type-info.h" namespace v8 { namespace internal { @@ -225,12 +226,44 @@ class MacroAssembler: public Assembler { sar(reg, kSmiTagSize); } + // Modifies the register even if it does not contain a Smi! + void SmiUntag(Register reg, TypeInfo info, Label* non_smi) { + ASSERT(kSmiTagSize == 1); + sar(reg, kSmiTagSize); + if (info.IsSmi()) { + ASSERT(kSmiTag == 0); + j(carry, non_smi); + } + } + + // Modifies the register even if it does not contain a Smi! + void SmiUntag(Register reg, Label* is_smi) { + ASSERT(kSmiTagSize == 1); + sar(reg, kSmiTagSize); + ASSERT(kSmiTag == 0); + j(not_carry, is_smi); + } + + // Assumes input is a heap object. + void JumpIfNotNumber(Register reg, TypeInfo info, Label* on_not_number); + + // Assumes input is a heap number. Jumps on things out of range. Also jumps + // on the min negative int32. Ignores frational parts. + void ConvertToInt32(Register dst, + Register src, // Can be the same as dst. + Register scratch, // Can be no_reg or dst, but not src. + TypeInfo info, + Label* on_not_int32); + // Abort execution if argument is not a number. Used in debug code. void AbortIfNotNumber(Register object); // Abort execution if argument is not a smi. Used in debug code. void AbortIfNotSmi(Register object); + // Abort execution if argument is a smi. Used in debug code. 
+ void AbortIfSmi(Register object); + // --------------------------------------------------------------------------- // Exception handling diff --git a/deps/v8/src/ia32/virtual-frame-ia32.h b/deps/v8/src/ia32/virtual-frame-ia32.h index e00626b7d4..b9faa46145 100644 --- a/deps/v8/src/ia32/virtual-frame-ia32.h +++ b/deps/v8/src/ia32/virtual-frame-ia32.h @@ -139,6 +139,22 @@ class VirtualFrame: public ZoneObject { if (is_used(reg)) SpillElementAt(register_location(reg)); } + // Make the two registers distinct and spill them. Returns the second + // register. If the registers were not distinct then it returns the new + // second register. + Result MakeDistinctAndSpilled(Result* left, Result* right) { + Spill(left->reg()); + Spill(right->reg()); + if (left->reg().is(right->reg())) { + RegisterAllocator* allocator = cgen()->allocator(); + Result fresh = allocator->Allocate(); + ASSERT(fresh.is_valid()); + masm()->mov(fresh.reg(), right->reg()); + return fresh; + } + return *right; + } + // Spill all occurrences of an arbitrary register if possible. Return the // register spilled or no_reg if it was not possible to free any register // (ie, they all have frame-external references). diff --git a/deps/v8/src/list-inl.h b/deps/v8/src/list-inl.h index e41db11fc5..e277bc8723 100644 --- a/deps/v8/src/list-inl.h +++ b/deps/v8/src/list-inl.h @@ -127,6 +127,13 @@ void List<T, P>::Iterate(void (*callback)(T* x)) { template<typename T, class P> +template<class Visitor> +void List<T, P>::Iterate(Visitor* visitor) { + for (int i = 0; i < length_; i++) visitor->Apply(&data_[i]); +} + + +template<typename T, class P> bool List<T, P>::Contains(const T& elm) { for (int i = 0; i < length_; i++) { if (data_[i] == elm) diff --git a/deps/v8/src/list.h b/deps/v8/src/list.h index d3c2767a53..9abf61ce55 100644 --- a/deps/v8/src/list.h +++ b/deps/v8/src/list.h @@ -117,6 +117,8 @@ class List { // Iterate through all list entries, starting at index 0. void Iterate(void (*callback)(T* x)); + template<class Visitor> + void Iterate(Visitor* visitor); // Sort all list entries (using QuickSort) void Sort(int (*cmp)(const T* x, const T* y)); diff --git a/deps/v8/src/mips/simulator-mips.cc b/deps/v8/src/mips/simulator-mips.cc index 886b9e4faa..57bed6a047 100644 --- a/deps/v8/src/mips/simulator-mips.cc +++ b/deps/v8/src/mips/simulator-mips.cc @@ -606,7 +606,7 @@ void Simulator::set_fpu_register(int fpureg, int32_t value) { void Simulator::set_fpu_register_double(int fpureg, double value) { ASSERT((fpureg >= 0) && (fpureg < kNumFPURegisters) && ((fpureg % 2) == 0)); - *v8i::BitCast<double*, int32_t*>(&FPUregisters_[fpureg]) = value; + *v8i::BitCast<double*>(&FPUregisters_[fpureg]) = value; } @@ -627,8 +627,7 @@ int32_t Simulator::get_fpu_register(int fpureg) const { double Simulator::get_fpu_register_double(int fpureg) const { ASSERT((fpureg >= 0) && (fpureg < kNumFPURegisters) && ((fpureg % 2) == 0)); - return *v8i::BitCast<double*, int32_t*>( - const_cast<int32_t*>(&FPUregisters_[fpureg])); + return *v8i::BitCast<double*>(const_cast<int32_t*>(&FPUregisters_[fpureg])); } // Raw access to the PC register. 
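The list.h / list-inl.h hunks above add a templated Iterate overload that accepts any visitor object with an Apply(T*) method (the inline definition calls visitor->Apply(&data_[i])); the heap profiler code further down relies on it, for example when a RetainedSizeCalculator with Apply(HeapEntry**) walks the snapshot's entries. A minimal usage sketch, assuming only the List interface shown in the hunk; the Summer type and the int element type are illustrative:

// Sketch of the visitor-based List<T>::Iterate added above (illustrative use).
#include "list.h"
#include "list-inl.h"

namespace v8 { namespace internal {

struct Summer {
  int total;
  Summer() : total(0) {}
  // List<int>::Iterate(Visitor*) calls visitor->Apply(&element) per element.
  void Apply(int* element) { total += *element; }
};

static int SumAll(List<int>* numbers) {
  Summer summer;
  numbers->Iterate(&summer);
  return summer.total;
}

} }  // namespace v8::internal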
@@ -903,7 +902,7 @@ void Simulator::DecodeTypeRegister(Instruction* instr) { break; case MFHC1: fp_out = get_fpu_register_double(fs_reg); - alu_out = *v8i::BitCast<int32_t*, double*>(&fp_out); + alu_out = *v8i::BitCast<int32_t*>(&fp_out); break; case MTC1: case MTHC1: diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index 101096d6c9..c81f4ab22a 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -2694,12 +2694,14 @@ bool JSFunction::IsBuiltin() { Code* JSFunction::code() { - return shared()->code(); + return Code::cast(READ_FIELD(this, kCodeOffset)); } void JSFunction::set_code(Code* value) { - shared()->set_code(value); + // Skip the write barrier because code is never in new space. + ASSERT(!Heap::InNewSpace(value)); + WRITE_FIELD(this, kCodeOffset, value); } @@ -2771,7 +2773,7 @@ bool JSFunction::should_have_prototype() { bool JSFunction::is_compiled() { - return shared()->is_compiled(); + return code()->kind() != Code::STUB; } diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc index 4e20959a7a..aabb0413a4 100644 --- a/deps/v8/src/objects.cc +++ b/deps/v8/src/objects.cc @@ -5823,16 +5823,24 @@ bool JSObject::HasElementWithInterceptor(JSObject* receiver, uint32_t index) { CustomArguments args(interceptor->data(), receiver, this); v8::AccessorInfo info(args.end()); if (!interceptor->query()->IsUndefined()) { - v8::IndexedPropertyQuery query = - v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query()); + v8::IndexedPropertyQueryImpl query = + v8::ToCData<v8::IndexedPropertyQueryImpl>(interceptor->query()); LOG(ApiIndexedPropertyAccess("interceptor-indexed-has", this, index)); - v8::Handle<v8::Boolean> result; + v8::Handle<v8::Value> result; { // Leaving JavaScript. VMState state(EXTERNAL); result = query(index, info); } - if (!result.IsEmpty()) return result->IsTrue(); + if (!result.IsEmpty()) { + // IsBoolean check would be removed when transition to new API is over. + if (result->IsBoolean()) { + return result->IsTrue() ? true : false; + } else { + ASSERT(result->IsInt32()); + return true; // absence of property is signaled by empty handle. + } + } } else if (!interceptor->getter()->IsUndefined()) { v8::IndexedPropertyGetter getter = v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter()); diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index 8fa251efab..1ca3003397 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -3608,7 +3608,9 @@ class JSFunction: public JSObject { static Context* GlobalContextFromLiterals(FixedArray* literals); // Layout descriptors. - static const int kPrototypeOrInitialMapOffset = JSObject::kHeaderSize; + static const int kCodeOffset = JSObject::kHeaderSize; + static const int kPrototypeOrInitialMapOffset = + kCodeOffset + kPointerSize; static const int kSharedFunctionInfoOffset = kPrototypeOrInitialMapOffset + kPointerSize; static const int kContextOffset = kSharedFunctionInfoOffset + kPointerSize; diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc index e935b7b4a7..1df7c21450 100644 --- a/deps/v8/src/parser.cc +++ b/deps/v8/src/parser.cc @@ -3587,10 +3587,8 @@ ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter, // { ... , get foo() { ... }, ... , set foo(v) { ... v ... } , ... } // We have already read the "get" or "set" keyword. Token::Value next = Next(); - if (next == Token::IDENTIFIER || - next == Token::STRING || - next == Token::NUMBER || - Token::IsKeyword(next)) { + // TODO(820): Allow NUMBER and STRING as well (and handle array indices). 
+ if (next == Token::IDENTIFIER || Token::IsKeyword(next)) { Handle<String> name = factory()->LookupSymbol(scanner_.literal_string(), scanner_.literal_length()); @@ -3652,8 +3650,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) { factory()->LookupSymbol(scanner_.literal_string(), scanner_.literal_length()); uint32_t index; - if (!string.is_null() && - string->AsArrayIndex(&index)) { + if (!string.is_null() && string->AsArrayIndex(&index)) { key = NewNumberLiteral(index); break; } diff --git a/deps/v8/src/profile-generator-inl.h b/deps/v8/src/profile-generator-inl.h index ea9bc98767..0c50581ab7 100644 --- a/deps/v8/src/profile-generator-inl.h +++ b/deps/v8/src/profile-generator-inl.h @@ -97,13 +97,6 @@ void CodeMap::DeleteCode(Address addr) { } -bool CpuProfilesCollection::is_last_profile() { - // Called from VM thread, and only it can mutate the list, - // so no locking is needed here. - return current_profiles_.length() == 1; -} - - const char* CpuProfilesCollection::GetFunctionName(String* name) { return GetFunctionName(GetName(name)); } @@ -130,17 +123,6 @@ CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) { } } - -template<class Visitor> -void HeapEntriesMap::Apply(Visitor* visitor) { - for (HashMap::Entry* p = entries_.Start(); - p != NULL; - p = entries_.Next(p)) { - if (!IsAlias(p->value)) - visitor->Apply(reinterpret_cast<HeapEntry*>(p->value)); - } -} - } } // namespace v8::internal #endif // ENABLE_LOGGING_AND_PROFILING diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc index 7054b12595..cd46badf09 100644 --- a/deps/v8/src/profile-generator.cc +++ b/deps/v8/src/profile-generator.cc @@ -542,13 +542,6 @@ CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id, } -CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id, - String* title, - double actual_sampling_rate) { - return StopProfiling(security_token_id, GetName(title), actual_sampling_rate); -} - - CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id, unsigned uid) { HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid), @@ -574,6 +567,15 @@ CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id, } +bool CpuProfilesCollection::IsLastProfile(const char* title) { + // Called from VM thread, and only it can mutate the list, + // so no locking is needed here. + if (current_profiles_.length() != 1) return false; + return StrLength(title) == 0 + || strcmp(current_profiles_[0]->title(), title) == 0; +} + + int CpuProfilesCollection::TokenToIndex(int security_token_id) { ASSERT(TokenEnumerator::kNoSecurityToken == -1); return security_token_id + 1; // kNoSecurityToken -> 0, 0 -> 1, ... 
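The new CpuProfilesCollection::IsLastProfile(const char* title) above replaces the old parameterless is_last_profile(): it answers true only when exactly one profile is being recorded and the requested title is either empty or equal to that profile's title. A small sketch of that contract in isolation; the function and parameter names here are illustrative, not V8 API:

// Sketch of the IsLastProfile() contract shown above (illustrative names).
#include <cstring>

static bool MatchesLastProfile(int active_profile_count,
                               const char* current_title,
                               const char* requested_title) {
  // Only meaningful when a single profile is in flight.
  if (active_profile_count != 1) return false;
  // An empty requested title matches any profile; otherwise compare titles.
  return requested_title[0] == '\0' ||
         std::strcmp(current_title, requested_title) == 0;
}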
@@ -798,83 +800,102 @@ void ProfileGenerator::RecordTickSample(const TickSample& sample) { } -HeapGraphEdge::HeapGraphEdge(Type type, - const char* name, - HeapEntry* from, - HeapEntry* to) - : type_(type), name_(name), from_(from), to_(to) { - ASSERT(type_ == CONTEXT_VARIABLE || type_ == PROPERTY || type_ == INTERNAL); -} - - -HeapGraphEdge::HeapGraphEdge(int index, - HeapEntry* from, - HeapEntry* to) - : type_(ELEMENT), index_(index), from_(from), to_(to) { -} - - -static void DeleteHeapGraphEdge(HeapGraphEdge** edge_ptr) { - delete *edge_ptr; +void HeapGraphEdge::Init( + int child_index, Type type, const char* name, HeapEntry* to) { + ASSERT(type == kContextVariable || type == kProperty || type == kInternal); + child_index_ = child_index; + type_ = type; + name_ = name; + to_ = to; } -static void DeleteHeapGraphPath(HeapGraphPath** path_ptr) { - delete *path_ptr; +void HeapGraphEdge::Init(int child_index, int index, HeapEntry* to) { + child_index_ = child_index; + type_ = kElement; + index_ = index; + to_ = to; } -HeapEntry::~HeapEntry() { - children_.Iterate(DeleteHeapGraphEdge); - retaining_paths_.Iterate(DeleteHeapGraphPath); +HeapEntry* HeapGraphEdge::From() { + return reinterpret_cast<HeapEntry*>(this - child_index_) - 1; } -void HeapEntry::AddEdge(HeapGraphEdge* edge) { - children_.Add(edge); - edge->to()->retainers_.Add(edge); +void HeapEntry::Init(HeapSnapshot* snapshot, + int children_count, + int retainers_count) { + Init(snapshot, kInternal, "", 0, 0, children_count, retainers_count); } -void HeapEntry::SetClosureReference(const char* name, HeapEntry* entry) { - AddEdge( - new HeapGraphEdge(HeapGraphEdge::CONTEXT_VARIABLE, name, this, entry)); +void HeapEntry::Init(HeapSnapshot* snapshot, + Type type, + const char* name, + uint64_t id, + int self_size, + int children_count, + int retainers_count) { + snapshot_ = snapshot; + type_ = type; + painted_ = kUnpainted; + calculated_data_index_ = kNoCalculatedData; + name_ = name; + id_ = id; + self_size_ = self_size; + children_count_ = children_count; + retainers_count_ = retainers_count; } -void HeapEntry::SetElementReference(int index, HeapEntry* entry) { - AddEdge(new HeapGraphEdge(index, this, entry)); +void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, + int child_index, + const char* name, + HeapEntry* entry, + int retainer_index) { + children_arr()[child_index].Init(child_index, type, name, entry); + entry->retainers_arr()[retainer_index] = children_arr() + child_index; } -void HeapEntry::SetInternalReference(const char* name, HeapEntry* entry) { - AddEdge(new HeapGraphEdge(HeapGraphEdge::INTERNAL, name, this, entry)); +void HeapEntry::SetElementReference( + int child_index, int index, HeapEntry* entry, int retainer_index) { + children_arr()[child_index].Init(child_index, index, entry); + entry->retainers_arr()[retainer_index] = children_arr() + child_index; } -void HeapEntry::SetPropertyReference(const char* name, HeapEntry* entry) { - AddEdge(new HeapGraphEdge(HeapGraphEdge::PROPERTY, name, this, entry)); +void HeapEntry::SetUnidirElementReference( + int child_index, int index, HeapEntry* entry) { + children_arr()[child_index].Init(child_index, index, entry); } -void HeapEntry::SetAutoIndexReference(HeapEntry* entry) { - SetElementReference(next_auto_index_++, entry); -} - - -void HeapEntry::SetUnidirAutoIndexReference(HeapEntry* entry) { - children_.Add(new HeapGraphEdge(next_auto_index_++, this, entry)); +int HeapEntry::ReachableSize() { + if (calculated_data_index_ == kNoCalculatedData) { + 
calculated_data_index_ = snapshot_->AddCalculatedData(); + } + return snapshot_->GetCalculatedData( + calculated_data_index_).ReachableSize(this); } -int HeapEntry::TotalSize() { - return total_size_ != kUnknownSize ? total_size_ : CalculateTotalSize(); +int HeapEntry::RetainedSize() { + if (calculated_data_index_ == kNoCalculatedData) { + calculated_data_index_ = snapshot_->AddCalculatedData(); + } + return snapshot_->GetCalculatedData( + calculated_data_index_).RetainedSize(this); } -int HeapEntry::NonSharedTotalSize() { - return non_shared_total_size_ != kUnknownSize ? - non_shared_total_size_ : CalculateNonSharedTotalSize(); +List<HeapGraphPath*>* HeapEntry::GetRetainingPaths() { + if (calculated_data_index_ == kNoCalculatedData) { + calculated_data_index_ = snapshot_->AddCalculatedData(); + } + return snapshot_->GetCalculatedData( + calculated_data_index_).GetRetainingPaths(this); } @@ -882,16 +903,16 @@ template<class Visitor> void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) { List<HeapEntry*> list(10); list.Add(this); - this->PaintReachable(); + this->paint_reachable(); visitor->Apply(this); while (!list.is_empty()) { HeapEntry* entry = list.RemoveLast(); - const int children_count = entry->children_.length(); - for (int i = 0; i < children_count; ++i) { - HeapEntry* child = entry->children_[i]->to(); + Vector<HeapGraphEdge> children = entry->children(); + for (int i = 0; i < children.length(); ++i) { + HeapEntry* child = children[i].to(); if (!child->painted_reachable()) { list.Add(child); - child->PaintReachable(); + child->paint_reachable(); visitor->Apply(child); } } @@ -910,78 +931,158 @@ void HeapEntry::PaintAllReachable() { } -class TotalSizeCalculator { - public: - TotalSizeCalculator() - : total_size_(0) { +void HeapEntry::Print(int max_depth, int indent) { + OS::Print("%6d %6d %6d [%ld] ", + self_size(), ReachableSize(), RetainedSize(), id_); + if (type() != kString) { + OS::Print("%s %.40s\n", TypeAsString(), name_); + } else { + OS::Print("\""); + const char* c = name_; + while (*c && (c - name_) <= 40) { + if (*c != '\n') + OS::Print("%c", *c); + else + OS::Print("\\n"); + ++c; + } + OS::Print("\"\n"); } + if (--max_depth == 0) return; + Vector<HeapGraphEdge> ch = children(); + for (int i = 0; i < ch.length(); ++i) { + HeapGraphEdge& edge = ch[i]; + switch (edge.type()) { + case HeapGraphEdge::kContextVariable: + OS::Print(" %*c #%s: ", indent, ' ', edge.name()); + break; + case HeapGraphEdge::kElement: + OS::Print(" %*c %d: ", indent, ' ', edge.index()); + break; + case HeapGraphEdge::kInternal: + OS::Print(" %*c $%s: ", indent, ' ', edge.name()); + break; + case HeapGraphEdge::kProperty: + OS::Print(" %*c %s: ", indent, ' ', edge.name()); + break; + default: + OS::Print("!!! 
unknown edge type: %d ", edge.type()); + } + edge.to()->Print(max_depth, indent + 2); + } +} - int total_size() const { return total_size_; } - void Apply(HeapEntry* entry) { - total_size_ += entry->self_size(); +const char* HeapEntry::TypeAsString() { + switch (type()) { + case kInternal: return "/internal/"; + case kObject: return "/object/"; + case kClosure: return "/closure/"; + case kString: return "/string/"; + case kCode: return "/code/"; + case kArray: return "/array/"; + default: return "???"; } +} - private: - int total_size_; -}; -int HeapEntry::CalculateTotalSize() { - snapshot_->ClearPaint(); - TotalSizeCalculator calc; - ApplyAndPaintAllReachable(&calc); - total_size_ = calc.total_size(); - return total_size_; +int HeapEntry::EntriesSize(int entries_count, + int children_count, + int retainers_count) { + return sizeof(HeapEntry) * entries_count // NOLINT + + sizeof(HeapGraphEdge) * children_count // NOLINT + + sizeof(HeapGraphEdge*) * retainers_count; // NOLINT } -class NonSharedSizeCalculator { +static void DeleteHeapGraphPath(HeapGraphPath** path_ptr) { + delete *path_ptr; +} + +void HeapEntryCalculatedData::Dispose() { + if (retaining_paths_ != NULL) retaining_paths_->Iterate(DeleteHeapGraphPath); + delete retaining_paths_; +} + + +int HeapEntryCalculatedData::ReachableSize(HeapEntry* entry) { + if (reachable_size_ == kUnknownSize) CalculateSizes(entry); + return reachable_size_; +} + + +int HeapEntryCalculatedData::RetainedSize(HeapEntry* entry) { + if (retained_size_ == kUnknownSize) CalculateSizes(entry); + return retained_size_; +} + + +class ReachableSizeCalculator { public: - NonSharedSizeCalculator() - : non_shared_total_size_(0) { + ReachableSizeCalculator() + : reachable_size_(0) { } - int non_shared_total_size() const { return non_shared_total_size_; } + int reachable_size() const { return reachable_size_; } void Apply(HeapEntry* entry) { - if (entry->painted_reachable()) { - non_shared_total_size_ += entry->self_size(); + reachable_size_ += entry->self_size(); + } + + private: + int reachable_size_; +}; + +class RetainedSizeCalculator { + public: + RetainedSizeCalculator() + : retained_size_(0) { + } + + int reained_size() const { return retained_size_; } + + void Apply(HeapEntry** entry_ptr) { + if ((*entry_ptr)->painted_reachable()) { + retained_size_ += (*entry_ptr)->self_size(); } } private: - int non_shared_total_size_; + int retained_size_; }; -int HeapEntry::CalculateNonSharedTotalSize() { - // To calculate non-shared total size, first we paint all reachable - // nodes in one color, then we paint all nodes reachable from other - // nodes with a different color. Then we consider only nodes painted - // with the first color for calculating the total size. - snapshot_->ClearPaint(); - PaintAllReachable(); +void HeapEntryCalculatedData::CalculateSizes(HeapEntry* entry) { + // To calculate retained size, first we paint all reachable nodes in + // one color (and calculate reachable size as a byproduct), then we + // paint (or re-paint) all nodes reachable from other nodes with a + // different color. Then we consider only nodes painted with the + // first color for calculating the retained size. 
+ entry->snapshot()->ClearPaint(); + ReachableSizeCalculator rch_size_calc; + entry->ApplyAndPaintAllReachable(&rch_size_calc); + reachable_size_ = rch_size_calc.reachable_size(); List<HeapEntry*> list(10); - if (this != snapshot_->root()) { - list.Add(snapshot_->root()); - snapshot_->root()->PaintReachableFromOthers(); + HeapEntry* root = entry->snapshot()->root(); + if (entry != root) { + list.Add(root); + root->paint_reachable_from_others(); } while (!list.is_empty()) { - HeapEntry* entry = list.RemoveLast(); - const int children_count = entry->children_.length(); - for (int i = 0; i < children_count; ++i) { - HeapEntry* child = entry->children_[i]->to(); - if (child != this && child->not_painted_reachable_from_others()) { + HeapEntry* curr = list.RemoveLast(); + Vector<HeapGraphEdge> children = curr->children(); + for (int i = 0; i < children.length(); ++i) { + HeapEntry* child = children[i].to(); + if (child != entry && child->not_painted_reachable_from_others()) { list.Add(child); - child->PaintReachableFromOthers(); + child->paint_reachable_from_others(); } } } - NonSharedSizeCalculator calculator; - snapshot_->IterateEntries(&calculator); - non_shared_total_size_ = calculator.non_shared_total_size(); - return non_shared_total_size_; + RetainedSizeCalculator ret_size_calc; + entry->snapshot()->IterateEntries(&ret_size_calc); + retained_size_ = ret_size_calc.reained_size(); } @@ -1019,125 +1120,34 @@ class CachedHeapGraphPath { }; -const List<HeapGraphPath*>* HeapEntry::GetRetainingPaths() { - if (retaining_paths_.length() == 0 && retainers_.length() != 0) { +List<HeapGraphPath*>* HeapEntryCalculatedData::GetRetainingPaths( + HeapEntry* entry) { + if (retaining_paths_ == NULL) retaining_paths_ = new List<HeapGraphPath*>(4); + if (retaining_paths_->length() == 0 && entry->retainers().length() != 0) { CachedHeapGraphPath path; - FindRetainingPaths(this, &path); + FindRetainingPaths(entry, &path); } - return &retaining_paths_; + return retaining_paths_; } -void HeapEntry::FindRetainingPaths(HeapEntry* node, - CachedHeapGraphPath* prev_path) { - for (int i = 0; i < node->retainers_.length(); ++i) { - HeapGraphEdge* ret_edge = node->retainers_[i]; - if (prev_path->ContainsNode(ret_edge->from())) continue; - if (ret_edge->from() != snapshot_->root()) { +void HeapEntryCalculatedData::FindRetainingPaths( + HeapEntry* entry, + CachedHeapGraphPath* prev_path) { + Vector<HeapGraphEdge*> retainers = entry->retainers(); + for (int i = 0; i < retainers.length(); ++i) { + HeapGraphEdge* ret_edge = retainers[i]; + if (prev_path->ContainsNode(ret_edge->From())) continue; + if (ret_edge->From() != entry->snapshot()->root()) { CachedHeapGraphPath path(*prev_path); path.Add(ret_edge); - FindRetainingPaths(ret_edge->from(), &path); + FindRetainingPaths(ret_edge->From(), &path); } else { HeapGraphPath* ret_path = new HeapGraphPath(*prev_path->path()); ret_path->Set(0, ret_edge); - retaining_paths_.Add(ret_path); - } - } -} - - -static void RemoveEdge(List<HeapGraphEdge*>* list, HeapGraphEdge* edge) { - for (int i = 0; i < list->length(); ) { - if (list->at(i) == edge) { - list->Remove(i); - return; - } else { - ++i; + retaining_paths_->Add(ret_path); } } - UNREACHABLE(); -} - - -void HeapEntry::RemoveChild(HeapGraphEdge* edge) { - RemoveEdge(&children_, edge); - delete edge; -} - - -void HeapEntry::RemoveRetainer(HeapGraphEdge* edge) { - RemoveEdge(&retainers_, edge); -} - - -void HeapEntry::CutEdges() { - for (int i = 0; i < children_.length(); ++i) { - HeapGraphEdge* edge = children_[i]; - 
edge->to()->RemoveRetainer(edge); - } - children_.Iterate(DeleteHeapGraphEdge); - children_.Clear(); - - for (int i = 0; i < retainers_.length(); ++i) { - HeapGraphEdge* edge = retainers_[i]; - edge->from()->RemoveChild(edge); - } - retainers_.Clear(); -} - - -void HeapEntry::Print(int max_depth, int indent) { - OS::Print("%6d %6d %6d [%ld] ", - self_size_, TotalSize(), NonSharedTotalSize(), id_); - if (type_ != STRING) { - OS::Print("%s %.40s\n", TypeAsString(), name_); - } else { - OS::Print("\""); - const char* c = name_; - while (*c && (c - name_) <= 40) { - if (*c != '\n') - OS::Print("%c", *c); - else - OS::Print("\\n"); - ++c; - } - OS::Print("\"\n"); - } - if (--max_depth == 0) return; - const int children_count = children_.length(); - for (int i = 0; i < children_count; ++i) { - HeapGraphEdge* edge = children_[i]; - switch (edge->type()) { - case HeapGraphEdge::CONTEXT_VARIABLE: - OS::Print(" %*c #%s: ", indent, ' ', edge->name()); - break; - case HeapGraphEdge::ELEMENT: - OS::Print(" %*c %d: ", indent, ' ', edge->index()); - break; - case HeapGraphEdge::INTERNAL: - OS::Print(" %*c $%s: ", indent, ' ', edge->name()); - break; - case HeapGraphEdge::PROPERTY: - OS::Print(" %*c %s: ", indent, ' ', edge->name()); - break; - default: - OS::Print("!!! unknown edge type: %d ", edge->type()); - } - edge->to()->Print(max_depth, indent + 2); - } -} - - -const char* HeapEntry::TypeAsString() { - switch (type_) { - case INTERNAL: return "/internal/"; - case OBJECT: return "/object/"; - case CLOSURE: return "/closure/"; - case STRING: return "/string/"; - case CODE: return "/code/"; - case ARRAY: return "/array/"; - default: return "???"; - } } @@ -1151,21 +1161,21 @@ HeapGraphPath::HeapGraphPath(const List<HeapGraphEdge*>& path) void HeapGraphPath::Print() { - path_[0]->from()->Print(1, 0); + path_[0]->From()->Print(1, 0); for (int i = 0; i < path_.length(); ++i) { OS::Print(" -> "); HeapGraphEdge* edge = path_[i]; switch (edge->type()) { - case HeapGraphEdge::CONTEXT_VARIABLE: + case HeapGraphEdge::kContextVariable: OS::Print("[#%s] ", edge->name()); break; - case HeapGraphEdge::ELEMENT: + case HeapGraphEdge::kElement: OS::Print("[%d] ", edge->index()); break; - case HeapGraphEdge::INTERNAL: + case HeapGraphEdge::kInternal: OS::Print("[$%s] ", edge->name()); break; - case HeapGraphEdge::PROPERTY: + case HeapGraphEdge::kProperty: OS::Print("[%s] ", edge->name()); break; default: @@ -1177,76 +1187,27 @@ void HeapGraphPath::Print() { } -class IndexedReferencesExtractor : public ObjectVisitor { - public: - IndexedReferencesExtractor(HeapSnapshot* snapshot, HeapEntry* parent) - : snapshot_(snapshot), - parent_(parent) { - } - - void VisitPointer(Object** o) { - if (!(*o)->IsHeapObject()) return; - HeapEntry* entry = snapshot_->GetEntry(HeapObject::cast(*o)); - if (entry != NULL) { - parent_->SetAutoIndexReference(entry); - } - } - - void VisitPointers(Object** start, Object** end) { - for (Object** p = start; p < end; p++) VisitPointer(p); - } - - private: - HeapSnapshot* snapshot_; - HeapEntry* parent_; -}; - - -HeapEntriesMap::HeapEntriesMap() - : entries_(HeapObjectsMatch) { -} - - -HeapEntriesMap::~HeapEntriesMap() { - for (HashMap::Entry* p = entries_.Start(); - p != NULL; - p = entries_.Next(p)) { - if (!IsAlias(p->value)) delete reinterpret_cast<HeapEntry*>(p->value); - } -} +HeapObject *const HeapSnapshot::kInternalRootObject = + reinterpret_cast<HeapObject*>(1); -void HeapEntriesMap::Alias(HeapObject* object, HeapEntry* entry) { - HashMap::Entry* cache_entry = entries_.Lookup(object, 
Hash(object), true); - if (cache_entry->value == NULL) - cache_entry->value = reinterpret_cast<void*>( - reinterpret_cast<intptr_t>(entry) | kAliasTag); -} +// It is very important to keep objects that form a heap snapshot +// as small as possible. +namespace { // Avoid littering the global namespace. +template <size_t ptr_size> struct SnapshotSizeConstants; -void HeapEntriesMap::Apply(void (HeapEntry::*Func)(void)) { - for (HashMap::Entry* p = entries_.Start(); - p != NULL; - p = entries_.Next(p)) { - if (!IsAlias(p->value)) (reinterpret_cast<HeapEntry*>(p->value)->*Func)(); - } -} - - -HeapEntry* HeapEntriesMap::Map(HeapObject* object) { - HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), false); - return cache_entry != NULL ? - reinterpret_cast<HeapEntry*>( - reinterpret_cast<intptr_t>(cache_entry->value) & (~kAliasTag)) : NULL; -} - +template <> struct SnapshotSizeConstants<4> { + static const int kExpectedHeapGraphEdgeSize = 12; + static const int kExpectedHeapEntrySize = 32; +}; -void HeapEntriesMap::Pair(HeapObject* object, HeapEntry* entry) { - HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), true); - ASSERT(cache_entry->value == NULL); - cache_entry->value = entry; -} +template <> struct SnapshotSizeConstants<8> { + static const int kExpectedHeapGraphEdgeSize = 24; + static const int kExpectedHeapEntrySize = 40; +}; +} // namespace HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, const char* title, @@ -1254,176 +1215,157 @@ HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, : collection_(collection), title_(title), uid_(uid), - root_(this), - sorted_entries_(NULL) { + root_entry_index_(-1), + raw_entries_(NULL), + entries_sorted_(false) { + STATIC_ASSERT( + sizeof(HeapGraphEdge) == + SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapGraphEdgeSize); // NOLINT + STATIC_ASSERT( + sizeof(HeapEntry) == + SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapEntrySize); // NOLINT } +static void DisposeCalculatedData(HeapEntryCalculatedData* cdata) { + cdata->Dispose(); +} + HeapSnapshot::~HeapSnapshot() { - delete sorted_entries_; + DeleteArray(raw_entries_); + calculated_data_.Iterate(DisposeCalculatedData); } -void HeapSnapshot::ClearPaint() { - root_.ClearPaint(); - entries_.Apply(&HeapEntry::ClearPaint); +void HeapSnapshot::AllocateEntries(int entries_count, + int children_count, + int retainers_count) { + ASSERT(raw_entries_ == NULL); + raw_entries_ = NewArray<char>( + HeapEntry::EntriesSize(entries_count, children_count, retainers_count)); } -HeapEntry* HeapSnapshot::GetEntry(Object* obj) { - if (!obj->IsHeapObject()) return NULL; - HeapObject* object = HeapObject::cast(obj); - - { - HeapEntry* existing = FindEntry(object); - if (existing != NULL) return existing; - } - - // Add new entry. - if (object->IsJSFunction()) { +HeapEntry* HeapSnapshot::AddEntry(HeapObject* object, + int children_count, + int retainers_count) { + if (object == kInternalRootObject) { + ASSERT(root_entry_index_ == -1); + root_entry_index_ = entries_.length(); + HeapEntry* entry = GetNextEntryToInit(); + entry->Init(this, children_count, retainers_count); + return entry; + } else if (object->IsJSFunction()) { JSFunction* func = JSFunction::cast(object); SharedFunctionInfo* shared = func->shared(); String* name = String::cast(shared->name())->length() > 0 ? 
String::cast(shared->name()) : shared->inferred_name(); - return AddEntry(object, HeapEntry::CLOSURE, collection_->GetName(name)); + return AddEntry(object, + HeapEntry::kClosure, + collection_->GetName(name), + children_count, + retainers_count); } else if (object->IsJSObject()) { return AddEntry(object, - HeapEntry::OBJECT, + HeapEntry::kObject, collection_->GetName( - JSObject::cast(object)->constructor_name())); - } else if (object->IsJSGlobalPropertyCell()) { - HeapEntry* value = GetEntry(JSGlobalPropertyCell::cast(object)->value()); - // If GPC references an object that we have interest in, add the object. - // We don't store HeapEntries for GPCs. Instead, we make our hash map - // to point to object's HeapEntry by GPCs address. - if (value != NULL) AddEntryAlias(object, value); - return value; + JSObject::cast(object)->constructor_name()), + children_count, + retainers_count); } else if (object->IsString()) { return AddEntry(object, - HeapEntry::STRING, - collection_->GetName(String::cast(object))); + HeapEntry::kString, + collection_->GetName(String::cast(object)), + children_count, + retainers_count); } else if (object->IsCode()) { - return AddEntry(object, HeapEntry::CODE); + return AddEntry(object, + HeapEntry::kCode, + "", + children_count, + retainers_count); } else if (object->IsSharedFunctionInfo()) { SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); String* name = String::cast(shared->name())->length() > 0 ? String::cast(shared->name()) : shared->inferred_name(); - return AddEntry(object, HeapEntry::CODE, collection_->GetName(name)); + return AddEntry(object, + HeapEntry::kCode, + collection_->GetName(name), + children_count, + retainers_count); } else if (object->IsScript()) { Script* script = Script::cast(object); return AddEntry(object, - HeapEntry::CODE, + HeapEntry::kCode, script->name()->IsString() ? - collection_->GetName(String::cast(script->name())) : ""); + collection_->GetName(String::cast(script->name())) : "", + children_count, + retainers_count); } else if (object->IsFixedArray()) { - return AddEntry(object, HeapEntry::ARRAY); + return AddEntry(object, + HeapEntry::kArray, + "", + children_count, + retainers_count); } // No interest in this object. 
return NULL; } -void HeapSnapshot::SetClosureReference(HeapEntry* parent, - String* reference_name, - Object* child) { - HeapEntry* child_entry = GetEntry(child); - if (child_entry != NULL) { - parent->SetClosureReference( - collection_->GetName(reference_name), child_entry); - } +bool HeapSnapshot::WillAddEntry(HeapObject* object) { + return object == kInternalRootObject + || object->IsJSFunction() + || object->IsJSObject() + || object->IsString() + || object->IsCode() + || object->IsSharedFunctionInfo() + || object->IsScript() + || object->IsFixedArray(); } -void HeapSnapshot::SetElementReference(HeapEntry* parent, - int index, - Object* child) { - HeapEntry* child_entry = GetEntry(child); - if (child_entry != NULL) { - parent->SetElementReference(index, child_entry); - } +static void HeapEntryClearPaint(HeapEntry** entry_ptr) { + (*entry_ptr)->clear_paint(); } - -void HeapSnapshot::SetInternalReference(HeapEntry* parent, - const char* reference_name, - Object* child) { - HeapEntry* child_entry = GetEntry(child); - if (child_entry != NULL) { - parent->SetInternalReference(reference_name, child_entry); - } +void HeapSnapshot::ClearPaint() { + entries_.Iterate(HeapEntryClearPaint); } -void HeapSnapshot::SetPropertyReference(HeapEntry* parent, - String* reference_name, - Object* child) { - HeapEntry* child_entry = GetEntry(child); - if (child_entry != NULL) { - parent->SetPropertyReference( - collection_->GetName(reference_name), child_entry); - } +int HeapSnapshot::AddCalculatedData() { + calculated_data_.Add(HeapEntryCalculatedData()); + return calculated_data_.length() - 1; } HeapEntry* HeapSnapshot::AddEntry(HeapObject* object, HeapEntry::Type type, - const char* name) { - HeapEntry* entry = new HeapEntry(this, - type, - name, - collection_->GetObjectId(object->address()), - GetObjectSize(object), - GetObjectSecurityToken(object)); - entries_.Pair(object, entry); - - // Detect, if this is a JS global object of the current context, and - // add it to snapshot's roots. There can be several JS global objects - // in a context. 
- if (object->IsJSGlobalProxy()) { - int global_security_token = GetGlobalSecurityToken(); - int object_security_token = - collection_->token_enumerator()->GetTokenId( - Context::cast( - JSGlobalProxy::cast(object)->context())->security_token()); - if (object_security_token == TokenEnumerator::kNoSecurityToken - || object_security_token == global_security_token) { - HeapEntry* global_object_entry = - GetEntry(HeapObject::cast(object->map()->prototype())); - ASSERT(global_object_entry != NULL); - root_.SetAutoIndexReference(global_object_entry); - } - } - + const char* name, + int children_count, + int retainers_count) { + HeapEntry* entry = GetNextEntryToInit(); + entry->Init(this, + type, + name, + collection_->GetObjectId(object->address()), + GetObjectSize(object), + children_count, + retainers_count); return entry; } -class EdgesCutter { - public: - explicit EdgesCutter(int global_security_token) - : global_security_token_(global_security_token) { - } - - void Apply(HeapEntry* entry) { - if (entry->security_token_id() != TokenEnumerator::kNoSecurityToken - && entry->security_token_id() != global_security_token_) { - entry->CutEdges(); - } +HeapEntry* HeapSnapshot::GetNextEntryToInit() { + if (entries_.length() > 0) { + HeapEntry* last_entry = entries_.last(); + entries_.Add(reinterpret_cast<HeapEntry*>( + reinterpret_cast<char*>(last_entry) + last_entry->EntrySize())); + } else { + entries_.Add(reinterpret_cast<HeapEntry*>(raw_entries_)); } - - private: - const int global_security_token_; -}; - -void HeapSnapshot::CutObjectsFromForeignSecurityContexts() { - EdgesCutter cutter(GetGlobalSecurityToken()); - entries_.Apply(&cutter); -} - - -int HeapSnapshot::GetGlobalSecurityToken() { - return collection_->token_enumerator()->GetTokenId( - Top::context()->global()->global_context()->security_token()); + return entries_.last(); } @@ -1433,24 +1375,14 @@ int HeapSnapshot::GetObjectSize(HeapObject* obj) { } -int HeapSnapshot::GetObjectSecurityToken(HeapObject* obj) { - if (obj->IsGlobalContext()) { - return collection_->token_enumerator()->GetTokenId( - Context::cast(obj)->security_token()); - } else { - return TokenEnumerator::kNoSecurityToken; - } -} - - int HeapSnapshot::CalculateNetworkSize(JSObject* obj) { int size = obj->Size(); // If 'properties' and 'elements' are non-empty (thus, non-shared), // take their size into account. - if (FixedArray::cast(obj->properties())->length() != 0) { + if (obj->properties() != Heap::empty_fixed_array()) { size += obj->properties()->Size(); } - if (FixedArray::cast(obj->elements())->length() != 0) { + if (obj->elements() != Heap::empty_fixed_array()) { size += obj->elements()->Size(); } // For functions, also account non-empty context and literals sizes. 
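The AllocateEntries / GetNextEntryToInit pair above packs every HeapEntry, its outgoing HeapGraphEdges and its retainer back-pointers into the single raw_entries_ buffer sized by HeapEntry::EntriesSize, with each new entry placed immediately after the previous one via EntrySize(). A standalone sketch of that bump-allocation layout; PackedNode and Edge are illustrative stand-ins, not the real HeapEntry/HeapGraphEdge:

// Sketch of the packed layout behind raw_entries_ (illustrative types).
#include <cstddef>

struct Edge { int index; void* to; };

struct PackedNode {
  int children_count;
  int retainers_count;
  size_t EntrySize() const {
    return sizeof(PackedNode)
         + sizeof(Edge) * children_count      // outgoing edges follow the node
         + sizeof(Edge*) * retainers_count;   // then pointers to retainer edges
  }
};

// Counterpart of HeapEntry::EntriesSize(): size the whole buffer up front.
static size_t BufferSize(int entries, int children, int retainers) {
  return sizeof(PackedNode) * entries
       + sizeof(Edge) * children
       + sizeof(Edge*) * retainers;
}

// Counterpart of GetNextEntryToInit(): bump past the previous entry.
static PackedNode* NextEntry(char* buffer, PackedNode* last) {
  if (last == NULL) return reinterpret_cast<PackedNode*>(buffer);
  return reinterpret_cast<PackedNode*>(
      reinterpret_cast<char*>(last) + last->EntrySize());
}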
@@ -1467,15 +1399,10 @@ int HeapSnapshot::CalculateNetworkSize(JSObject* obj) { } -class EntriesCollector { - public: - explicit EntriesCollector(List<HeapEntry*>* list) : list_(list) { } - void Apply(HeapEntry* entry) { - list_->Add(entry); - } - private: - List<HeapEntry*>* list_; -}; +HeapSnapshotsDiff* HeapSnapshot::CompareWith(HeapSnapshot* snapshot) { + return collection_->CompareSnapshots(this, snapshot); +} + template<class T> static int SortByIds(const T* entry1_ptr, @@ -1485,22 +1412,16 @@ static int SortByIds(const T* entry1_ptr, } List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() { - if (sorted_entries_ != NULL) return sorted_entries_; - sorted_entries_ = new List<HeapEntry*>(entries_.capacity()); - EntriesCollector collector(sorted_entries_); - entries_.Apply(&collector); - sorted_entries_->Sort(SortByIds); - return sorted_entries_; -} - - -HeapSnapshotsDiff* HeapSnapshot::CompareWith(HeapSnapshot* snapshot) { - return collection_->CompareSnapshots(this, snapshot); + if (!entries_sorted_) { + entries_.Sort(SortByIds); + entries_sorted_ = true; + } + return &entries_; } void HeapSnapshot::Print(int max_depth) { - root_.Print(max_depth, 0); + root()->Print(max_depth, 0); } @@ -1571,6 +1492,7 @@ uint64_t HeapObjectsMap::FindEntry(Address addr) { void HeapObjectsMap::RemoveDeadEntries() { List<EntryInfo>* new_entries = new List<EntryInfo>(); + List<void*> dead_entries; for (HashMap::Entry* entry = entries_map_.Start(); entry != NULL; entry = entries_map_.Next(entry)) { @@ -1580,8 +1502,15 @@ void HeapObjectsMap::RemoveDeadEntries() { if (entry_info.accessed) { entry->value = reinterpret_cast<void*>(new_entries->length()); new_entries->Add(EntryInfo(entry_info.id, false)); + } else { + dead_entries.Add(entry->key); } } + for (int i = 0; i < dead_entries.length(); ++i) { + void* raw_entry = dead_entries[i]; + entries_map_.Remove( + raw_entry, AddressHash(reinterpret_cast<Address>(raw_entry))); + } delete entries_; entries_ = new_entries; } @@ -1635,53 +1564,343 @@ HeapSnapshotsDiff* HeapSnapshotsCollection::CompareSnapshots( } +HeapEntriesMap::HeapEntriesMap() + : entries_(HeapObjectsMatch), + entries_count_(0), + total_children_count_(0), + total_retainers_count_(0) { +} + + +HeapEntriesMap::~HeapEntriesMap() { + for (HashMap::Entry* p = entries_.Start(); p != NULL; p = entries_.Next(p)) { + if (!IsAlias(p->value)) delete reinterpret_cast<EntryInfo*>(p->value); + } +} + + +void HeapEntriesMap::Alias(HeapObject* from, HeapObject* to) { + HashMap::Entry* from_cache_entry = entries_.Lookup(from, Hash(from), true); + HashMap::Entry* to_cache_entry = entries_.Lookup(to, Hash(to), false); + if (from_cache_entry->value == NULL) { + ASSERT(to_cache_entry != NULL); + from_cache_entry->value = MakeAlias(to_cache_entry->value); + } +} + + +HeapEntry* HeapEntriesMap::Map(HeapObject* object) { + HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), false); + if (cache_entry != NULL) { + EntryInfo* entry_info = + reinterpret_cast<EntryInfo*>(Unalias(cache_entry->value)); + return entry_info->entry; + } else { + return NULL; + } +} + + +void HeapEntriesMap::Pair(HeapObject* object, HeapEntry* entry) { + HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), true); + ASSERT(cache_entry->value == NULL); + cache_entry->value = new EntryInfo(entry); + ++entries_count_; +} + + +void HeapEntriesMap::CountReference(HeapObject* from, HeapObject* to, + int* prev_children_count, + int* prev_retainers_count) { + HashMap::Entry* from_cache_entry = entries_.Lookup(from, 
Hash(from), true); + HashMap::Entry* to_cache_entry = entries_.Lookup(to, Hash(to), false); + ASSERT(from_cache_entry != NULL); + ASSERT(to_cache_entry != NULL); + EntryInfo* from_entry_info = + reinterpret_cast<EntryInfo*>(Unalias(from_cache_entry->value)); + EntryInfo* to_entry_info = + reinterpret_cast<EntryInfo*>(Unalias(to_cache_entry->value)); + if (prev_children_count) + *prev_children_count = from_entry_info->children_count; + if (prev_retainers_count) + *prev_retainers_count = to_entry_info->retainers_count; + ++from_entry_info->children_count; + ++to_entry_info->retainers_count; + ++total_children_count_; + ++total_retainers_count_; +} + + +template<class Visitor> +void HeapEntriesMap::UpdateEntries(Visitor* visitor) { + for (HashMap::Entry* p = entries_.Start(); + p != NULL; + p = entries_.Next(p)) { + if (!IsAlias(p->value)) { + EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value); + entry_info->entry = visitor->GetEntry( + reinterpret_cast<HeapObject*>(p->key), + entry_info->children_count, + entry_info->retainers_count); + entry_info->children_count = 0; + entry_info->retainers_count = 0; + } + } +} + + HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot) - : snapshot_(snapshot) { + : snapshot_(snapshot), + collection_(snapshot->collection()), + filler_(NULL) { } +HeapEntry *const +HeapSnapshotGenerator::SnapshotFillerInterface::kHeapEntryPlaceholder = + reinterpret_cast<HeapEntry*>(1); + +class SnapshotCounter : public HeapSnapshotGenerator::SnapshotFillerInterface { + public: + explicit SnapshotCounter(HeapEntriesMap* entries) + : entries_(entries) { } + HeapEntry* AddEntry(HeapObject* obj) { + entries_->Pair(obj, kHeapEntryPlaceholder); + return kHeapEntryPlaceholder; + } + void SetElementReference(HeapObject* parent_obj, + HeapEntry*, + int, + Object* child_obj, + HeapEntry*) { + entries_->CountReference(parent_obj, HeapObject::cast(child_obj)); + } + void SetNamedReference(HeapGraphEdge::Type, + HeapObject* parent_obj, + HeapEntry*, + const char*, + Object* child_obj, + HeapEntry*) { + entries_->CountReference(parent_obj, HeapObject::cast(child_obj)); + } + void SetRootReference(Object* child_obj, HeapEntry*) { + entries_->CountReference( + HeapSnapshot::kInternalRootObject, HeapObject::cast(child_obj)); + } + private: + HeapEntriesMap* entries_; +}; + + +class SnapshotFiller : public HeapSnapshotGenerator::SnapshotFillerInterface { + public: + explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries) + : snapshot_(snapshot), + collection_(snapshot->collection()), + entries_(entries) { } + HeapEntry* AddEntry(HeapObject* obj) { + UNREACHABLE(); + return NULL; + } + void SetElementReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + int index, + Object* child_obj, + HeapEntry* child_entry) { + int child_index, retainer_index; + entries_->CountReference(parent_obj, HeapObject::cast(child_obj), + &child_index, &retainer_index); + parent_entry->SetElementReference( + child_index, index, child_entry, retainer_index); + } + void SetNamedReference(HeapGraphEdge::Type type, + HeapObject* parent_obj, + HeapEntry* parent_entry, + const char* reference_name, + Object* child_obj, + HeapEntry* child_entry) { + int child_index, retainer_index; + entries_->CountReference(parent_obj, HeapObject::cast(child_obj), + &child_index, &retainer_index); + parent_entry->SetNamedReference(type, + child_index, + reference_name, + child_entry, + retainer_index); + } + void SetRootReference(Object* child_obj, HeapEntry* child_entry) { + int 
child_index, retainer_index; + entries_->CountReference( + HeapSnapshot::kInternalRootObject, HeapObject::cast(child_obj), + &child_index, &retainer_index); + snapshot_->root()->SetElementReference( + child_index, child_index + 1, child_entry, retainer_index); + } + private: + HeapSnapshot* snapshot_; + HeapSnapshotsCollection* collection_; + HeapEntriesMap* entries_; +}; + +class SnapshotAllocator { + public: + explicit SnapshotAllocator(HeapSnapshot* snapshot) + : snapshot_(snapshot) { } + HeapEntry* GetEntry( + HeapObject* obj, int children_count, int retainers_count) { + HeapEntry* entry = + snapshot_->AddEntry(obj, children_count, retainers_count); + ASSERT(entry != NULL); + return entry; + } + private: + HeapSnapshot* snapshot_; +}; + void HeapSnapshotGenerator::GenerateSnapshot() { AssertNoAllocation no_alloc; - // Iterate heap contents. - HeapIterator iterator; - for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { + // Pass 1. Iterate heap contents to count entries and references. + SnapshotCounter counter(&entries_); + filler_ = &counter; + filler_->AddEntry(HeapSnapshot::kInternalRootObject); + HeapIterator iterator1; + for (HeapObject* obj = iterator1.next(); + obj != NULL; + obj = iterator1.next()) { ExtractReferences(obj); } - snapshot_->CutObjectsFromForeignSecurityContexts(); + // Allocate and fill entries in the snapshot, allocate references. + snapshot_->AllocateEntries(entries_.entries_count(), + entries_.total_children_count(), + entries_.total_retainers_count()); + SnapshotAllocator allocator(snapshot_); + entries_.UpdateEntries(&allocator); + + // Pass 2. Fill references. + SnapshotFiller filler(snapshot_, &entries_); + filler_ = &filler; + HeapIterator iterator2; + for (HeapObject* obj = iterator2.next(); + obj != NULL; + obj = iterator2.next()) { + ExtractReferences(obj); + } +} + + +HeapEntry* HeapSnapshotGenerator::GetEntry(Object* obj) { + if (!obj->IsHeapObject()) return NULL; + HeapObject* object = HeapObject::cast(obj); + HeapEntry* entry = entries_.Map(object); + + // A new entry. + if (entry == NULL) { + if (obj->IsJSGlobalPropertyCell()) { + Object* cell_target = JSGlobalPropertyCell::cast(obj)->value(); + entry = GetEntry(cell_target); + // If GPC references an object that we have interest in (see + // HeapSnapshot::AddEntry, WillAddEntry), add the object. We + // don't store HeapEntries for GPCs. Instead, we make our hash + // map to point to object's HeapEntry by GPCs address. 
+ if (entry != NULL) { + entries_.Alias(object, HeapObject::cast(cell_target)); + } + return entry; + } + + if (snapshot_->WillAddEntry(object)) entry = filler_->AddEntry(object); + } + + return entry; +} + + +int HeapSnapshotGenerator::GetGlobalSecurityToken() { + return collection_->token_enumerator()->GetTokenId( + Top::context()->global()->global_context()->security_token()); +} + + +int HeapSnapshotGenerator::GetObjectSecurityToken(HeapObject* obj) { + if (obj->IsGlobalContext()) { + return collection_->token_enumerator()->GetTokenId( + Context::cast(obj)->security_token()); + } else { + return TokenEnumerator::kNoSecurityToken; + } } +class IndexedReferencesExtractor : public ObjectVisitor { + public: + IndexedReferencesExtractor(HeapSnapshotGenerator* generator, + HeapObject* parent_obj, + HeapEntry* parent_entry) + : generator_(generator), + parent_obj_(parent_obj), + parent_(parent_entry), + next_index_(1) { + } + + void VisitPointer(Object** o) { + generator_->SetElementReference(parent_obj_, parent_, next_index_++, *o); + } + + void VisitPointers(Object** start, Object** end) { + for (Object** p = start; p < end; p++) VisitPointer(p); + } + + private: + HeapSnapshotGenerator* generator_; + HeapObject* parent_obj_; + HeapEntry* parent_; + int next_index_; +}; + + void HeapSnapshotGenerator::ExtractReferences(HeapObject* obj) { - HeapEntry* entry = snapshot_->GetEntry(obj); - if (entry == NULL) return; - if (entry->visited()) return; + // We need to reference JS global objects from snapshot's root. + // We also need to only include global objects from the current + // security context. And we don't want to add the global proxy, + // as we don't have a special type for it. + if (obj->IsJSGlobalProxy()) { + int global_security_token = GetGlobalSecurityToken(); + JSGlobalProxy* proxy = JSGlobalProxy::cast(obj); + int object_security_token = + collection_->token_enumerator()->GetTokenId( + Context::cast(proxy->context())->security_token()); + if (object_security_token == TokenEnumerator::kNoSecurityToken + || object_security_token == global_security_token) { + SetRootReference(proxy->map()->prototype()); + } + return; + } + + HeapEntry* entry = GetEntry(obj); + if (entry == NULL) return; // No interest in this object. 
if (obj->IsJSObject()) { JSObject* js_obj = JSObject::cast(obj); ExtractClosureReferences(js_obj, entry); ExtractPropertyReferences(js_obj, entry); ExtractElementReferences(js_obj, entry); - snapshot_->SetPropertyReference( - entry, Heap::prototype_symbol(), js_obj->map()->prototype()); - } else if (obj->IsJSGlobalPropertyCell()) { - JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(obj); - snapshot_->SetElementReference(entry, 0, cell->value()); + SetPropertyReference( + obj, entry, Heap::prototype_symbol(), js_obj->map()->prototype()); } else if (obj->IsString()) { if (obj->IsConsString()) { ConsString* cs = ConsString::cast(obj); - snapshot_->SetElementReference(entry, 0, cs->first()); - snapshot_->SetElementReference(entry, 1, cs->second()); + SetElementReference(obj, entry, 0, cs->first()); + SetElementReference(obj, entry, 1, cs->second()); } } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) { - IndexedReferencesExtractor refs_extractor(snapshot_, entry); + IndexedReferencesExtractor refs_extractor(this, obj, entry); obj->Iterate(&refs_extractor); } else if (obj->IsFixedArray()) { - IndexedReferencesExtractor refs_extractor(snapshot_, entry); + IndexedReferencesExtractor refs_extractor(this, obj, entry); obj->Iterate(&refs_extractor); } - entry->MarkAsVisited(); } @@ -1700,10 +1919,10 @@ void HeapSnapshotGenerator::ExtractClosureReferences(JSObject* js_obj, String* local_name = *zone_scope_info.LocalName(i); int idx = serialized_scope_info->ContextSlotIndex(local_name, NULL); if (idx >= 0 && idx < context->length()) { - snapshot_->SetClosureReference(entry, local_name, context->get(idx)); + SetClosureReference(js_obj, entry, local_name, context->get(idx)); } } - snapshot_->SetInternalReference(entry, "code", func->shared()); + SetInternalReference(js_obj, entry, "code", func->shared()); } } @@ -1716,13 +1935,13 @@ void HeapSnapshotGenerator::ExtractPropertyReferences(JSObject* js_obj, switch (descs->GetType(i)) { case FIELD: { int index = descs->GetFieldIndex(i); - snapshot_->SetPropertyReference( - entry, descs->GetKey(i), js_obj->FastPropertyAt(index)); + SetPropertyReference( + js_obj, entry, descs->GetKey(i), js_obj->FastPropertyAt(index)); break; } case CONSTANT_FUNCTION: - snapshot_->SetPropertyReference( - entry, descs->GetKey(i), descs->GetConstantFunction(i)); + SetPropertyReference( + js_obj, entry, descs->GetKey(i), descs->GetConstantFunction(i)); break; default: ; } @@ -1733,8 +1952,8 @@ void HeapSnapshotGenerator::ExtractPropertyReferences(JSObject* js_obj, for (int i = 0; i < length; ++i) { Object* k = dictionary->KeyAt(i); if (dictionary->IsKey(k)) { - snapshot_->SetPropertyReference( - entry, String::cast(k), dictionary->ValueAt(i)); + SetPropertyReference( + js_obj, entry, String::cast(k), dictionary->ValueAt(i)); } } } @@ -1750,7 +1969,7 @@ void HeapSnapshotGenerator::ExtractElementReferences(JSObject* js_obj, elements->length(); for (int i = 0; i < length; ++i) { if (!elements->get(i)->IsTheHole()) { - snapshot_->SetElementReference(entry, i, elements->get(i)); + SetElementReference(js_obj, entry, i, elements->get(i)); } } } else if (js_obj->HasDictionaryElements()) { @@ -1761,13 +1980,90 @@ void HeapSnapshotGenerator::ExtractElementReferences(JSObject* js_obj, if (dictionary->IsKey(k)) { ASSERT(k->IsNumber()); uint32_t index = static_cast<uint32_t>(k->Number()); - snapshot_->SetElementReference(entry, index, dictionary->ValueAt(i)); + SetElementReference(js_obj, entry, index, dictionary->ValueAt(i)); } } } } +void 
HeapSnapshotGenerator::SetClosureReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + String* reference_name, + Object* child_obj) { + HeapEntry* child_entry = GetEntry(child_obj); + if (child_entry != NULL) { + filler_->SetNamedReference(HeapGraphEdge::kContextVariable, + parent_obj, + parent_entry, + collection_->GetName(reference_name), + child_obj, + child_entry); + } +} + + +void HeapSnapshotGenerator::SetElementReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + int index, + Object* child_obj) { + HeapEntry* child_entry = GetEntry(child_obj); + if (child_entry != NULL) { + filler_->SetElementReference( + parent_obj, parent_entry, index, child_obj, child_entry); + } +} + + +void HeapSnapshotGenerator::SetInternalReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + const char* reference_name, + Object* child_obj) { + HeapEntry* child_entry = GetEntry(child_obj); + if (child_entry != NULL) { + filler_->SetNamedReference(HeapGraphEdge::kInternal, + parent_obj, + parent_entry, + reference_name, + child_obj, + child_entry); + } +} + + +void HeapSnapshotGenerator::SetPropertyReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + String* reference_name, + Object* child_obj) { + HeapEntry* child_entry = GetEntry(child_obj); + if (child_entry != NULL) { + filler_->SetNamedReference(HeapGraphEdge::kProperty, + parent_obj, + parent_entry, + collection_->GetName(reference_name), + child_obj, + child_entry); + } +} + + +void HeapSnapshotGenerator::SetRootReference(Object* child_obj) { + HeapEntry* child_entry = GetEntry(child_obj); + ASSERT(child_entry != NULL); + filler_->SetRootReference(child_obj, child_entry); +} + + +void HeapSnapshotsDiff::CreateRoots(int additions_count, int deletions_count) { + raw_additions_root_ = + NewArray<char>(HeapEntry::EntriesSize(1, additions_count, 0)); + additions_root()->Init(snapshot2_, additions_count, 0); + raw_deletions_root_ = + NewArray<char>(HeapEntry::EntriesSize(1, deletions_count, 0)); + deletions_root()->Init(snapshot1_, deletions_count, 0); +} + + static void DeleteHeapSnapshotsDiff(HeapSnapshotsDiff** diff_ptr) { delete *diff_ptr; } @@ -1779,8 +2075,6 @@ HeapSnapshotsComparator::~HeapSnapshotsComparator() { HeapSnapshotsDiff* HeapSnapshotsComparator::Compare(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2) { - HeapSnapshotsDiff* diff = new HeapSnapshotsDiff(snapshot1, snapshot2); - diffs_.Add(diff); List<HeapEntry*>* entries1 = snapshot1->GetSortedEntriesList(); List<HeapEntry*>* entries2 = snapshot2->GetSortedEntriesList(); int i = 0, j = 0; @@ -1810,17 +2104,33 @@ HeapSnapshotsDiff* HeapSnapshotsComparator::Compare(HeapSnapshot* snapshot1, snapshot1->ClearPaint(); snapshot1->root()->PaintAllReachable(); + snapshot2->ClearPaint(); + snapshot2->root()->PaintAllReachable(); + int reachable_deleted_entries = 0, reachable_added_entries = 0; + for (int i = 0; i < deleted_entries.length(); ++i) { + HeapEntry* entry = deleted_entries[i]; + if (entry->painted_reachable()) ++reachable_deleted_entries; + } + for (int i = 0; i < added_entries.length(); ++i) { + HeapEntry* entry = added_entries[i]; + if (entry->painted_reachable()) ++reachable_added_entries; + } + + HeapSnapshotsDiff* diff = new HeapSnapshotsDiff(snapshot1, snapshot2); + diffs_.Add(diff); + diff->CreateRoots(reachable_added_entries, reachable_deleted_entries); + + int del_child_index = 0, deleted_entry_index = 1; for (int i = 0; i < deleted_entries.length(); ++i) { HeapEntry* entry = deleted_entries[i]; if (entry->painted_reachable()) - 
diff->AddDeletedEntry(entry); + diff->AddDeletedEntry(del_child_index++, deleted_entry_index++, entry); } - snapshot2->ClearPaint(); - snapshot2->root()->PaintAllReachable(); + int add_child_index = 0, added_entry_index = 1; for (int i = 0; i < added_entries.length(); ++i) { HeapEntry* entry = added_entries[i]; if (entry->painted_reachable()) - diff->AddAddedEntry(entry); + diff->AddAddedEntry(add_child_index++, added_entry_index++, entry); } return diff; } diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h index cd2bd0b621..bebf40a376 100644 --- a/deps/v8/src/profile-generator.h +++ b/deps/v8/src/profile-generator.h @@ -279,15 +279,12 @@ class CpuProfilesCollection { CpuProfile* StopProfiling(int security_token_id, const char* title, double actual_sampling_rate); - CpuProfile* StopProfiling(int security_token_id, - String* title, - double actual_sampling_rate); List<CpuProfile*>* Profiles(int security_token_id); const char* GetName(String* name) { return function_and_resource_names_.GetName(name); } CpuProfile* GetProfile(int security_token_id, unsigned uid); - inline bool is_last_profile(); + bool IsLastProfile(const char* title); CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, String* name, String* resource_name, int line_number); @@ -423,167 +420,194 @@ class ProfileGenerator { }; -class HeapSnapshot; class HeapEntry; - -class HeapGraphEdge { +class HeapGraphEdge BASE_EMBEDDED { public: enum Type { - CONTEXT_VARIABLE = v8::HeapGraphEdge::CONTEXT_VARIABLE, - ELEMENT = v8::HeapGraphEdge::ELEMENT, - PROPERTY = v8::HeapGraphEdge::PROPERTY, - INTERNAL = v8::HeapGraphEdge::INTERNAL + kContextVariable = v8::HeapGraphEdge::kContextVariable, + kElement = v8::HeapGraphEdge::kElement, + kProperty = v8::HeapGraphEdge::kProperty, + kInternal = v8::HeapGraphEdge::kInternal }; - HeapGraphEdge(Type type, const char* name, HeapEntry* from, HeapEntry* to); - HeapGraphEdge(int index, HeapEntry* from, HeapEntry* to); + HeapGraphEdge() { } + void Init(int child_index, Type type, const char* name, HeapEntry* to); + void Init(int child_index, int index, HeapEntry* to); - Type type() const { return type_; } - int index() const { - ASSERT(type_ == ELEMENT); + Type type() { return static_cast<Type>(type_); } + int index() { + ASSERT(type_ == kElement); return index_; } - const char* name() const { - ASSERT(type_ == CONTEXT_VARIABLE || type_ == PROPERTY || type_ == INTERNAL); + const char* name() { + ASSERT(type_ == kContextVariable + || type_ == kProperty + || type_ == kInternal); return name_; } - HeapEntry* from() const { return from_; } - HeapEntry* to() const { return to_; } + HeapEntry* to() { return to_; } + + HeapEntry* From(); private: - Type type_; + int child_index_ : 30; + unsigned type_ : 2; union { int index_; const char* name_; }; - HeapEntry* from_; HeapEntry* to_; DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge); }; -class HeapGraphPath; class CachedHeapGraphPath; +class HeapGraphPath; +class HeapSnapshot; -class HeapEntry { +// HeapEntry instances represent an entity from the heap (or a special +// virtual node, e.g. root). To make heap snapshots more compact, +// HeapEntries has a special memory layout (no Vectors or Lists used): +// +// +-----------------+ +// HeapEntry +// +-----------------+ +// HeapGraphEdge | +// ... } children_count +// HeapGraphEdge | +// +-----------------+ +// HeapGraphEdge* | +// ... } retainers_count +// HeapGraphEdge* | +// +-----------------+ +// +// In a HeapSnapshot, all entries are hand-allocated in a continuous array +// of raw bytes. 
+// +class HeapEntry BASE_EMBEDDED { public: enum Type { - INTERNAL = v8::HeapGraphNode::INTERNAL, - ARRAY = v8::HeapGraphNode::ARRAY, - STRING = v8::HeapGraphNode::STRING, - OBJECT = v8::HeapGraphNode::OBJECT, - CODE = v8::HeapGraphNode::CODE, - CLOSURE = v8::HeapGraphNode::CLOSURE + kInternal = v8::HeapGraphNode::kInternal, + kArray = v8::HeapGraphNode::kArray, + kString = v8::HeapGraphNode::kString, + kObject = v8::HeapGraphNode::kObject, + kCode = v8::HeapGraphNode::kCode, + kClosure = v8::HeapGraphNode::kClosure }; - explicit HeapEntry(HeapSnapshot* snapshot) - : snapshot_(snapshot), - visited_(false), - type_(INTERNAL), - name_(""), - id_(0), - next_auto_index_(0), - self_size_(0), - security_token_id_(TokenEnumerator::kNoSecurityToken), - children_(1), - retainers_(0), - retaining_paths_(0), - total_size_(kUnknownSize), - non_shared_total_size_(kUnknownSize), - painted_(kUnpainted) { } - HeapEntry(HeapSnapshot* snapshot, + HeapEntry() { } + void Init(HeapSnapshot* snapshot, int children_count, int retainers_count); + void Init(HeapSnapshot* snapshot, Type type, const char* name, uint64_t id, int self_size, - int security_token_id) - : snapshot_(snapshot), - visited_(false), - type_(type), - name_(name), - id_(id), - next_auto_index_(1), - self_size_(self_size), - security_token_id_(security_token_id), - children_(4), - retainers_(4), - retaining_paths_(4), - total_size_(kUnknownSize), - non_shared_total_size_(kUnknownSize), - painted_(kUnpainted) { } - ~HeapEntry(); - - bool visited() const { return visited_; } - Type type() const { return type_; } - const char* name() const { return name_; } - uint64_t id() const { return id_; } - int self_size() const { return self_size_; } - int security_token_id() const { return security_token_id_; } - bool painted_reachable() { return painted_ == kPaintReachable; } + int children_count, + int retainers_count); + + HeapSnapshot* snapshot() { return snapshot_; } + Type type() { return static_cast<Type>(type_); } + const char* name() { return name_; } + uint64_t id() { return id_; } + int self_size() { return self_size_; } + + Vector<HeapGraphEdge> children() { + return Vector<HeapGraphEdge>(children_arr(), children_count_); } + Vector<HeapGraphEdge*> retainers() { + return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); } + List<HeapGraphPath*>* GetRetainingPaths(); + + void clear_paint() { painted_ = kUnpainted; } + bool painted_reachable() { return painted_ == kPainted; } + void paint_reachable() { + ASSERT(painted_ == kUnpainted); + painted_ = kPainted; + } bool not_painted_reachable_from_others() { - return painted_ != kPaintReachableFromOthers; + return painted_ != kPaintedReachableFromOthers; + } + void paint_reachable_from_others() { + painted_ = kPaintedReachableFromOthers; } - const List<HeapGraphEdge*>* children() const { return &children_; } - const List<HeapGraphEdge*>* retainers() const { return &retainers_; } - const List<HeapGraphPath*>* GetRetainingPaths(); - template<class Visitor> void ApplyAndPaintAllReachable(Visitor* visitor); - - void ClearPaint() { painted_ = kUnpainted; } - void CutEdges(); - void MarkAsVisited() { visited_ = true; } void PaintAllReachable(); - void PaintReachable() { - ASSERT(painted_ == kUnpainted); - painted_ = kPaintReachable; - } - void PaintReachableFromOthers() { painted_ = kPaintReachableFromOthers; } - void SetClosureReference(const char* name, HeapEntry* entry); - void SetElementReference(int index, HeapEntry* entry); - void SetInternalReference(const char* name, HeapEntry* entry); - 
void SetPropertyReference(const char* name, HeapEntry* entry); - void SetAutoIndexReference(HeapEntry* entry); - void SetUnidirAutoIndexReference(HeapEntry* entry); - int TotalSize(); - int NonSharedTotalSize(); + void SetElementReference( + int child_index, int index, HeapEntry* entry, int retainer_index); + void SetNamedReference(HeapGraphEdge::Type type, + int child_index, + const char* name, + HeapEntry* entry, + int retainer_index); + void SetUnidirElementReference(int child_index, int index, HeapEntry* entry); + + int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); } + int ReachableSize(); + int RetainedSize(); void Print(int max_depth, int indent); - private: - void AddEdge(HeapGraphEdge* edge); - int CalculateTotalSize(); - int CalculateNonSharedTotalSize(); - void FindRetainingPaths(HeapEntry* node, CachedHeapGraphPath* prev_path); - void RemoveChild(HeapGraphEdge* edge); - void RemoveRetainer(HeapGraphEdge* edge); + static int EntriesSize(int entries_count, + int children_count, + int retainers_count); + private: + HeapGraphEdge* children_arr() { + return reinterpret_cast<HeapGraphEdge*>(this + 1); + } + HeapGraphEdge** retainers_arr() { + return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_); + } const char* TypeAsString(); + unsigned painted_: 2; + unsigned type_: 3; + // The calculated data is stored in HeapSnapshot in HeapEntryCalculatedData + // entries. See AddCalculatedData and GetCalculatedData. + int calculated_data_index_: 27; + int self_size_; + int children_count_; + int retainers_count_; HeapSnapshot* snapshot_; - bool visited_; - Type type_; const char* name_; uint64_t id_; - int next_auto_index_; - int self_size_; - int security_token_id_; - List<HeapGraphEdge*> children_; - List<HeapGraphEdge*> retainers_; - List<HeapGraphPath*> retaining_paths_; - int total_size_; - int non_shared_total_size_; - int painted_; + + static const unsigned kUnpainted = 0; + static const unsigned kPainted = 1; + static const unsigned kPaintedReachableFromOthers = 2; + static const int kNoCalculatedData = -1; + + DISALLOW_COPY_AND_ASSIGN(HeapEntry); +}; + + +class HeapEntryCalculatedData { + public: + HeapEntryCalculatedData() + : retaining_paths_(NULL), + reachable_size_(kUnknownSize), + retained_size_(kUnknownSize) { + } + void Dispose(); + + List<HeapGraphPath*>* GetRetainingPaths(HeapEntry* entry); + int ReachableSize(HeapEntry* entry); + int RetainedSize(HeapEntry* entry); + + private: + void CalculateSizes(HeapEntry* entry); + void FindRetainingPaths(HeapEntry* entry, CachedHeapGraphPath* prev_path); + + List<HeapGraphPath*>* retaining_paths_; + int reachable_size_; + int retained_size_; static const int kUnknownSize = -1; - static const int kUnpainted = 0; - static const int kPaintReachable = 1; - static const int kPaintReachableFromOthers = 2; - DISALLOW_IMPLICIT_CONSTRUCTORS(HeapEntry); + // Allow generated copy constructor and assignment operator. 
}; @@ -595,7 +619,7 @@ class HeapGraphPath { void Add(HeapGraphEdge* edge) { path_.Add(edge); } void Set(int index, HeapGraphEdge* edge) { path_[index] = edge; } - const List<HeapGraphEdge*>* path() const { return &path_; } + const List<HeapGraphEdge*>* path() { return &path_; } void Print(); @@ -606,39 +630,6 @@ class HeapGraphPath { }; -class HeapEntriesMap { - public: - HeapEntriesMap(); - ~HeapEntriesMap(); - - void Alias(HeapObject* object, HeapEntry* entry); - void Apply(void (HeapEntry::*Func)(void)); - template<class Visitor> - void Apply(Visitor* visitor); - HeapEntry* Map(HeapObject* object); - void Pair(HeapObject* object, HeapEntry* entry); - - uint32_t capacity() { return entries_.capacity(); } - - private: - INLINE(uint32_t Hash(HeapObject* object)) { - return static_cast<uint32_t>(reinterpret_cast<intptr_t>(object)); - } - INLINE(static bool HeapObjectsMatch(void* key1, void* key2)) { - return key1 == key2; - } - INLINE(bool IsAlias(void* ptr)) { - return reinterpret_cast<intptr_t>(ptr) & kAliasTag; - } - - static const intptr_t kAliasTag = 1; - - HashMap entries_; - - DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap); -}; - - class HeapSnapshotsCollection; class HeapSnapshotsDiff; @@ -653,53 +644,52 @@ class HeapSnapshot { const char* title, unsigned uid); ~HeapSnapshot(); - void ClearPaint(); - void CutObjectsFromForeignSecurityContexts(); - HeapEntry* GetEntry(Object* object); - void SetClosureReference( - HeapEntry* parent, String* reference_name, Object* child); - void SetElementReference(HeapEntry* parent, int index, Object* child); - void SetInternalReference( - HeapEntry* parent, const char* reference_name, Object* child); - void SetPropertyReference( - HeapEntry* parent, String* reference_name, Object* child); - INLINE(const char* title() const) { return title_; } - INLINE(unsigned uid() const) { return uid_; } - const HeapEntry* const_root() const { return &root_; } - HeapEntry* root() { return &root_; } - template<class Visitor> - void IterateEntries(Visitor* visitor) { entries_.Apply(visitor); } - List<HeapEntry*>* GetSortedEntriesList(); + HeapSnapshotsCollection* collection() { return collection_; } + const char* title() { return title_; } + unsigned uid() { return uid_; } + HeapEntry* root() { return entries_[root_entry_index_]; } + + void AllocateEntries( + int entries_count, int children_count, int retainers_count); + HeapEntry* AddEntry( + HeapObject* object, int children_count, int retainers_count); + bool WillAddEntry(HeapObject* object); + int AddCalculatedData(); + HeapEntryCalculatedData& GetCalculatedData(int index) { + return calculated_data_[index]; + } + void ClearPaint(); HeapSnapshotsDiff* CompareWith(HeapSnapshot* snapshot); + List<HeapEntry*>* GetSortedEntriesList(); + template<class Visitor> + void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); } void Print(int max_depth); + void PrintEntriesSize(); + + static HeapObject *const kInternalRootObject; private: - HeapEntry* AddEntry(HeapObject* object, HeapEntry::Type type) { - return AddEntry(object, type, ""); - } - HeapEntry* AddEntry( - HeapObject* object, HeapEntry::Type type, const char* name); - void AddEntryAlias(HeapObject* object, HeapEntry* entry) { - entries_.Alias(object, entry); - } - HeapEntry* FindEntry(HeapObject* object) { - return entries_.Map(object); - } - int GetGlobalSecurityToken(); - int GetObjectSecurityToken(HeapObject* obj); + HeapEntry* AddEntry(HeapObject* object, + HeapEntry::Type type, + const char* name, + int children_count, + int retainers_count); + 
HeapEntry* GetNextEntryToInit(); static int GetObjectSize(HeapObject* obj); static int CalculateNetworkSize(JSObject* obj); HeapSnapshotsCollection* collection_; const char* title_; unsigned uid_; - HeapEntry root_; - // Mapping from HeapObject* pointers to HeapEntry* pointers. - HeapEntriesMap entries_; - // Entries sorted by id. - List<HeapEntry*>* sorted_entries_; + int root_entry_index_; + char* raw_entries_; + List<HeapEntry*> entries_; + bool entries_sorted_; + List<HeapEntryCalculatedData> calculated_data_; + + friend class HeapSnapshotTester; DISALLOW_COPY_AND_ASSIGN(HeapSnapshot); }; @@ -748,30 +738,36 @@ class HeapSnapshotsDiff { HeapSnapshotsDiff(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2) : snapshot1_(snapshot1), snapshot2_(snapshot2), - additions_root_(new HeapEntry(snapshot2)), - deletions_root_(new HeapEntry(snapshot1)) { } + raw_additions_root_(NULL), + raw_deletions_root_(NULL) { } ~HeapSnapshotsDiff() { - delete deletions_root_; - delete additions_root_; + DeleteArray(raw_deletions_root_); + DeleteArray(raw_additions_root_); } - void AddAddedEntry(HeapEntry* entry) { - additions_root_->SetUnidirAutoIndexReference(entry); + void AddAddedEntry(int child_index, int index, HeapEntry* entry) { + additions_root()->SetUnidirElementReference(child_index, index, entry); } - void AddDeletedEntry(HeapEntry* entry) { - deletions_root_->SetUnidirAutoIndexReference(entry); + void AddDeletedEntry(int child_index, int index, HeapEntry* entry) { + deletions_root()->SetUnidirElementReference(child_index, index, entry); } - const HeapEntry* additions_root() const { return additions_root_; } - const HeapEntry* deletions_root() const { return deletions_root_; } + void CreateRoots(int additions_count, int deletions_count); + + HeapEntry* additions_root() { + return reinterpret_cast<HeapEntry*>(raw_additions_root_); + } + HeapEntry* deletions_root() { + return reinterpret_cast<HeapEntry*>(raw_deletions_root_); + } private: HeapSnapshot* snapshot1_; HeapSnapshot* snapshot2_; - HeapEntry* additions_root_; - HeapEntry* deletions_root_; + char* raw_additions_root_; + char* raw_deletions_root_; DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsDiff); }; @@ -830,18 +826,123 @@ class HeapSnapshotsCollection { }; +// The HeapEntriesMap instance is used to track a mapping between +// real heap objects and their representations in heap snapshots. +class HeapEntriesMap { + public: + HeapEntriesMap(); + ~HeapEntriesMap(); + + // Aliasing is used for skipping intermediate proxy objects, like + // JSGlobalPropertyCell. 
+ void Alias(HeapObject* from, HeapObject* to); + HeapEntry* Map(HeapObject* object); + void Pair(HeapObject* object, HeapEntry* entry); + void CountReference(HeapObject* from, HeapObject* to, + int* prev_children_count = NULL, + int* prev_retainers_count = NULL); + template<class Visitor> + void UpdateEntries(Visitor* visitor); + + int entries_count() { return entries_count_; } + int total_children_count() { return total_children_count_; } + int total_retainers_count() { return total_retainers_count_; } + + private: + struct EntryInfo { + explicit EntryInfo(HeapEntry* entry) + : entry(entry), children_count(0), retainers_count(0) { } + HeapEntry* entry; + int children_count; + int retainers_count; + }; + + uint32_t Hash(HeapObject* object) { + return static_cast<uint32_t>(reinterpret_cast<intptr_t>(object)); + } + static bool HeapObjectsMatch(void* key1, void* key2) { return key1 == key2; } + + bool IsAlias(void* ptr) { + return reinterpret_cast<intptr_t>(ptr) & kAliasTag; + } + void* MakeAlias(void* ptr) { + return reinterpret_cast<void*>(reinterpret_cast<intptr_t>(ptr) | kAliasTag); + } + void* Unalias(void* ptr) { + return reinterpret_cast<void*>( + reinterpret_cast<intptr_t>(ptr) & (~kAliasTag)); + } + + HashMap entries_; + int entries_count_; + int total_children_count_; + int total_retainers_count_; + + static const intptr_t kAliasTag = 1; + + DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap); +}; + + class HeapSnapshotGenerator { public: + class SnapshotFillerInterface { + public: + virtual ~SnapshotFillerInterface() { } + virtual HeapEntry* AddEntry(HeapObject* obj) = 0; + virtual void SetElementReference(HeapObject* parent_obj, + HeapEntry* parent_entry, + int index, + Object* child_obj, + HeapEntry* child_entry) = 0; + virtual void SetNamedReference(HeapGraphEdge::Type type, + HeapObject* parent_obj, + HeapEntry* parent_entry, + const char* reference_name, + Object* child_obj, + HeapEntry* child_entry) = 0; + virtual void SetRootReference(Object* child_obj, + HeapEntry* child_entry) = 0; + + static HeapEntry *const kHeapEntryPlaceholder; + }; + explicit HeapSnapshotGenerator(HeapSnapshot* snapshot); void GenerateSnapshot(); private: + HeapEntry* GetEntry(Object* obj); + int GetGlobalSecurityToken(); + int GetObjectSecurityToken(HeapObject* obj); void ExtractReferences(HeapObject* obj); void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry); void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry); void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry); + void SetClosureReference(HeapObject* parent_obj, + HeapEntry* parent, + String* reference_name, + Object* child); + void SetElementReference(HeapObject* parent_obj, + HeapEntry* parent, + int index, + Object* child); + void SetInternalReference(HeapObject* parent_obj, + HeapEntry* parent, + const char* reference_name, + Object* child); + void SetPropertyReference(HeapObject* parent_obj, + HeapEntry* parent, + String* reference_name, + Object* child); + void SetRootReference(Object* child); HeapSnapshot* snapshot_; + HeapSnapshotsCollection* collection_; + // Mapping from HeapObject* pointers to HeapEntry* pointers. 
+ HeapEntriesMap entries_; + SnapshotFillerInterface* filler_; + + friend class IndexedReferencesExtractor; DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator); }; diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc index c7d3ff7f1b..fc6ca762f1 100644 --- a/deps/v8/src/runtime.cc +++ b/deps/v8/src/runtime.cc @@ -305,13 +305,14 @@ static Handle<Object> CreateObjectLiteralBoilerplate( } Handle<Object> result; uint32_t element_index = 0; - if (key->ToArrayIndex(&element_index)) { - // Array index (uint32). - result = SetElement(boilerplate, element_index, value); - } else if (key->IsSymbol()) { - // The key is not an array index. + if (key->IsSymbol()) { + // If key is a symbol it is not an array element. Handle<String> name(String::cast(*key)); + ASSERT(!name->AsArrayIndex(&element_index)); result = SetProperty(boilerplate, name, value, NONE); + } else if (key->ToArrayIndex(&element_index)) { + // Array index (uint32). + result = SetElement(boilerplate, element_index, value); } else { // Non-uint32 number. ASSERT(key->IsNumber()); @@ -1626,7 +1627,8 @@ static Object* Runtime_SetCode(Arguments args) { } // Set the code, scope info, formal parameter count, // and the length of the target function. - target->set_code(fun->code()); + target->shared()->set_code(shared->code()); + target->set_code(shared->code()); target->shared()->set_scope_info(shared->scope_info()); target->shared()->set_length(shared->length()); target->shared()->set_formal_parameter_count( @@ -6869,7 +6871,7 @@ static Object* Runtime_LazyCompile(Arguments args) { Handle<JSFunction> function = args.at<JSFunction>(0); #ifdef DEBUG - if (FLAG_trace_lazy) { + if (FLAG_trace_lazy && !function->shared()->is_compiled()) { PrintF("[lazy: "); function->shared()->name()->Print(); PrintF("]\n"); diff --git a/deps/v8/src/serialize.h b/deps/v8/src/serialize.h index 6a318f1936..d1b668d13b 100644 --- a/deps/v8/src/serialize.h +++ b/deps/v8/src/serialize.h @@ -248,7 +248,7 @@ class SerializerDeserializer: public ObjectVisitor { } static int partial_snapshot_cache_length_; - static const int kPartialSnapshotCacheCapacity = 1300; + static const int kPartialSnapshotCacheCapacity = 1400; static Object* partial_snapshot_cache_[]; }; diff --git a/deps/v8/src/top.cc b/deps/v8/src/top.cc index 2887b7664f..1a4a9485d1 100644 --- a/deps/v8/src/top.cc +++ b/deps/v8/src/top.cc @@ -107,16 +107,15 @@ void Top::IterateThread(ThreadVisitor* v, char* t) { void Top::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) { v->VisitPointer(&(thread->pending_exception_)); v->VisitPointer(&(thread->pending_message_obj_)); - v->VisitPointer( - BitCast<Object**, Script**>(&(thread->pending_message_script_))); - v->VisitPointer(BitCast<Object**, Context**>(&(thread->context_))); + v->VisitPointer(BitCast<Object**>(&(thread->pending_message_script_))); + v->VisitPointer(BitCast<Object**>(&(thread->context_))); v->VisitPointer(&(thread->scheduled_exception_)); for (v8::TryCatch* block = thread->TryCatchHandler(); block != NULL; block = TRY_CATCH_FROM_ADDRESS(block->next_)) { - v->VisitPointer(BitCast<Object**, void**>(&(block->exception_))); - v->VisitPointer(BitCast<Object**, void**>(&(block->message_))); + v->VisitPointer(BitCast<Object**>(&(block->exception_))); + v->VisitPointer(BitCast<Object**>(&(block->message_))); } // Iterate over pointers on native execution stack. 
diff --git a/deps/v8/src/utils.h b/deps/v8/src/utils.h index 236b85e64d..d15319c7a1 100644 --- a/deps/v8/src/utils.h +++ b/deps/v8/src/utils.h @@ -739,7 +739,11 @@ inline Dest BitCast(const Source& source) { return dest; } -} } // namespace v8::internal +template <class Dest, class Source> +inline Dest BitCast(Source* const & source) { + return BitCast<Dest>(reinterpret_cast<uintptr_t>(source)); +} +} } // namespace v8::internal #endif // V8_UTILS_H_ diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc index c542aef877..e501a7c76b 100644 --- a/deps/v8/src/version.cc +++ b/deps/v8/src/version.cc @@ -34,8 +34,8 @@ // cannot be changed without changing the SCons build script. #define MAJOR_VERSION 2 #define MINOR_VERSION 3 -#define BUILD_NUMBER 6 -#define PATCH_LEVEL 1 +#define BUILD_NUMBER 7 +#define PATCH_LEVEL 0 #define CANDIDATE_VERSION false // Define SONAME to have the SCons build the put a specific SONAME into the diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc index d90655b095..9ad94ce0f4 100644 --- a/deps/v8/src/x64/assembler-x64.cc +++ b/deps/v8/src/x64/assembler-x64.cc @@ -253,7 +253,7 @@ Operand::Operand(const Operand& operand, int32_t offset) { int32_t disp_value = 0; if (mode == 0x80 || is_baseless) { // Mode 2 or mode 0 with rbp/r13 as base: Word displacement. - disp_value = *reinterpret_cast<const int32_t*>(&operand.buf_[disp_offset]); + disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]); } else if (mode == 0x40) { // Mode 1: Byte displacement. disp_value = static_cast<signed char>(operand.buf_[disp_offset]); diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc index 959b4b0342..6b34a4f140 100644 --- a/deps/v8/src/x64/builtins-x64.cc +++ b/deps/v8/src/x64/builtins-x64.cc @@ -310,7 +310,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ movsxlq(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); - __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); + __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeOffset)); __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); __ cmpq(rax, rbx); __ j(not_equal, diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc index b6256faf86..04078efa2e 100644 --- a/deps/v8/src/x64/codegen-x64.cc +++ b/deps/v8/src/x64/codegen-x64.cc @@ -2630,9 +2630,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand, __ j(is_smi, &build_args); __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx); __ j(not_equal, &build_args); - __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); - __ Cmp(FieldOperand(rax, SharedFunctionInfo::kCodeOffset), apply_code); + __ Cmp(FieldOperand(rax, JSFunction::kCodeOffset), apply_code); __ j(not_equal, &build_args); // Check that applicand is a function. @@ -8635,6 +8634,12 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) { __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi); __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx); + // Initialize the code pointer in the function to be the one + // found in the shared function info object. + __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); + __ movq(FieldOperand(rax, JSFunction::kCodeOffset), rdx); + + // Return and remove the on-stack parameter. 
__ ret(1 * kPointerSize); diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc index bab0199354..e744d53f2d 100644 --- a/deps/v8/src/x64/macro-assembler-x64.cc +++ b/deps/v8/src/x64/macro-assembler-x64.cc @@ -582,8 +582,7 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { // Make sure the code objects in the builtins object and in the // builtin function are the same. push(target); - movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset)); + movq(target, FieldOperand(rdi, JSFunction::kCodeOffset)); cmpq(target, Operand(rsp, 0)); Assert(equal, "Builtin code object changed"); pop(target); @@ -2290,7 +2289,7 @@ void MacroAssembler::InvokeFunction(Register function, movq(rsi, FieldOperand(function, JSFunction::kContextOffset)); movsxlq(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); - movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); + movq(rdx, FieldOperand(rdi, JSFunction::kCodeOffset)); // Advances rdx to the end of the Code object header, to the start of // the executable code. lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc index 82b93c95cd..47a55e6a52 100644 --- a/deps/v8/test/cctest/test-api.cc +++ b/deps/v8/test/cctest/test-api.cc @@ -27,6 +27,8 @@ #include <limits.h> +#define USE_NEW_QUERY_CALLBACKS + #include "v8.h" #include "api.h" @@ -1194,12 +1196,12 @@ v8::Handle<Value> CheckThisNamedPropertySetter(Local<String> property, return v8::Handle<Value>(); } -v8::Handle<v8::Boolean> CheckThisIndexedPropertyQuery( +v8::Handle<v8::Integer> CheckThisIndexedPropertyQuery( uint32_t index, const AccessorInfo& info) { ApiTestFuzzer::Fuzz(); CHECK(info.This()->Equals(bottom)); - return v8::Handle<v8::Boolean>(); + return v8::Handle<v8::Integer>(); } diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc index 0e6f09d2ef..239d8ae695 100644 --- a/deps/v8/test/cctest/test-cpu-profiler.cc +++ b/deps/v8/test/cctest/test-cpu-profiler.cc @@ -12,6 +12,7 @@ namespace i = v8::internal; using i::CodeEntry; using i::CpuProfile; +using i::CpuProfiler; using i::CpuProfilesCollection; using i::ProfileGenerator; using i::ProfileNode; @@ -225,4 +226,18 @@ TEST(TickEvents) { CHECK_EQ("bbb", bottom_up_ddd_stub_children->last()->entry()->name()); } + +// http://crbug/51594 +// This test must not crash. 
+TEST(CrashIfStoppingLastNonExistentProfile) { + InitializeVM(); + TestSetup test_setup; + CpuProfiler::Setup(); + CpuProfiler::StartProfiling("1"); + CpuProfiler::StopProfiling("2"); + CpuProfiler::StartProfiling("1"); + CpuProfiler::StopProfiling(""); + CpuProfiler::TearDown(); +} + #endif // ENABLE_LOGGING_AND_PROFILING diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc index 1819aa461e..92ad0a4002 100644 --- a/deps/v8/test/cctest/test-heap-profiler.cc +++ b/deps/v8/test/cctest/test-heap-profiler.cc @@ -396,20 +396,17 @@ class NamedEntriesDetector { has_A2(false), has_B2(false), has_C2(false) { } - void Apply(i::HeapEntry* entry) { - const char* node_name = entry->name(); - if (strcmp("A1", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_A1 = true; - if (strcmp("B1", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_B1 = true; - if (strcmp("C1", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_C1 = true; - if (strcmp("A2", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_A2 = true; - if (strcmp("B2", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_B2 = true; - if (strcmp("C2", node_name) == 0 - && entry->GetRetainingPaths()->length() > 0) has_C2 = true; + void Apply(i::HeapEntry** entry_ptr) { + if (IsReachableNodeWithName(*entry_ptr, "A1")) has_A1 = true; + if (IsReachableNodeWithName(*entry_ptr, "B1")) has_B1 = true; + if (IsReachableNodeWithName(*entry_ptr, "C1")) has_C1 = true; + if (IsReachableNodeWithName(*entry_ptr, "A2")) has_A2 = true; + if (IsReachableNodeWithName(*entry_ptr, "B2")) has_B2 = true; + if (IsReachableNodeWithName(*entry_ptr, "C2")) has_C2 = true; + } + + static bool IsReachableNodeWithName(i::HeapEntry* entry, const char* name) { + return strcmp(name, entry->name()) == 0 && entry->painted_reachable(); } bool has_A1; @@ -460,7 +457,7 @@ static bool HasString(const v8::HeapGraphNode* node, const char* contents) { for (int i = 0, count = node->GetChildrenCount(); i < count; ++i) { const v8::HeapGraphEdge* prop = node->GetChild(i); const v8::HeapGraphNode* node = prop->GetToNode(); - if (node->GetType() == v8::HeapGraphNode::STRING) { + if (node->GetType() == v8::HeapGraphNode::kString) { v8::String::AsciiValue node_name(node->GetName()); if (strcmp(contents, *node_name) == 0) return true; } @@ -496,26 +493,34 @@ TEST(HeapSnapshot) { "var c2 = new C2(a2);"); const v8::HeapSnapshot* snapshot_env2 = v8::HeapProfiler::TakeSnapshot(v8::String::New("env2")); + i::HeapSnapshot* i_snapshot_env2 = + const_cast<i::HeapSnapshot*>( + reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2)); const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2); + // Paint all nodes reachable from global object. + i_snapshot_env2->ClearPaint(); + const_cast<i::HeapEntry*>( + reinterpret_cast<const i::HeapEntry*>(global_env2))->PaintAllReachable(); // Verify, that JS global object of env2 doesn't have '..1' // properties, but has '..2' properties. 
- CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "a1")); - CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "b1_1")); - CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "b1_2")); - CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "c1")); + CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "a1")); + CHECK_EQ( + NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b1_1")); + CHECK_EQ( + NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b1_2")); + CHECK_EQ(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "c1")); const v8::HeapGraphNode* a2_node = - GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "a2"); + GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "a2"); CHECK_NE(NULL, a2_node); - CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "b2_1")); - CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "b2_2")); - CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::PROPERTY, "c2")); + CHECK_NE( + NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b2_1")); + CHECK_NE( + NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b2_2")); + CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "c2")); // Verify that anything related to '[ABC]1' is not reachable. NamedEntriesDetector det; - i::HeapSnapshot* i_snapshot_env2 = - const_cast<i::HeapSnapshot*>( - reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2)); i_snapshot_env2->IterateEntries(&det); CHECK(!det.has_A1); CHECK(!det.has_B1); @@ -539,7 +544,7 @@ TEST(HeapSnapshot) { const v8::HeapGraphEdge* last_edge = path->GetEdge(edges_count - 1); v8::String::AsciiValue last_edge_name(last_edge->GetName()); if (strcmp("a2", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::PROPERTY) { + && last_edge->GetType() == v8::HeapGraphEdge::kProperty) { has_global_obj_a2_ref = true; continue; } @@ -547,19 +552,19 @@ TEST(HeapSnapshot) { const v8::HeapGraphEdge* prev_edge = path->GetEdge(edges_count - 2); v8::String::AsciiValue prev_edge_name(prev_edge->GetName()); if (strcmp("x1", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::PROPERTY + && last_edge->GetType() == v8::HeapGraphEdge::kProperty && strcmp("c2", *prev_edge_name) == 0) has_c2_x1_ref = true; if (strcmp("x2", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::PROPERTY + && last_edge->GetType() == v8::HeapGraphEdge::kProperty && strcmp("c2", *prev_edge_name) == 0) has_c2_x2_ref = true; if (strcmp("1", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::ELEMENT + && last_edge->GetType() == v8::HeapGraphEdge::kElement && strcmp("c2", *prev_edge_name) == 0) has_c2_1_ref = true; if (strcmp("x", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::CONTEXT_VARIABLE + && last_edge->GetType() == v8::HeapGraphEdge::kContextVariable && strcmp("b2_1", *prev_edge_name) == 0) has_b2_1_x_ref = true; if (strcmp("x", *last_edge_name) == 0 - && last_edge->GetType() == v8::HeapGraphEdge::CONTEXT_VARIABLE + && last_edge->GetType() == v8::HeapGraphEdge::kContextVariable && strcmp("b2_2", *prev_edge_name) == 0) has_b2_2_x_ref = true; } CHECK(has_global_obj_a2_ref); @@ -571,6 +576,73 @@ TEST(HeapSnapshot) { } +TEST(HeapSnapshotObjectSizes) { + v8::HandleScope scope; + LocalContext env; + + // -a-> X1 --a + // x -b-> X2 <-| + CompileAndRunScript( + "function X(a, b) { this.a = a; this.b = b; }\n" + "x = new 
X(new X(), new X());\n" + "x.a.a = x.b;"); + const v8::HeapSnapshot* snapshot = + v8::HeapProfiler::TakeSnapshot(v8::String::New("sizes")); + const v8::HeapGraphNode* global = GetGlobalObject(snapshot); + const v8::HeapGraphNode* x = + GetProperty(global, v8::HeapGraphEdge::kProperty, "x"); + CHECK_NE(NULL, x); + const v8::HeapGraphNode* x_prototype = + GetProperty(x, v8::HeapGraphEdge::kProperty, "prototype"); + CHECK_NE(NULL, x_prototype); + const v8::HeapGraphNode* x1 = + GetProperty(x, v8::HeapGraphEdge::kProperty, "a"); + CHECK_NE(NULL, x1); + const v8::HeapGraphNode* x2 = + GetProperty(x, v8::HeapGraphEdge::kProperty, "b"); + CHECK_NE(NULL, x2); + CHECK_EQ( + x->GetSelfSize() * 3, + x->GetReachableSize() - x_prototype->GetReachableSize()); + CHECK_EQ( + x->GetSelfSize() * 3 + x_prototype->GetSelfSize(), x->GetRetainedSize()); + CHECK_EQ( + x1->GetSelfSize() * 2, + x1->GetReachableSize() - x_prototype->GetReachableSize()); + CHECK_EQ( + x1->GetSelfSize(), x1->GetRetainedSize()); + CHECK_EQ( + x2->GetSelfSize(), + x2->GetReachableSize() - x_prototype->GetReachableSize()); + CHECK_EQ( + x2->GetSelfSize(), x2->GetRetainedSize()); +} + + +TEST(HeapSnapshotEntryChildren) { + v8::HandleScope scope; + LocalContext env; + + CompileAndRunScript( + "function A() { }\n" + "a = new A;"); + const v8::HeapSnapshot* snapshot = + v8::HeapProfiler::TakeSnapshot(v8::String::New("children")); + const v8::HeapGraphNode* global = GetGlobalObject(snapshot); + for (int i = 0, count = global->GetChildrenCount(); i < count; ++i) { + const v8::HeapGraphEdge* prop = global->GetChild(i); + CHECK_EQ(global, prop->GetFromNode()); + } + const v8::HeapGraphNode* a = + GetProperty(global, v8::HeapGraphEdge::kProperty, "a"); + CHECK_NE(NULL, a); + for (int i = 0, count = a->GetChildrenCount(); i < count; ++i) { + const v8::HeapGraphEdge* prop = a->GetChild(i); + CHECK_EQ(a, prop->GetFromNode()); + } +} + + TEST(HeapSnapshotCodeObjects) { v8::HandleScope scope; LocalContext env; @@ -584,20 +656,20 @@ TEST(HeapSnapshotCodeObjects) { const v8::HeapGraphNode* global = GetGlobalObject(snapshot); const v8::HeapGraphNode* compiled = - GetProperty(global, v8::HeapGraphEdge::PROPERTY, "compiled"); + GetProperty(global, v8::HeapGraphEdge::kProperty, "compiled"); CHECK_NE(NULL, compiled); - CHECK_EQ(v8::HeapGraphNode::CLOSURE, compiled->GetType()); + CHECK_EQ(v8::HeapGraphNode::kClosure, compiled->GetType()); const v8::HeapGraphNode* lazy = - GetProperty(global, v8::HeapGraphEdge::PROPERTY, "lazy"); + GetProperty(global, v8::HeapGraphEdge::kProperty, "lazy"); CHECK_NE(NULL, lazy); - CHECK_EQ(v8::HeapGraphNode::CLOSURE, lazy->GetType()); + CHECK_EQ(v8::HeapGraphNode::kClosure, lazy->GetType()); // Find references to code. 
const v8::HeapGraphNode* compiled_code = - GetProperty(compiled, v8::HeapGraphEdge::INTERNAL, "code"); + GetProperty(compiled, v8::HeapGraphEdge::kInternal, "code"); CHECK_NE(NULL, compiled_code); const v8::HeapGraphNode* lazy_code = - GetProperty(lazy, v8::HeapGraphEdge::INTERNAL, "code"); + GetProperty(lazy, v8::HeapGraphEdge::kInternal, "code"); CHECK_NE(NULL, lazy_code); // Verify that non-compiled code doesn't contain references to "x" @@ -607,7 +679,7 @@ TEST(HeapSnapshotCodeObjects) { for (int i = 0, count = compiled_code->GetChildrenCount(); i < count; ++i) { const v8::HeapGraphEdge* prop = compiled_code->GetChild(i); const v8::HeapGraphNode* node = prop->GetToNode(); - if (node->GetType() == v8::HeapGraphNode::ARRAY) { + if (node->GetType() == v8::HeapGraphNode::kArray) { if (HasString(node, "x")) { compiled_references_x = true; break; @@ -617,7 +689,7 @@ TEST(HeapSnapshotCodeObjects) { for (int i = 0, count = lazy_code->GetChildrenCount(); i < count; ++i) { const v8::HeapGraphEdge* prop = lazy_code->GetChild(i); const v8::HeapGraphNode* node = prop->GetToNode(); - if (node->GetType() == v8::HeapGraphNode::ARRAY) { + if (node->GetType() == v8::HeapGraphNode::kArray) { if (HasString(node, "x")) { lazy_references_x = true; break; @@ -634,11 +706,8 @@ TEST(HeapSnapshotCodeObjects) { // them to a signed type. #define CHECK_EQ_UINT64_T(a, b) \ CHECK_EQ(static_cast<int64_t>(a), static_cast<int64_t>(b)) -#define CHECK_NE_UINT64_T(a, b) do \ - { \ - bool ne = a != b; \ - CHECK(ne); \ - } while (false) +#define CHECK_NE_UINT64_T(a, b) \ + CHECK((a) != (b)) // NOLINT TEST(HeapEntryIdsAndGC) { v8::HandleScope scope; @@ -662,27 +731,35 @@ TEST(HeapEntryIdsAndGC) { CHECK_NE_UINT64_T(0, global1->GetId()); CHECK_EQ_UINT64_T(global1->GetId(), global2->GetId()); const v8::HeapGraphNode* A1 = - GetProperty(global1, v8::HeapGraphEdge::PROPERTY, "A"); + GetProperty(global1, v8::HeapGraphEdge::kProperty, "A"); + CHECK_NE(NULL, A1); const v8::HeapGraphNode* A2 = - GetProperty(global2, v8::HeapGraphEdge::PROPERTY, "A"); + GetProperty(global2, v8::HeapGraphEdge::kProperty, "A"); + CHECK_NE(NULL, A2); CHECK_NE_UINT64_T(0, A1->GetId()); CHECK_EQ_UINT64_T(A1->GetId(), A2->GetId()); const v8::HeapGraphNode* B1 = - GetProperty(global1, v8::HeapGraphEdge::PROPERTY, "B"); + GetProperty(global1, v8::HeapGraphEdge::kProperty, "B"); + CHECK_NE(NULL, B1); const v8::HeapGraphNode* B2 = - GetProperty(global2, v8::HeapGraphEdge::PROPERTY, "B"); + GetProperty(global2, v8::HeapGraphEdge::kProperty, "B"); + CHECK_NE(NULL, B2); CHECK_NE_UINT64_T(0, B1->GetId()); CHECK_EQ_UINT64_T(B1->GetId(), B2->GetId()); const v8::HeapGraphNode* a1 = - GetProperty(global1, v8::HeapGraphEdge::PROPERTY, "a"); + GetProperty(global1, v8::HeapGraphEdge::kProperty, "a"); + CHECK_NE(NULL, a1); const v8::HeapGraphNode* a2 = - GetProperty(global2, v8::HeapGraphEdge::PROPERTY, "a"); + GetProperty(global2, v8::HeapGraphEdge::kProperty, "a"); + CHECK_NE(NULL, a2); CHECK_NE_UINT64_T(0, a1->GetId()); CHECK_EQ_UINT64_T(a1->GetId(), a2->GetId()); const v8::HeapGraphNode* b1 = - GetProperty(global1, v8::HeapGraphEdge::PROPERTY, "b"); + GetProperty(global1, v8::HeapGraphEdge::kProperty, "b"); + CHECK_NE(NULL, b1); const v8::HeapGraphNode* b2 = - GetProperty(global2, v8::HeapGraphEdge::PROPERTY, "b"); + GetProperty(global2, v8::HeapGraphEdge::kProperty, "b"); + CHECK_NE(NULL, b2); CHECK_NE_UINT64_T(0, b1->GetId()); CHECK_EQ_UINT64_T(b1->GetId(), b2->GetId()); } @@ -717,15 +794,15 @@ TEST(HeapSnapshotsDiff) { for (int i = 0, count = 
additions_root->GetChildrenCount(); i < count; ++i) { const v8::HeapGraphEdge* prop = additions_root->GetChild(i); const v8::HeapGraphNode* node = prop->GetToNode(); - if (node->GetType() == v8::HeapGraphNode::OBJECT) { + if (node->GetType() == v8::HeapGraphNode::kObject) { v8::String::AsciiValue node_name(node->GetName()); if (strcmp(*node_name, "A") == 0) { - CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::PROPERTY, "a")); + CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::kProperty, "a")); CHECK(!found_A); found_A = true; s1_A_id = node->GetId(); } else if (strcmp(*node_name, "B") == 0) { - CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::PROPERTY, "b2")); + CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::kProperty, "b2")); CHECK(!found_B); found_B = true; } @@ -741,10 +818,10 @@ TEST(HeapSnapshotsDiff) { for (int i = 0, count = deletions_root->GetChildrenCount(); i < count; ++i) { const v8::HeapGraphEdge* prop = deletions_root->GetChild(i); const v8::HeapGraphNode* node = prop->GetToNode(); - if (node->GetType() == v8::HeapGraphNode::OBJECT) { + if (node->GetType() == v8::HeapGraphNode::kObject) { v8::String::AsciiValue node_name(node->GetName()); if (strcmp(*node_name, "A") == 0) { - CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::PROPERTY, "a")); + CHECK(IsNodeRetainedAs(node, v8::HeapGraphEdge::kProperty, "a")); CHECK(!found_A_del); found_A_del = true; s2_A_id = node->GetId(); @@ -756,4 +833,35 @@ TEST(HeapSnapshotsDiff) { CHECK(s1_A_id != s2_A_id); } + +namespace v8 { +namespace internal { + +class HeapSnapshotTester { + public: + static int CalculateNetworkSize(JSObject* obj) { + return HeapSnapshot::CalculateNetworkSize(obj); + } +}; + +} } // namespace v8::internal + +// http://code.google.com/p/v8/issues/detail?id=822 +// Trying to call CalculateNetworkSize on an object with elements set +// to non-FixedArray may cause an assertion error in debug builds. +TEST(Issue822) { + v8::HandleScope scope; + LocalContext context; + const int kElementCount = 260; + uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount)); + i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(kElementCount, + pixel_data); + v8::Handle<v8::Object> obj = v8::Object::New(); + // Set the elements to be the pixels. + obj->SetIndexedPropertiesToPixelData(pixel_data, kElementCount); + i::Handle<i::JSObject> jsobj = v8::Utils::OpenHandle(*obj); + // This call must not cause an assertion error in debug builds. + i::HeapSnapshotTester::CalculateNetworkSize(*jsobj); +} + #endif // ENABLE_LOGGING_AND_PROFILING diff --git a/deps/v8/test/mjsunit/api-call-after-bypassed-exception.js b/deps/v8/test/mjsunit/api-call-after-bypassed-exception.js index f77b5140c1..4a1855881e 100644 --- a/deps/v8/test/mjsunit/api-call-after-bypassed-exception.js +++ b/deps/v8/test/mjsunit/api-call-after-bypassed-exception.js @@ -1,29 +1,29 @@ -// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Copyright 2008 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // This is a test of making an API call after an exception thrown in JavaScript // has been bypassed by a return in the finally block. diff --git a/deps/v8/test/mjsunit/debug-clearbreakpointgroup.js b/deps/v8/test/mjsunit/debug-clearbreakpointgroup.js index aad6c3aff7..e6677f9396 100644 --- a/deps/v8/test/mjsunit/debug-clearbreakpointgroup.js +++ b/deps/v8/test/mjsunit/debug-clearbreakpointgroup.js @@ -1,117 +1,117 @@ -// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-var Debug = debug.Debug
-
-// Simple function which stores the last debug event.
-var listenerComplete = false;
-var exception = false;
-
-var base_request = '"seq":0,"type":"request","command":"clearbreakpointgroup"';
-var scriptId = null;
-
-function safeEval(code) {
- try {
- return eval('(' + code + ')');
- } catch (e) {
- assertEquals(void 0, e);
- return undefined;
- }
-}
-
-function testArguments(dcp, arguments, success) {
- var request = '{' + base_request + ',"arguments":' + arguments + '}'
- var json_response = dcp.processDebugJSONRequest(request);
- var response = safeEval(json_response);
- if (success) {
- assertTrue(response.success, json_response);
- } else {
- assertFalse(response.success, json_response);
- }
-}
-
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- // Get the debug command processor.
- var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
-
- // Clear breakpoint group 1.
- testArguments(dcp, '{"groupId":1}', true);
-
- // Indicate that all was processed.
- listenerComplete = true;
- } else if (event == Debug.DebugEvent.AfterCompile) {
- scriptId = event_data.script().id();
- assertEquals(source, event_data.script().source());
- }
- } catch (e) {
- exception = e
- };
-};
-
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-var source = 'function f(n) {\nreturn n+1;\n}\nfunction g() {return f(10);}' +
- '\nvar r = g(); g;';
-eval(source);
-
-assertNotNull(scriptId);
-
-var groupId1 = 1;
-var groupId2 = 2;
-// Set a break point and call to invoke the debug event listener.
-var bp1 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId1);
-var bp2 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId2);
-var bp3 = Debug.setScriptBreakPointById(scriptId, 1, null, null, null);
-var bp4 = Debug.setScriptBreakPointById(scriptId, 3, null, null, groupId1);
-var bp5 = Debug.setScriptBreakPointById(scriptId, 4, null, null, groupId2);
-
-assertEquals(5, Debug.scriptBreakPoints().length);
-
-// Call function 'g' from the compiled script to trigger breakpoint.
-g();
-
-// Make sure that the debug event listener was invoked.
-assertTrue(listenerComplete,
- "listener did not run to completion: " + exception);
-
-var breakpoints = Debug.scriptBreakPoints();
-assertEquals(3, breakpoints.length);
-var breakpointNumbers = breakpoints.map(
- function(scriptBreakpoint) { return scriptBreakpoint.number(); },
- breakpointNumbers);
-
-// Check that all breakpoints from group 1 were deleted and all the
-// rest are preserved.
-assertEquals([bp2, bp3, bp5].sort(), breakpointNumbers.sort());
-
-assertFalse(exception, "exception in listener");
+// Copyright 2008 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug +// Get the Debug object exposed from the debug context global object. +var Debug = debug.Debug + +// Simple function which stores the last debug event. +var listenerComplete = false; +var exception = false; + +var base_request = '"seq":0,"type":"request","command":"clearbreakpointgroup"'; +var scriptId = null; + +function safeEval(code) { + try { + return eval('(' + code + ')'); + } catch (e) { + assertEquals(void 0, e); + return undefined; + } +} + +function testArguments(dcp, arguments, success) { + var request = '{' + base_request + ',"arguments":' + arguments + '}' + var json_response = dcp.processDebugJSONRequest(request); + var response = safeEval(json_response); + if (success) { + assertTrue(response.success, json_response); + } else { + assertFalse(response.success, json_response); + } +} + +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + // Get the debug command processor. + var dcp = exec_state.debugCommandProcessor("unspecified_running_state"); + + // Clear breakpoint group 1. + testArguments(dcp, '{"groupId":1}', true); + + // Indicate that all was processed. + listenerComplete = true; + } else if (event == Debug.DebugEvent.AfterCompile) { + scriptId = event_data.script().id(); + assertEquals(source, event_data.script().source()); + } + } catch (e) { + exception = e + }; +}; + + +// Add the debug event listener. +Debug.setListener(listener); + +var source = 'function f(n) {\nreturn n+1;\n}\nfunction g() {return f(10);}' + + '\nvar r = g(); g;'; +eval(source); + +assertNotNull(scriptId); + +var groupId1 = 1; +var groupId2 = 2; +// Set a break point and call to invoke the debug event listener. 
+var bp1 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId1); +var bp2 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId2); +var bp3 = Debug.setScriptBreakPointById(scriptId, 1, null, null, null); +var bp4 = Debug.setScriptBreakPointById(scriptId, 3, null, null, groupId1); +var bp5 = Debug.setScriptBreakPointById(scriptId, 4, null, null, groupId2); + +assertEquals(5, Debug.scriptBreakPoints().length); + +// Call function 'g' from the compiled script to trigger breakpoint. +g(); + +// Make sure that the debug event listener vas invoked. +assertTrue(listenerComplete, + "listener did not run to completion: " + exception); + +var breakpoints = Debug.scriptBreakPoints(); +assertEquals(3, breakpoints.length); +var breakpointNumbers = breakpoints.map( + function(scriptBreakpoint) { return scriptBreakpoint.number(); }, + breakpointNumbers); + +// Check that all breakpoints from group 1 were deleted and all the +// rest are preserved. +assertEquals([bp2, bp3, bp5].sort(), breakpointNumbers.sort()); + +assertFalse(exception, "exception in listener"); diff --git a/deps/v8/test/mjsunit/debug-evaluate-bool-constructor.js b/deps/v8/test/mjsunit/debug-evaluate-bool-constructor.js index 809a5ccc69..d268091704 100644 --- a/deps/v8/test/mjsunit/debug-evaluate-bool-constructor.js +++ b/deps/v8/test/mjsunit/debug-evaluate-bool-constructor.js @@ -1,80 +1,80 @@ -// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var listenerComplete = false;
-var exception = false;
-
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- // Get the debug command processor.
- var dcp = exec_state.debugCommandProcessor();
-
- var request = {
- seq: 0,
- type: 'request',
- command: 'evaluate',
- arguments: {
- expression: 'a',
- frame: 0
- }
- };
- request = JSON.stringify(request);
-
- var resp = dcp.processDebugJSONRequest(request);
- var response = JSON.parse(resp);
- assertTrue(response.success, 'Command failed: ' + resp);
- assertEquals('object', response.body.type);
- assertEquals('Object', response.body.className);
-
- // Indicate that all was processed.
- listenerComplete = true;
- }
- } catch (e) {
- exception = e
- };
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-function callDebugger() {
- // Add set constructor field to a non-function value.
- var a = {constructor:true};
- debugger;
-}
-
-callDebugger();
-
-
-// Make sure that the debug event listener vas invoked.
-assertFalse(exception, "exception in listener")
-assertTrue(listenerComplete, "listener did not run to completion");
+// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug +// Get the Debug object exposed from the debug context global object. +Debug = debug.Debug + +var listenerComplete = false; +var exception = false; + +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + // Get the debug command processor. + var dcp = exec_state.debugCommandProcessor(); + + var request = { + seq: 0, + type: 'request', + command: 'evaluate', + arguments: { + expression: 'a', + frame: 0 + } + }; + request = JSON.stringify(request); + + var resp = dcp.processDebugJSONRequest(request); + var response = JSON.parse(resp); + assertTrue(response.success, 'Command failed: ' + resp); + assertEquals('object', response.body.type); + assertEquals('Object', response.body.className); + + // Indicate that all was processed. + listenerComplete = true; + } + } catch (e) { + exception = e + }; +}; + +// Add the debug event listener. +Debug.setListener(listener); + +function callDebugger() { + // Add set constructor field to a non-function value. + var a = {constructor:true}; + debugger; +} + +callDebugger(); + + +// Make sure that the debug event listener vas invoked. +assertFalse(exception, "exception in listener") +assertTrue(listenerComplete, "listener did not run to completion"); diff --git a/deps/v8/test/mjsunit/debug-references.js b/deps/v8/test/mjsunit/debug-references.js index 452761cf1c..ab6c6292e3 100644 --- a/deps/v8/test/mjsunit/debug-references.js +++ b/deps/v8/test/mjsunit/debug-references.js @@ -1,118 +1,118 @@ -// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-listenerComplete = false;
-exception = false;
-
-// The base part of all evaluate requests.
-var base_request = '"seq":0,"type":"request","command":"references"'
-
-function safeEval(code) {
- try {
- return eval('(' + code + ')');
- } catch (e) {
- assertEquals(void 0, e);
- return undefined;
- }
-}
-
-function testRequest(dcp, arguments, success, count) {
- // Generate request with the supplied arguments.
- var request;
- if (arguments) {
- request = '{' + base_request + ',"arguments":' + arguments + '}';
- } else {
- request = '{' + base_request + '}'
- }
-
- // Process the request and check expectation.
- var response = safeEval(dcp.processDebugJSONRequest(request));
- if (success) {
- assertTrue(response.success, request + ' -> ' + response.message);
- assertTrue(response.body instanceof Array);
- if (count) {
- assertEquals(count, response.body.length);
- } else {
- assertTrue(response.body.length > 0);
- }
- } else {
- assertFalse(response.success, request + ' -> ' + response.message);
- }
- assertEquals(response.running, dcp.isRunning(), request + ' -> expected not running');
-}
-
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- // Get the debug command processor.
- var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
-
- // Test some illegal references requests.
- testRequest(dcp, void 0, false);
- testRequest(dcp, '{"handle":"a"}', false);
- testRequest(dcp, '{"handle":1}', false);
- testRequest(dcp, '{"type":"referencedBy"}', false);
- testRequest(dcp, '{"type":"constructedBy"}', false);
-
- // Evaluate Point.
- var evaluate_point = '{"seq":0,"type":"request","command":"evaluate",' +
- '"arguments":{"expression":"Point"}}';
- var response = safeEval(dcp.processDebugJSONRequest(evaluate_point));
- assertTrue(response.success, "Evaluation of Point failed");
- var handle = response.body.handle;
-
- // Test some legal references requests.
- testRequest(dcp, '{"handle":' + handle + ',"type":"referencedBy"}', true);
- testRequest(dcp, '{"handle":' + handle + ',"type":"constructedBy"}',
- true, 2);
-
- // Indicate that all was processed.
- listenerComplete = true;
- }
- } catch (e) {
- exception = e
- };
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-// Test constructor and objects.
-function Point(x, y) { this.x_ = x; this.y_ = y;}
-p = new Point(0,0);
-q = new Point(1,2);
-
-// Enter debugger causing the event listener to be called.
-debugger;
-
-// Make sure that the debug event listener was invoked.
-assertFalse(exception, "exception in listener")
-assertTrue(listenerComplete, "listener did not run to completion");
+// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug +// Get the Debug object exposed from the debug context global object. +Debug = debug.Debug + +listenerComplete = false; +exception = false; + +// The base part of all evaluate requests. +var base_request = '"seq":0,"type":"request","command":"references"' + +function safeEval(code) { + try { + return eval('(' + code + ')'); + } catch (e) { + assertEquals(void 0, e); + return undefined; + } +} + +function testRequest(dcp, arguments, success, count) { + // Generate request with the supplied arguments. + var request; + if (arguments) { + request = '{' + base_request + ',"arguments":' + arguments + '}'; + } else { + request = '{' + base_request + '}' + } + + // Process the request and check expectation. + var response = safeEval(dcp.processDebugJSONRequest(request)); + if (success) { + assertTrue(response.success, request + ' -> ' + response.message); + assertTrue(response.body instanceof Array); + if (count) { + assertEquals(count, response.body.length); + } else { + assertTrue(response.body.length > 0); + } + } else { + assertFalse(response.success, request + ' -> ' + response.message); + } + assertEquals(response.running, dcp.isRunning(), request + ' -> expected not running'); +} + +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + // Get the debug command processor. + var dcp = exec_state.debugCommandProcessor("unspecified_running_state"); + + // Test some illegal references requests. + testRequest(dcp, void 0, false); + testRequest(dcp, '{"handle":"a"}', false); + testRequest(dcp, '{"handle":1}', false); + testRequest(dcp, '{"type":"referencedBy"}', false); + testRequest(dcp, '{"type":"constructedBy"}', false); + + // Evaluate Point. 
+ var evaluate_point = '{"seq":0,"type":"request","command":"evaluate",' + + '"arguments":{"expression":"Point"}}'; + var response = safeEval(dcp.processDebugJSONRequest(evaluate_point)); + assertTrue(response.success, "Evaluation of Point failed"); + var handle = response.body.handle; + + // Test some legal references requests. + testRequest(dcp, '{"handle":' + handle + ',"type":"referencedBy"}', true); + testRequest(dcp, '{"handle":' + handle + ',"type":"constructedBy"}', + true, 2); + + // Indicate that all was processed. + listenerComplete = true; + } + } catch (e) { + exception = e + }; +}; + +// Add the debug event listener. +Debug.setListener(listener); + +// Test constructor and objects. +function Point(x, y) { this.x_ = x; this.y_ = y;} +p = new Point(0,0); +q = new Point(1,2); + +// Enter debugger causing the event listener to be called. +debugger; + +// Make sure that the debug event listener was invoked. +assertFalse(exception, "exception in listener") +assertTrue(listenerComplete, "listener did not run to completion"); diff --git a/deps/v8/test/mjsunit/debug-stepin-accessor.js b/deps/v8/test/mjsunit/debug-stepin-accessor.js index 2e593b2863..2c9c8c324f 100644 --- a/deps/v8/test/mjsunit/debug-stepin-accessor.js +++ b/deps/v8/test/mjsunit/debug-stepin-accessor.js @@ -1,248 +1,248 @@ -// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var state = 1;
-var expected_source_line_text = null;
-var expected_function_name = null;
-
-// Simple debug event handler which first time will cause 'step in' action
-// to get into g.call and than check that execution is stopped inside
-// function 'g'.
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (state == 1) {
- exec_state.prepareStep(Debug.StepAction.StepIn, 2);
- state = 2;
- } else if (state == 2) {
- assertEquals(expected_source_line_text,
- event_data.sourceLineText());
- assertEquals(expected_function_name, event_data.func().name());
- state = 3;
- }
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-
-var c = {
- name: 'name ',
- get getter1() {
- return this.name; // getter 1
- },
- get getter2() {
- return { // getter 2
- 'a': c.name
- };
- },
- set setter1(n) {
- this.name = n; // setter 1
- }
-};
-
-c.__defineGetter__('y', function getterY() {
- return this.name; // getter y
-});
-
-c.__defineGetter__(3, function getter3() {
- return this.name; // getter 3
-});
-
-c.__defineSetter__('y', function setterY(n) {
- this.name = n; // setter y
-});
-
-c.__defineSetter__(3, function setter3(n) {
- this.name = n; // setter 3
-});
-
-var d = {
- 'c': c,
-};
-
-function testGetter1_1() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- debugger;
- var x = c.getter1;
-}
-
-function testGetter1_2() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- debugger;
- var x = c['getter1'];
-}
-
-function testGetter1_3() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- debugger;
- for (var i = 1; i < 2; i++) {
- var x = c['getter' + i];
- }
-}
-
-function testGetter1_4() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- debugger;
- var x = d.c.getter1;
-}
-
-function testGetter1_5() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- for (var i = 2; i != 1; i--);
- debugger;
- var x = d.c['getter' + i];
-}
-
-function testGetter2_1() {
- expected_function_name = 'getter2';
- expected_source_line_text = ' return { // getter 2';
- for (var i = 2; i != 1; i--);
- debugger;
- var t = d.c.getter2.name;
-}
-
-
-function testGetterY_1() {
- expected_function_name = 'getterY';
- expected_source_line_text = ' return this.name; // getter y';
- debugger;
- var t = d.c.y;
-}
-
-function testIndexedGetter3_1() {
- expected_function_name = 'getter3';
- expected_source_line_text = ' return this.name; // getter 3';
- debugger;
- var r = d.c[3];
-}
-
-function testSetterY_1() {
- expected_function_name = 'setterY';
- expected_source_line_text = ' this.name = n; // setter y';
- debugger;
- d.c.y = 'www';
-}
-
-function testIndexedSetter3_1() {
- expected_function_name = 'setter3';
- expected_source_line_text = ' this.name = n; // setter 3';
- var i = 3
- debugger;
- d.c[3] = 'www';
-}
-
-function testSetter1_1() {
- expected_function_name = 'setter1';
- expected_source_line_text = ' this.name = n; // setter 1';
- debugger;
- d.c.setter1 = 'aa';
-}
-
-function testSetter1_2() {
- expected_function_name = 'setter1';
- expected_source_line_text = ' this.name = n; // setter 1';
- debugger;
- d.c['setter1'] = 'bb';
-}
-
-function testSetter1_3() {
- expected_function_name = 'setter1';
- expected_source_line_text = ' this.name = n; // setter 1';
- for (var i = 2; i != 1; i--);
- debugger;
- d.c['setter' + i] = i;
-}
-
-var e = {
- name: 'e'
-};
-e.__proto__ = c;
-
-function testProtoGetter1_1() {
- expected_function_name = 'getter1';
- expected_source_line_text = ' return this.name; // getter 1';
- debugger;
- var x = e.getter1;
-}
-
-function testProtoSetter1_1() {
- expected_function_name = 'setter1';
- expected_source_line_text = ' this.name = n; // setter 1';
- debugger;
- e.setter1 = 'aa';
-}
-
-function testProtoIndexedGetter3_1() {
- expected_function_name = 'getter3';
- expected_source_line_text = ' return this.name; // getter 3';
- debugger;
- var x = e[3];
-}
-
-function testProtoIndexedSetter3_1() {
- expected_function_name = 'setter3';
- expected_source_line_text = ' this.name = n; // setter 3';
- debugger;
- e[3] = 'new val';
-}
-
-function testProtoSetter1_2() {
- expected_function_name = 'setter1';
- expected_source_line_text = ' this.name = n; // setter 1';
- for (var i = 2; i != 1; i--);
- debugger;
- e['setter' + i] = 'aa';
-}
-
-for (var n in this) {
- if (n.substr(0, 4) != 'test') {
- continue;
- }
- state = 1;
- this[n]();
- assertNull(exception);
- assertEquals(3, state);
-}
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2008 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug + +// Get the Debug object exposed from the debug context global object. +Debug = debug.Debug + +var exception = null; +var state = 1; +var expected_source_line_text = null; +var expected_function_name = null; + +// Simple debug event handler which first time will cause 'step in' action +// to get into g.call and than check that execution is stopped inside +// function 'g'. +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + if (state == 1) { + exec_state.prepareStep(Debug.StepAction.StepIn, 2); + state = 2; + } else if (state == 2) { + assertEquals(expected_source_line_text, + event_data.sourceLineText()); + assertEquals(expected_function_name, event_data.func().name()); + state = 3; + } + } + } catch(e) { + exception = e; + } +}; + +// Add the debug event listener. 
+Debug.setListener(listener); + + +var c = { + name: 'name ', + get getter1() { + return this.name; // getter 1 + }, + get getter2() { + return { // getter 2 + 'a': c.name + }; + }, + set setter1(n) { + this.name = n; // setter 1 + } +}; + +c.__defineGetter__('y', function getterY() { + return this.name; // getter y +}); + +c.__defineGetter__(3, function getter3() { + return this.name; // getter 3 +}); + +c.__defineSetter__('y', function setterY(n) { + this.name = n; // setter y +}); + +c.__defineSetter__(3, function setter3(n) { + this.name = n; // setter 3 +}); + +var d = { + 'c': c, +}; + +function testGetter1_1() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + debugger; + var x = c.getter1; +} + +function testGetter1_2() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + debugger; + var x = c['getter1']; +} + +function testGetter1_3() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + debugger; + for (var i = 1; i < 2; i++) { + var x = c['getter' + i]; + } +} + +function testGetter1_4() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + debugger; + var x = d.c.getter1; +} + +function testGetter1_5() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + for (var i = 2; i != 1; i--); + debugger; + var x = d.c['getter' + i]; +} + +function testGetter2_1() { + expected_function_name = 'getter2'; + expected_source_line_text = ' return { // getter 2'; + for (var i = 2; i != 1; i--); + debugger; + var t = d.c.getter2.name; +} + + +function testGetterY_1() { + expected_function_name = 'getterY'; + expected_source_line_text = ' return this.name; // getter y'; + debugger; + var t = d.c.y; +} + +function testIndexedGetter3_1() { + expected_function_name = 'getter3'; + expected_source_line_text = ' return this.name; // getter 3'; + debugger; + var r = d.c[3]; +} + +function testSetterY_1() { + expected_function_name = 'setterY'; + expected_source_line_text = ' this.name = n; // setter y'; + debugger; + d.c.y = 'www'; +} + +function testIndexedSetter3_1() { + expected_function_name = 'setter3'; + expected_source_line_text = ' this.name = n; // setter 3'; + var i = 3 + debugger; + d.c[3] = 'www'; +} + +function testSetter1_1() { + expected_function_name = 'setter1'; + expected_source_line_text = ' this.name = n; // setter 1'; + debugger; + d.c.setter1 = 'aa'; +} + +function testSetter1_2() { + expected_function_name = 'setter1'; + expected_source_line_text = ' this.name = n; // setter 1'; + debugger; + d.c['setter1'] = 'bb'; +} + +function testSetter1_3() { + expected_function_name = 'setter1'; + expected_source_line_text = ' this.name = n; // setter 1'; + for (var i = 2; i != 1; i--); + debugger; + d.c['setter' + i] = i; +} + +var e = { + name: 'e' +}; +e.__proto__ = c; + +function testProtoGetter1_1() { + expected_function_name = 'getter1'; + expected_source_line_text = ' return this.name; // getter 1'; + debugger; + var x = e.getter1; +} + +function testProtoSetter1_1() { + expected_function_name = 'setter1'; + expected_source_line_text = ' this.name = n; // setter 1'; + debugger; + e.setter1 = 'aa'; +} + +function testProtoIndexedGetter3_1() { + expected_function_name = 'getter3'; + expected_source_line_text = ' return this.name; // getter 3'; + debugger; + var x = e[3]; +} + +function testProtoIndexedSetter3_1() { + 
expected_function_name = 'setter3'; + expected_source_line_text = ' this.name = n; // setter 3'; + debugger; + e[3] = 'new val'; +} + +function testProtoSetter1_2() { + expected_function_name = 'setter1'; + expected_source_line_text = ' this.name = n; // setter 1'; + for (var i = 2; i != 1; i--); + debugger; + e['setter' + i] = 'aa'; +} + +for (var n in this) { + if (n.substr(0, 4) != 'test') { + continue; + } + state = 1; + this[n](); + assertNull(exception); + assertEquals(3, state); +} + +// Get rid of the debug event listener. +Debug.setListener(null); diff --git a/deps/v8/test/mjsunit/debug-stepin-builtin.js b/deps/v8/test/mjsunit/debug-stepin-builtin.js index c6a97eac01..d9c6061104 100644 --- a/deps/v8/test/mjsunit/debug-stepin-builtin.js +++ b/deps/v8/test/mjsunit/debug-stepin-builtin.js @@ -1,78 +1,78 @@ -// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var state = 1;
-var expected_source_line_text = null;
-var expected_function_name = null;
-
-// Simple debug event handler which first time will cause 'step in' action
-// and than check that execution is paused inside function
-// expected_function_name.
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (state == 1) {
- exec_state.prepareStep(Debug.StepAction.StepIn, 2);
- state = 2;
- } else if (state == 2) {
- assertEquals(expected_function_name, event_data.func().name());
- assertEquals(expected_source_line_text,
- event_data.sourceLineText());
- state = 3;
- }
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-var a = [1,2,3,4,5];
-
-// Test step into function call from a function without local variables.
-function testStepInArraySlice() {
- expected_function_name = 'testStepInArraySlice';
- expected_source_line_text = '} // expected line';
- debugger;
- var s = Array.prototype.slice.call(a, 2,3);
-} // expected line
-
-state = 1;
-testStepInArraySlice();
-assertNull(exception);
-assertEquals(3, state);
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug + +// Get the Debug object exposed from the debug context global object. +Debug = debug.Debug + +var exception = null; +var state = 1; +var expected_source_line_text = null; +var expected_function_name = null; + +// Simple debug event handler which first time will cause 'step in' action +// and than check that execution is paused inside function +// expected_function_name. +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + if (state == 1) { + exec_state.prepareStep(Debug.StepAction.StepIn, 2); + state = 2; + } else if (state == 2) { + assertEquals(expected_function_name, event_data.func().name()); + assertEquals(expected_source_line_text, + event_data.sourceLineText()); + state = 3; + } + } + } catch(e) { + exception = e; + } +}; + +// Add the debug event listener. +Debug.setListener(listener); + +var a = [1,2,3,4,5]; + +// Test step into function call from a function without local variables. +function testStepInArraySlice() { + expected_function_name = 'testStepInArraySlice'; + expected_source_line_text = '} // expected line'; + debugger; + var s = Array.prototype.slice.call(a, 2,3); +} // expected line + +state = 1; +testStepInArraySlice(); +assertNull(exception); +assertEquals(3, state); + +// Get rid of the debug event listener. +Debug.setListener(null); diff --git a/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js b/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js index 12f51429b0..c5cf8fdf3a 100644 --- a/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js +++ b/deps/v8/test/mjsunit/debug-stepin-call-function-stub.js @@ -1,115 +1,115 @@ -// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var state = 0;
-var expected_function_name = null;
-var expected_source_line_text = null;
-var expected_caller_source_line = null;
-var step_in_count = 2;
-
-// Simple debug event handler which first time will cause 'step in' action
-// to get into g.call and than check that execution is pauesed inside
-// function 'g'.
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (state == 0) {
- // Step into f().
- exec_state.prepareStep(Debug.StepAction.StepIn, step_in_count);
- state = 2;
- } else if (state == 2) {
- assertEquals(expected_source_line_text,
- event_data.sourceLineText());
- assertEquals(expected_function_name, event_data.func().name());
- state = 3;
- }
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-
-function g() {
- return "s"; // expected line
-}
-
-function testFunction() {
- var f = g;
- var s = 1 +f(10);
-}
-
-function g2() {
- return "s2"; // expected line
-}
-
-function testFunction2() {
- var f = g2;
- var s = 1 +f(10, 20);
-}
-
-// Run three times. First time the function will be compiled lazily,
-// second time cached version will be used.
-for (var i = 0; i < 3; i++) {
- state = 0;
- expected_function_name = 'g';
- expected_source_line_text = ' return "s"; // expected line';
- step_in_count = 2;
- // Set a break point and call to invoke the debug event listener.
- Debug.setBreakPoint(testFunction, 1, 0);
- testFunction();
- assertNull(exception);
- assertEquals(3, state);
-}
-
-// Test stepping into function call when a breakpoint is set at the place
-// of call. Use different pair of functions so that g2 is compiled lazily.
-// Run twice: first time function will be compiled lazily, second time
-// cached version will be used.
-for (var i = 0; i < 3; i++) {
- state = 0;
- expected_function_name = 'g2';
- expected_source_line_text = ' return "s2"; // expected line';
- step_in_count = 1;
- // Set a break point and call to invoke the debug event listener.
- Debug.setBreakPoint(testFunction2, 2, 0);
- testFunction2();
- assertNull(exception);
- assertEquals(3, state);
-}
-
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --expose-debug-as debug +// Get the Debug object exposed from the debug context global object. +Debug = debug.Debug + +var exception = null; +var state = 0; +var expected_function_name = null; +var expected_source_line_text = null; +var expected_caller_source_line = null; +var step_in_count = 2; + +// Simple debug event handler which first time will cause 'step in' action +// to get into g.call and than check that execution is pauesed inside +// function 'g'. +function listener(event, exec_state, event_data, data) { + try { + if (event == Debug.DebugEvent.Break) { + if (state == 0) { + // Step into f(). + exec_state.prepareStep(Debug.StepAction.StepIn, step_in_count); + state = 2; + } else if (state == 2) { + assertEquals(expected_source_line_text, + event_data.sourceLineText()); + assertEquals(expected_function_name, event_data.func().name()); + state = 3; + } + } + } catch(e) { + exception = e; + } +}; + +// Add the debug event listener. +Debug.setListener(listener); + + +function g() { + return "s"; // expected line +} + +function testFunction() { + var f = g; + var s = 1 +f(10); +} + +function g2() { + return "s2"; // expected line +} + +function testFunction2() { + var f = g2; + var s = 1 +f(10, 20); +} + +// Run three times. First time the function will be compiled lazily, +// second time cached version will be used. +for (var i = 0; i < 3; i++) { + state = 0; + expected_function_name = 'g'; + expected_source_line_text = ' return "s"; // expected line'; + step_in_count = 2; + // Set a break point and call to invoke the debug event listener. + Debug.setBreakPoint(testFunction, 1, 0); + testFunction(); + assertNull(exception); + assertEquals(3, state); +} + +// Test stepping into function call when a breakpoint is set at the place +// of call. Use different pair of functions so that g2 is compiled lazily. +// Run twice: first time function will be compiled lazily, second time +// cached version will be used. 
+for (var i = 0; i < 3; i++) { + state = 0; + expected_function_name = 'g2'; + expected_source_line_text = ' return "s2"; // expected line'; + step_in_count = 1; + // Set a break point and call to invoke the debug event listener. + Debug.setBreakPoint(testFunction2, 2, 0); + testFunction2(); + assertNull(exception); + assertEquals(3, state); +} + + +// Get rid of the debug event listener. +Debug.setListener(null); diff --git a/deps/v8/test/mjsunit/debug-stepin-function-call.js b/deps/v8/test/mjsunit/debug-stepin-function-call.js index 9f24c017c1..385fcb2f8b 100644 --- a/deps/v8/test/mjsunit/debug-stepin-function-call.js +++ b/deps/v8/test/mjsunit/debug-stepin-function-call.js @@ -1,149 +1,149 @@ -// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var state = 0;
-
-// Simple debug event handler which first time will cause 'step in' action
-// to get into g.call and than check that execution is pauesed inside
-// function 'g'.
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (state == 0) {
- // Step into f2.call:
- exec_state.prepareStep(Debug.StepAction.StepIn, 2);
- state = 2;
- } else if (state == 2) {
- assertEquals('g', event_data.func().name());
- assertEquals(' return t + 1; // expected line',
- event_data.sourceLineText());
- state = 3;
- }
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-
-// Sample functions.
-function g(t) {
- return t + 1; // expected line
-}
-
-// Test step into function call from a function without local variables.
-function call1() {
- debugger;
- g.call(null, 3);
-}
-
-
-// Test step into function call from a function with some local variables.
-function call2() {
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
- debugger;
- g.call(null, 3);
-}
-
-// Test step into function call which is a part of an expression.
-function call3() {
- var alias = g;
- debugger;
- var r = 10 + alias.call(null, 3);
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
-}
-
-// Test step into function call from a function with some local variables.
-function call4() {
- var alias = g;
- debugger;
- alias.call(null, 3);
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
-}
-
-// Test step into function apply from a function without local variables.
-function apply1() {
- debugger;
- g.apply(null, [3]);
-}
-
-
-// Test step into function apply from a function with some local variables.
-function apply2() {
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
- debugger;
- g.apply(null, [3, 4]);
-}
-
-// Test step into function apply which is a part of an expression.
-function apply3() {
- var alias = g;
- debugger;
- var r = 10 + alias.apply(null, [3, 'unused arg']);
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
-}
-
-// Test step into function apply from a function with some local variables.
-function apply4() {
- var alias = g;
- debugger;
- alias.apply(null, [3]);
- var aLocalVar = 'test';
- var anotherLocalVar = g(aLocalVar) + 's';
- var yetAnotherLocal = 10;
-}
-
-var testFunctions =
- [call1, call2, call3, call4, apply1, apply2, apply3, apply4];
-
-for (var i = 0; i < testFunctions.length; i++) {
- state = 0;
- testFunctions[i]();
- assertNull(exception);
- assertEquals(3, state);
-}
-
-// Get rid of the debug event listener.
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var state = 0;
+
+// Simple debug event handler which first time will cause 'step in' action
+// to get into g.call and than check that execution is pauesed inside
+// function 'g'.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (state == 0) {
+ // Step into f2.call:
+ exec_state.prepareStep(Debug.StepAction.StepIn, 2);
+ state = 2;
+ } else if (state == 2) {
+ assertEquals('g', event_data.func().name());
+ assertEquals(' return t + 1; // expected line',
+ event_data.sourceLineText());
+ state = 3;
+ }
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+// Sample functions.
+function g(t) {
+ return t + 1; // expected line
+}
+
+// Test step into function call from a function without local variables.
+function call1() {
+ debugger;
+ g.call(null, 3);
+}
+
+
+// Test step into function call from a function with some local variables.
+function call2() {
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+ debugger;
+ g.call(null, 3);
+}
+
+// Test step into function call which is a part of an expression.
+function call3() {
+ var alias = g;
+ debugger;
+ var r = 10 + alias.call(null, 3);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function call from a function with some local variables.
+function call4() {
+ var alias = g;
+ debugger;
+ alias.call(null, 3);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function apply from a function without local variables.
+function apply1() {
+ debugger;
+ g.apply(null, [3]);
+}
+
+
+// Test step into function apply from a function with some local variables.
+function apply2() {
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+ debugger;
+ g.apply(null, [3, 4]);
+}
+
+// Test step into function apply which is a part of an expression.
+function apply3() {
+ var alias = g;
+ debugger;
+ var r = 10 + alias.apply(null, [3, 'unused arg']);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function apply from a function with some local variables.
+function apply4() {
+ var alias = g;
+ debugger;
+ alias.apply(null, [3]);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+var testFunctions =
+ [call1, call2, call3, call4, apply1, apply2, apply3, apply4];
+
+for (var i = 0; i < testFunctions.length; i++) {
+ state = 0;
+ testFunctions[i]();
+ assertNull(exception);
+ assertEquals(3, state);
+}
+
+// Get rid of the debug event listener.
 Debug.setListener(null);
\ No newline at end of file
diff --git a/deps/v8/test/mjsunit/debug-stepnext-do-while.js b/deps/v8/test/mjsunit/debug-stepnext-do-while.js
index 17058a7b63..bbb18bc436 100644
--- a/deps/v8/test/mjsunit/debug-stepnext-do-while.js
+++ b/deps/v8/test/mjsunit/debug-stepnext-do-while.js
@@ -1,79 +1,79 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var break_break_point_hit_count = 0;
-
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (break_break_point_hit_count == 0) {
- assertEquals(' debugger;',
- event_data.sourceLineText());
- assertEquals('runDoWhile', event_data.func().name());
- } else if (break_break_point_hit_count == 1) {
- assertEquals(' } while(condition());',
- event_data.sourceLineText());
- assertEquals('runDoWhile', event_data.func().name());
- }
-
- break_break_point_hit_count++;
- // Continue stepping until returned to bottom frame.
- if (exec_state.frameCount() > 1) {
- exec_state.prepareStep(Debug.StepAction.StepNext);
- }
-
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-function condition() {
- return false;
-}
-
-function runDoWhile() {
- do {
- debugger;
- } while(condition());
-};
-
-break_break_point_hit_count = 0;
-runDoWhile();
-assertNull(exception);
-assertEquals(4, break_break_point_hit_count);
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var break_break_point_hit_count = 0;
+
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (break_break_point_hit_count == 0) {
+ assertEquals(' debugger;',
+ event_data.sourceLineText());
+ assertEquals('runDoWhile', event_data.func().name());
+ } else if (break_break_point_hit_count == 1) {
+ assertEquals(' } while(condition());',
+ event_data.sourceLineText());
+ assertEquals('runDoWhile', event_data.func().name());
+ }
+
+ break_break_point_hit_count++;
+ // Continue stepping until returned to bottom frame.
+ if (exec_state.frameCount() > 1) {
+ exec_state.prepareStep(Debug.StepAction.StepNext);
+ }
+
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+function condition() {
+ return false;
+}
+
+function runDoWhile() {
+ do {
+ debugger;
+ } while(condition());
+};
+
+break_break_point_hit_count = 0;
+runDoWhile();
+assertNull(exception);
+assertEquals(4, break_break_point_hit_count);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepout-recursive-function.js b/deps/v8/test/mjsunit/debug-stepout-recursive-function.js
index 2f8780c950..475fe26592 100644
--- a/deps/v8/test/mjsunit/debug-stepout-recursive-function.js
+++ b/deps/v8/test/mjsunit/debug-stepout-recursive-function.js
@@ -1,106 +1,106 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var step_out_count = 1;
-
-// Simple debug event handler which counts the number of breaks hit and steps.
-var break_point_hit_count = 0;
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- break_point_hit_count++;
- // Continue stepping until returned to bottom frame.
- if (exec_state.frameCount() > 1) {
- exec_state.prepareStep(Debug.StepAction.StepOut, step_out_count);
- }
-
- }
- } catch(e) {
- exception = e;
- }
-
-};
-
-function BeginTest(name) {
- test_name = name;
- break_point_hit_count = 0;
- exception = null;
-}
-
-function EndTest(expected_break_point_hit_count) {
- assertEquals(expected_break_point_hit_count, break_point_hit_count, test_name);
- assertNull(exception, test_name);
- test_name = null;
-}
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-
-var shouldBreak = null;
-function fact(x) {
- if (shouldBreak(x)) {
- debugger;
- }
- if (x < 2) {
- return 1;
- } else {
- return x*fact(x-1);
- }
-}
-
-BeginTest('Test 1');
-shouldBreak = function(x) { return x == 3; };
-step_out_count = 1;
-fact(3);
-EndTest(2);
-
-BeginTest('Test 2');
-shouldBreak = function(x) { return x == 2; };
-step_out_count = 1;
-fact(3);
-EndTest(3);
-
-BeginTest('Test 3');
-shouldBreak = function(x) { return x == 1; };
-step_out_count = 2;
-fact(3);
-EndTest(2);
-
-BeginTest('Test 4');
-shouldBreak = function(x) { print(x); return x == 1 || x == 3; };
-step_out_count = 2;
-fact(3);
-EndTest(3);
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var step_out_count = 1;
+
+// Simple debug event handler which counts the number of breaks hit and steps.
+var break_point_hit_count = 0;
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ break_point_hit_count++;
+ // Continue stepping until returned to bottom frame.
+ if (exec_state.frameCount() > 1) {
+ exec_state.prepareStep(Debug.StepAction.StepOut, step_out_count);
+ }
+
+ }
+ } catch(e) {
+ exception = e;
+ }
+
+};
+
+function BeginTest(name) {
+ test_name = name;
+ break_point_hit_count = 0;
+ exception = null;
+}
+
+function EndTest(expected_break_point_hit_count) {
+ assertEquals(expected_break_point_hit_count, break_point_hit_count, test_name);
+ assertNull(exception, test_name);
+ test_name = null;
+}
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+var shouldBreak = null;
+function fact(x) {
+ if (shouldBreak(x)) {
+ debugger;
+ }
+ if (x < 2) {
+ return 1;
+ } else {
+ return x*fact(x-1);
+ }
+}
+
+BeginTest('Test 1');
+shouldBreak = function(x) { return x == 3; };
+step_out_count = 1;
+fact(3);
+EndTest(2);
+
+BeginTest('Test 2');
+shouldBreak = function(x) { return x == 2; };
+step_out_count = 1;
+fact(3);
+EndTest(3);
+
+BeginTest('Test 3');
+shouldBreak = function(x) { return x == 1; };
+step_out_count = 2;
+fact(3);
+EndTest(2);
+
+BeginTest('Test 4');
+shouldBreak = function(x) { print(x); return x == 1 || x == 3; };
+step_out_count = 2;
+fact(3);
+EndTest(3);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepout-to-builtin.js b/deps/v8/test/mjsunit/debug-stepout-to-builtin.js
index 486eee0e4d..772fb4b5e8 100644
--- a/deps/v8/test/mjsunit/debug-stepout-to-builtin.js
+++ b/deps/v8/test/mjsunit/debug-stepout-to-builtin.js
@@ -1,84 +1,84 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --expose-debug-as debug
-
-// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
-
-var exception = null;
-var state = 1;
-var expected_source_line_text = null;
-var expected_function_name = null;
-
-// Simple debug event handler which first time will cause 'step out' action
-// and than check that execution is paused inside function
-// expected_function_name.
-function listener(event, exec_state, event_data, data) {
- try {
- if (event == Debug.DebugEvent.Break) {
- if (state == 1) {
- exec_state.prepareStep(Debug.StepAction.StepOut, 2);
- state = 2;
- } else if (state == 2) {
- assertEquals(expected_function_name, event_data.func().name());
- assertEquals(expected_source_line_text,
- event_data.sourceLineText());
- state = 3;
- }
- }
- } catch(e) {
- exception = e;
- }
-};
-
-// Add the debug event listener.
-Debug.setListener(listener);
-
-var obj = {key:10};
-
-function replacer(key, value) {
- if (key == 'key') {
- debugger;
- }
- return value;
-}
-
-// Test step into function call from a function without local variables.
-function testStepOutToBuiltIn() {
- expected_function_name = 'testStepOutToBuiltIn';
- expected_source_line_text = '} // expected line';
- JSON.stringify(obj, replacer);
-} // expected line
-
-state = 1;
-testStepOutToBuiltIn();
-assertNull(exception);
-assertEquals(3, state);
-
-// Get rid of the debug event listener.
-Debug.setListener(null);
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var state = 1;
+var expected_source_line_text = null;
+var expected_function_name = null;
+
+// Simple debug event handler which first time will cause 'step out' action
+// and than check that execution is paused inside function
+// expected_function_name.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (state == 1) {
+ exec_state.prepareStep(Debug.StepAction.StepOut, 2);
+ state = 2;
+ } else if (state == 2) {
+ assertEquals(expected_function_name, event_data.func().name());
+ assertEquals(expected_source_line_text,
+ event_data.sourceLineText());
+ state = 3;
+ }
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+var obj = {key:10};
+
+function replacer(key, value) {
+ if (key == 'key') {
+ debugger;
+ }
+ return value;
+}
+
+// Test step into function call from a function without local variables.
+function testStepOutToBuiltIn() {
+ expected_function_name = 'testStepOutToBuiltIn';
+ expected_source_line_text = '} // expected line';
+ JSON.stringify(obj, replacer);
+} // expected line
+
+state = 1;
+testStepOutToBuiltIn();
+assertNull(exception);
+assertEquals(3, state);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/global-deleted-property-keyed.js b/deps/v8/test/mjsunit/global-deleted-property-keyed.js
index e249fd32b8..1a1d3cb99b 100644
--- a/deps/v8/test/mjsunit/global-deleted-property-keyed.js
+++ b/deps/v8/test/mjsunit/global-deleted-property-keyed.js
@@ -1,38 +1,38 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-// Flags: --expose-natives_as natives
-// Test keyed access to deleted property in a global object without access checks.
-// Regression test that exposed the_hole value from Runtime_KeyedGetPoperty.
-
-var name = "fisk";
-natives[name] = name;
-function foo() { natives[name] + 12; }
-for(var i = 0; i < 3; i++) foo();
-delete natives[name];
-for(var i = 0; i < 3; i++) foo();
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Flags: --expose-natives_as natives
+// Test keyed access to deleted property in a global object without access checks.
+// Regression test that exposed the_hole value from Runtime_KeyedGetPoperty.
+
+var name = "fisk";
+natives[name] = name;
+function foo() { natives[name] + 12; }
+for(var i = 0; i < 3; i++) foo();
+delete natives[name];
+for(var i = 0; i < 3; i++) foo();
diff --git a/deps/v8/test/mjsunit/object-literal.js b/deps/v8/test/mjsunit/object-literal.js
index 0ad1968e16..397d670644 100644
--- a/deps/v8/test/mjsunit/object-literal.js
+++ b/deps/v8/test/mjsunit/object-literal.js
@@ -146,7 +146,7 @@ function testKeywordProperty(keyword) {
 eval("var " + keyword + " = 42;");
 assertUnreachable("Not a keyword: " + keyword);
 } catch (e) { }
-
+
 // Simple property, read and write.
 var x = eval("({" + keyword + ": 42})");
 assertEquals(42, x[keyword]);
@@ -154,7 +154,7 @@ function testKeywordProperty(keyword) {
 eval("x." + keyword + " = 37");
 assertEquals(37, x[keyword]);
 assertEquals(37, eval("x." + keyword));
-
+
 // Getter/setter property, read and write.
 var y = eval("({value : 42, get " + keyword + "(){return this.value},"
 + " set " + keyword + "(v) { this.value = v; }})");
@@ -163,12 +163,12 @@ function testKeywordProperty(keyword) {
 eval("y." + keyword + " = 37");
 assertEquals(37, y[keyword]);
 assertEquals(37, eval("y." + keyword));
-
+
 // Quoted keyword works is read back by unquoted as well.
 var z = eval("({\"" + keyword + "\": 42})");
 assertEquals(42, z[keyword]);
 assertEquals(42, eval("z." + keyword));
-
+
 // Function property, called.
 var was_called;
 function test_call() { this.was_called = true; was_called = true; }
@@ -187,26 +187,4 @@
 for (var i = 0; i < keywords.length; i++) {
 testKeywordProperty(keywords[i]);
-}
-
-// Test getter and setter properties with string/number literal names.
-
-var obj = {get 42() { return 42; },
- get 3.14() { return "PI"; },
- get "PI"() { return 3.14; },
- readback: 0,
- set 37(v) { this.readback = v; },
- set 1.44(v) { this.readback = v; },
- set "Poo"(v) { this.readback = v; }}
-
-assertEquals(42, obj[42]);
-assertEquals("PI", obj[3.14]);
-assertEquals(3.14, obj["PI"]);
-obj[37] = "t1";
-assertEquals("t1", obj.readback);
-obj[1.44] = "t2";
-assertEquals("t2", obj.readback);
-obj["Poo"] = "t3";
-assertEquals("t3", obj.readback);
-
-
+}
\ No newline at end of file
diff --git a/deps/v8/test/mjsunit/regexp-capture.js b/deps/v8/test/mjsunit/regexp-capture.js
index d4433d8cd2..dc24491d9c 100755
--- a/deps/v8/test/mjsunit/regexp-capture.js
+++ b/deps/v8/test/mjsunit/regexp-capture.js
@@ -1,57 +1,57 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Tests from http://blog.stevenlevithan.com/archives/npcg-javascript
-
-assertEquals(true, /(x)?\1y/.test("y"));
-assertEquals(["y", undefined], /(x)?\1y/.exec("y"));
-assertEquals(["y", undefined], /(x)?y/.exec("y"));
-assertEquals(["y", undefined], "y".match(/(x)?\1y/));
-assertEquals(["y", undefined], "y".match(/(x)?y/));
-assertEquals(["y"], "y".match(/(x)?\1y/g));
-assertEquals(["", undefined, ""], "y".split(/(x)?\1y/));
-assertEquals(["", undefined, ""], "y".split(/(x)?y/));
-assertEquals(0, "y".search(/(x)?\1y/));
-assertEquals("z", "y".replace(/(x)?\1y/, "z"));
-assertEquals("", "y".replace(/(x)?y/, "$1"));
-assertEquals("undefined", "y".replace(/(x)?\1y/,
- function($0, $1){
- return String($1);
- }));
-assertEquals("undefined", "y".replace(/(x)?y/,
- function($0, $1){
- return String($1);
- }));
-assertEquals("undefined", "y".replace(/(x)?y/,
- function($0, $1){
- return $1;
- }));
-
-// See https://bugzilla.mozilla.org/show_bug.cgi?id=476146
-assertEquals("bbc,b", /^(b+|a){1,2}?bc/.exec("bbc"));
-assertEquals("bbaa,a,,a", /((\3|b)\2(a)){2,}/.exec("bbaababbabaaaaabbaaaabba"));
-
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Tests from http://blog.stevenlevithan.com/archives/npcg-javascript
+
+assertEquals(true, /(x)?\1y/.test("y"));
+assertEquals(["y", undefined], /(x)?\1y/.exec("y"));
+assertEquals(["y", undefined], /(x)?y/.exec("y"));
+assertEquals(["y", undefined], "y".match(/(x)?\1y/));
+assertEquals(["y", undefined], "y".match(/(x)?y/));
+assertEquals(["y"], "y".match(/(x)?\1y/g));
+assertEquals(["", undefined, ""], "y".split(/(x)?\1y/));
+assertEquals(["", undefined, ""], "y".split(/(x)?y/));
+assertEquals(0, "y".search(/(x)?\1y/));
+assertEquals("z", "y".replace(/(x)?\1y/, "z"));
+assertEquals("", "y".replace(/(x)?y/, "$1"));
+assertEquals("undefined", "y".replace(/(x)?\1y/,
+ function($0, $1){
+ return String($1);
+ }));
+assertEquals("undefined", "y".replace(/(x)?y/,
+ function($0, $1){
+ return String($1);
+ }));
+assertEquals("undefined", "y".replace(/(x)?y/,
+ function($0, $1){
+ return $1;
+ }));
+
+// See https://bugzilla.mozilla.org/show_bug.cgi?id=476146
+assertEquals("bbc,b", /^(b+|a){1,2}?bc/.exec("bbc"));
+assertEquals("bbaa,a,,a", /((\3|b)\2(a)){2,}/.exec("bbaababbabaaaaabbaaaabba"));
+
diff --git a/deps/v8/test/mjsunit/regress/bitops-register-alias.js b/deps/v8/test/mjsunit/regress/bitops-register-alias.js
new file mode 100644
index 0000000000..389255df05
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/bitops-register-alias.js
@@ -0,0 +1,31 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the code generator can cope with left and right being in
+// the same register for bitops.
+function f() { for (var i = 10; i < 100; i++) { return i | i; } }
+assertEquals(10, f());
diff --git a/deps/v8/test/mjsunit/regress/regress-246.js b/deps/v8/test/mjsunit/regress/regress-246.js
index 4324b54041..09b746b7aa 100644
--- a/deps/v8/test/mjsunit/regress/regress-246.js
+++ b/deps/v8/test/mjsunit/regress/regress-246.js
@@ -1,31 +1,31 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// See: http://code.google.com/p/v8/issues/detail?id=246
-
-assertTrue(/(?:text)/.test("text"));
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=246
+
+assertTrue(/(?:text)/.test("text"));
 assertEquals(["text"], /(?:text)/.exec("text"));
\ No newline at end of file
diff --git a/deps/v8/test/sputnik/sputnik.status b/deps/v8/test/sputnik/sputnik.status
index 13108c0fb3..bc8c1e3992 100644
--- a/deps/v8/test/sputnik/sputnik.status
+++ b/deps/v8/test/sputnik/sputnik.status
@@ -183,8 +183,8 @@ S8.5_A2.1: PASS, FAIL if $system == linux, FAIL if $system == macos
 # These tests check for ES3 semantics, and differ from ES5.
 # When we follow ES5 semantics, it's ok to fail the test.
-# Allow keywords as names of properties in object initialisers and
-# in dot-notation property access.
+# Allow keywords as names of properties in object initialisers and
+# in dot-notation property access.
 S11.1.5_A4.1: FAIL_OK
 S11.1.5_A4.2: FAIL_OK
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index 839ae0bb0b..dbd94bf0b9 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -108,8 +108,6 @@
 'conditions': [
 [ 'gcc_version==44', {
 'cflags': [
-            # Avoid gcc 4.4 strict aliasing issues in dtoa.c
-            '-fno-strict-aliasing',
 # Avoid crashes with gcc 4.4 in the v8 test suite.
 '-fno-tree-vrp',
 ],
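
The debug-step* tests in this diff all share the same listener shape. A condensed sketch of that pattern, assuming only the mjsunit helpers and debug API already used above (Debug, prepareStep, assertEquals, assertNull); the caller function here is a hypothetical stand-in for the call1/apply1 variants, not code from the commit:

Debug = debug.Debug;           // exposed by --expose-debug-as debug

var exception = null;          // assertions inside the listener cannot fail the
var state = 0;                 // test directly, so failures are stashed here

function listener(event, exec_state, event_data, data) {
  try {
    if (event == Debug.DebugEvent.Break) {
      if (state == 0) {
        // First break (the 'debugger' statement): queue the stepping action.
        exec_state.prepareStep(Debug.StepAction.StepIn, 2);
        state = 2;
      } else if (state == 2) {
        // Second break: verify where the step landed.
        assertEquals('g', event_data.func().name());
        state = 3;
      }
    }
  } catch (e) {
    exception = e;
  }
}

Debug.setListener(listener);

function g(t) { return t + 1; }

function caller() {
  debugger;         // triggers the first break
  g.call(null, 3);  // two StepIn actions from the break should stop inside g
}

caller();
Debug.setListener(null);

// The listener runs inside the debugger, so its failures only surface here.
assertNull(exception);
assertEquals(3, state);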
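
The regexp-capture.js assertions above hinge on how JavaScript treats a capture group that never participates in a match (a "non-participating capture group"). A minimal sketch of that behaviour, using only standard regexp semantics and the same print helper seen in these tests:

// A group that does not participate captures undefined, and a backreference
// to it matches the empty string.
var m = /(x)?y/.exec("y");
print(m[0]);                        // "y"
print(m[1]);                        // undefined -- (x) did not participate
print(/(x)?\1y/.test("y"));         // true -- \1 matches the empty string
print("y".replace(/(x)?y/, "$1"));  // "" -- $1 substitutes the empty string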