Diffstat (limited to 'deps/v8/src/builtins.cc')
 -rw-r--r--  deps/v8/src/builtins.cc | 115
 1 file changed, 63 insertions(+), 52 deletions(-)
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc
index 3a0393efbc..b4f4a0611a 100644
--- a/deps/v8/src/builtins.cc
+++ b/deps/v8/src/builtins.cc
@@ -243,7 +243,7 @@ BUILTIN(ArrayCodeGeneric) {
}
-static Object* AllocateJSArray() {
+MUST_USE_RESULT static Object* AllocateJSArray() {
JSFunction* array_function =
Top::context()->global_context()->array_function();
Object* result = Heap::AllocateJSObject(array_function);
@@ -252,7 +252,7 @@ static Object* AllocateJSArray() {
}
-static Object* AllocateEmptyJSArray() {
+MUST_USE_RESULT static Object* AllocateEmptyJSArray() {
Object* result = AllocateJSArray();
if (result->IsFailure()) return result;
JSArray* result_array = JSArray::cast(result);
@@ -269,6 +269,7 @@ static void CopyElements(AssertNoAllocation* no_gc,
int src_index,
int len) {
ASSERT(dst != src); // Use MoveElements instead.
+ ASSERT(dst->map() != Heap::fixed_cow_array_map());
ASSERT(len > 0);
CopyWords(dst->data_start() + dst_index,
src->data_start() + src_index,
@@ -286,6 +287,7 @@ static void MoveElements(AssertNoAllocation* no_gc,
FixedArray* src,
int src_index,
int len) {
+ ASSERT(dst->map() != Heap::fixed_cow_array_map());
memmove(dst->data_start() + dst_index,
src->data_start() + src_index,
len * kPointerSize);
@@ -297,17 +299,17 @@ static void MoveElements(AssertNoAllocation* no_gc,
static void FillWithHoles(FixedArray* dst, int from, int to) {
+ ASSERT(dst->map() != Heap::fixed_cow_array_map());
MemsetPointer(dst->data_start() + from, Heap::the_hole_value(), to - from);
}
static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
- // For now this trick is only applied to fixed arrays in new space.
+ ASSERT(elms->map() != Heap::fixed_cow_array_map());
+ // For now this trick is only applied to fixed arrays in new and paged space.
// In large object space the object's start must coincide with chunk
// and thus the trick is just not applicable.
- // In old space we do not use this trick to avoid dealing with
- // region dirty marks.
- ASSERT(Heap::new_space()->Contains(elms));
+ ASSERT(!Heap::lo_space()->Contains(elms));
STATIC_ASSERT(FixedArray::kMapOffset == 0);
STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize);
@@ -317,6 +319,17 @@ static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
const int len = elms->length();
+ if (to_trim > FixedArray::kHeaderSize / kPointerSize &&
+ !Heap::new_space()->Contains(elms)) {
+ // If we are doing a big trim in old space then we zap the space that was
+ // formerly part of the array so that the GC (aided by the card-based
+ // remembered set) won't find pointers to new-space there.
+ Object** zap = reinterpret_cast<Object**>(elms->address());
+ zap++; // Header of filler must be at least one word so skip that.
+ for (int i = 1; i < to_trim; i++) {
+ *zap++ = Smi::FromInt(0);
+ }
+ }
// Technically in new space this write might be omitted (except for
// debug mode which iterates through the heap), but to play safer
// we still do it.
@@ -325,9 +338,8 @@ static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
former_start[to_trim] = Heap::fixed_array_map();
former_start[to_trim + 1] = Smi::FromInt(len - to_trim);
- ASSERT_EQ(elms->address() + to_trim * kPointerSize,
- (elms + to_trim * kPointerSize)->address());
- return elms + to_trim * kPointerSize;
+ return FixedArray::cast(HeapObject::FromAddress(
+ elms->address() + to_trim * kPointerSize));
}
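
The LeftTrimFixedArray changes above drop leading elements without copying the tail: the two-word array header is rewritten to_trim words further in, the abandoned prefix becomes a filler object, and for big trims outside new space the freed words are zapped with small integers so the card-based remembered set never sees stale pointers into new space. A minimal standalone sketch of the same idea, using raw words instead of V8 heap objects (Word, kFillerMap, kArrayMap and LeftTrim are illustrative names, not V8 API):

  // Simplified model of left-trimming: elms[0] = map, elms[1] = length,
  // elms[2..] = elements.  Not V8 code; plain words stand in for tagged values.
  #include <cassert>
  #include <cstdint>
  #include <cstdio>

  using Word = std::intptr_t;
  constexpr Word kFillerMap = 1;  // marks abandoned space so a heap walk skips it
  constexpr Word kArrayMap  = 2;  // marks a live array

  // Drops the first to_trim elements in place and returns the new start.
  Word* LeftTrim(Word* elms, int to_trim) {
    int len = static_cast<int>(elms[1]);
    assert(elms[0] == kArrayMap && to_trim > 0 && to_trim < len);
    elms[0] = kFillerMap;                           // header of the filler object
    for (int i = 1; i < to_trim; i++) elms[i] = 0;  // zap the words leaving the array
    elms[to_trim] = kArrayMap;                      // new map slot
    elms[to_trim + 1] = len - to_trim;              // new length
    return elms + to_trim;                          // the array now starts here
  }

  int main() {
    Word backing[8] = {kArrayMap, 6, 10, 11, 12, 13, 14, 15};
    Word* trimmed = LeftTrim(backing, 2);
    std::printf("length=%ld first=%ld\n",
                static_cast<long>(trimmed[1]), static_cast<long>(trimmed[2]));
    // prints: length=4 first=12
    return 0;
  }
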
@@ -348,33 +360,24 @@ static bool ArrayPrototypeHasNoElements(Context* global_context,
}
-static bool IsJSArrayWithFastElements(Object* receiver,
- FixedArray** elements) {
- if (!receiver->IsJSArray()) {
- return false;
- }
-
+static inline Object* EnsureJSArrayWithWritableFastElements(Object* receiver) {
+ if (!receiver->IsJSArray()) return NULL;
JSArray* array = JSArray::cast(receiver);
-
HeapObject* elms = HeapObject::cast(array->elements());
- if (elms->map() != Heap::fixed_array_map()) {
- return false;
+ if (elms->map() == Heap::fixed_array_map()) return elms;
+ if (elms->map() == Heap::fixed_cow_array_map()) {
+ return array->EnsureWritableFastElements();
}
-
- *elements = FixedArray::cast(elms);
- return true;
+ return NULL;
}
-static bool IsFastElementMovingAllowed(Object* receiver,
- FixedArray** elements) {
- if (!IsJSArrayWithFastElements(receiver, elements)) return false;
-
+static inline bool IsJSArrayFastElementMovingAllowed(JSArray* receiver) {
Context* global_context = Top::context()->global_context();
JSObject* array_proto =
JSObject::cast(global_context->array_function()->prototype());
- if (JSArray::cast(receiver)->GetPrototype() != array_proto) return false;
- return ArrayPrototypeHasNoElements(global_context, array_proto);
+ return receiver->GetPrototype() == array_proto &&
+ ArrayPrototypeHasNoElements(global_context, array_proto);
}
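
EnsureJSArrayWithWritableFastElements above returns NULL when the receiver is not a JSArray with fast elements, a Failure object if copying a copy-on-write backing store does not succeed, and otherwise a FixedArray that is safe to write through. A standalone sketch of the copy-on-write half of that contract, using an invented Backing type rather than V8's FixedArray:

  // Simplified copy-on-write "ensure writable" pattern; Backing and
  // EnsureWritable are illustrative, not V8 types.
  #include <cstdio>
  #include <memory>
  #include <vector>

  struct Backing {
    std::vector<int> data;
    bool copy_on_write = false;  // shared store (e.g. an array literal boilerplate)
  };

  // Returns a store that may be mutated: the original if it is already private,
  // or a fresh copy if it was shared copy-on-write.
  std::shared_ptr<Backing> EnsureWritable(const std::shared_ptr<Backing>& elms) {
    if (!elms->copy_on_write) return elms;
    auto copy = std::make_shared<Backing>(*elms);  // copy the elements
    copy->copy_on_write = false;
    return copy;
  }

  int main() {
    auto shared = std::make_shared<Backing>();
    shared->data = {1, 2, 3};
    shared->copy_on_write = true;    // pretend it backs a literal shared by many arrays
    auto writable = EnsureWritable(shared);
    writable->data.push_back(4);     // mutates the copy, never the shared store
    std::printf("shared=%zu writable=%zu\n", shared->data.size(), writable->data.size());
    // prints: shared=3 writable=4
    return 0;
  }
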
@@ -405,10 +408,10 @@ static Object* CallJsBuiltin(const char* name,
BUILTIN(ArrayPush) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsJSArrayWithFastElements(receiver, &elms)) {
- return CallJsBuiltin("ArrayPush", args);
- }
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj == NULL) return CallJsBuiltin("ArrayPush", args);
+ if (elms_obj->IsFailure()) return elms_obj;
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
int len = Smi::cast(array->length())->value();
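
ArrayPush above, and the builtins that follow, all handle the helper's result the same way: NULL means fall back to the generic JavaScript implementation, a failure object is returned unchanged so the caller can retry after a GC, and anything else is the writable backing store for the fast path. A small standalone sketch of that three-way convention (every name here is invented for illustration):

  // Three-way result handling: nullptr = unsupported receiver, a failure
  // sentinel = allocation problem to propagate, otherwise a usable value.
  #include <cstdio>

  struct Value { int tag; };
  static Value kFailureSentinel{-1};   // stands in for a V8 Failure object

  bool IsFailure(const Value* v) { return v == &kFailureSentinel; }

  Value* EnsureWritableElements(bool supported, bool alloc_ok) {
    if (!supported) return nullptr;            // not a fast-elements array
    if (!alloc_ok) return &kFailureSentinel;   // copying the COW store failed
    static Value elements{42};
    return &elements;                          // writable elements
  }

  const char* Push(bool supported, bool alloc_ok) {
    Value* elms = EnsureWritableElements(supported, alloc_ok);
    if (elms == nullptr) return "fall back to the JS builtin";
    if (IsFailure(elms)) return "propagate the failure (retry after GC)";
    return "take the fast path";
  }

  int main() {
    std::printf("%s\n%s\n%s\n",
                Push(false, true), Push(true, false), Push(true, true));
    return 0;
  }
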
@@ -454,10 +457,10 @@ BUILTIN(ArrayPush) {
BUILTIN(ArrayPop) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsJSArrayWithFastElements(receiver, &elms)) {
- return CallJsBuiltin("ArrayPop", args);
- }
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj == NULL) return CallJsBuiltin("ArrayPop", args);
+ if (elms_obj->IsFailure()) return elms_obj;
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
int len = Smi::cast(array->length())->value();
@@ -483,10 +486,13 @@ BUILTIN(ArrayPop) {
BUILTIN(ArrayShift) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsFastElementMovingAllowed(receiver, &elms)) {
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj->IsFailure()) return elms_obj;
+ if (elms_obj == NULL ||
+ !IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
return CallJsBuiltin("ArrayShift", args);
}
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
ASSERT(array->HasFastElements());
@@ -499,8 +505,8 @@ BUILTIN(ArrayShift) {
first = Heap::undefined_value();
}
- if (Heap::new_space()->Contains(elms)) {
- // As elms still in the same space they used to be (new space),
+ if (!Heap::lo_space()->Contains(elms)) {
+ // As elms still in the same space they used to be,
// there is no need to update region dirty mark.
array->set_elements(LeftTrimFixedArray(elms, 1), SKIP_WRITE_BARRIER);
} else {
@@ -519,10 +525,13 @@ BUILTIN(ArrayShift) {
BUILTIN(ArrayUnshift) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsFastElementMovingAllowed(receiver, &elms)) {
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj->IsFailure()) return elms_obj;
+ if (elms_obj == NULL ||
+ !IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
return CallJsBuiltin("ArrayUnshift", args);
}
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
ASSERT(array->HasFastElements());
@@ -568,10 +577,13 @@ BUILTIN(ArrayUnshift) {
BUILTIN(ArraySlice) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsFastElementMovingAllowed(receiver, &elms)) {
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj->IsFailure()) return elms_obj;
+ if (elms_obj == NULL ||
+ !IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
return CallJsBuiltin("ArraySlice", args);
}
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
ASSERT(array->HasFastElements());
@@ -637,10 +649,13 @@ BUILTIN(ArraySlice) {
BUILTIN(ArraySplice) {
Object* receiver = *args.receiver();
- FixedArray* elms = NULL;
- if (!IsFastElementMovingAllowed(receiver, &elms)) {
+ Object* elms_obj = EnsureJSArrayWithWritableFastElements(receiver);
+ if (elms_obj->IsFailure()) return elms_obj;
+ if (elms_obj == NULL ||
+ !IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
return CallJsBuiltin("ArraySplice", args);
}
+ FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
ASSERT(array->HasFastElements());
@@ -648,13 +663,9 @@ BUILTIN(ArraySplice) {
int n_arguments = args.length() - 1;
- // SpiderMonkey and JSC return undefined in the case where no
- // arguments are given instead of using the implicit undefined
- // arguments. This does not follow ECMA-262, but we do the same for
- // compatibility.
- // TraceMonkey follows ECMA-262 though.
+ // Return empty array when no arguments are supplied.
if (n_arguments == 0) {
- return Heap::undefined_value();
+ return AllocateEmptyJSArray();
}
int relative_start = 0;
@@ -717,7 +728,7 @@ BUILTIN(ArraySplice) {
if (item_count < actual_delete_count) {
// Shrink the array.
- const bool trim_array = Heap::new_space()->Contains(elms) &&
+ const bool trim_array = !Heap::lo_space()->Contains(elms) &&
((actual_start + item_count) <
(len - actual_delete_count - actual_start));
if (trim_array) {