Diffstat (limited to 'deps/v8')
-rw-r--r--  deps/v8/ChangeLog | 35
-rw-r--r--  deps/v8/SConstruct | 25
-rw-r--r--  deps/v8/include/v8.h | 25
-rw-r--r--  deps/v8/preparser/SConscript | 4
-rw-r--r--  deps/v8/samples/SConscript | 4
-rwxr-xr-x  deps/v8/src/SConscript | 9
-rw-r--r--  deps/v8/src/api.cc | 214
-rw-r--r--  deps/v8/src/arm/code-stubs-arm.cc | 97
-rw-r--r--  deps/v8/src/arm/code-stubs-arm.h | 57
-rw-r--r--  deps/v8/src/arm/codegen-arm.h | 2
-rw-r--r--  deps/v8/src/arm/full-codegen-arm.cc | 17
-rw-r--r--  deps/v8/src/arm/ic-arm.cc | 99
-rw-r--r--  deps/v8/src/arm/lithium-codegen-arm.cc | 8
-rw-r--r--  deps/v8/src/arm/macro-assembler-arm.cc | 94
-rw-r--r--  deps/v8/src/arm/macro-assembler-arm.h | 10
-rw-r--r--  deps/v8/src/arm/regexp-macro-assembler-arm.h | 3
-rw-r--r--  deps/v8/src/arm/stub-cache-arm.cc | 55
-rw-r--r--  deps/v8/src/code-stubs.cc | 122
-rw-r--r--  deps/v8/src/code-stubs.h | 173
-rw-r--r--  deps/v8/src/codegen.cc | 4
-rw-r--r--  deps/v8/src/conversions-inl.h | 53
-rw-r--r--  deps/v8/src/conversions.cc | 20
-rw-r--r--  deps/v8/src/conversions.h | 2
-rw-r--r--  deps/v8/src/cpu-profiler-inl.h | 4
-rw-r--r--  deps/v8/src/cpu-profiler.cc | 12
-rw-r--r--  deps/v8/src/cpu-profiler.h | 10
-rw-r--r--  deps/v8/src/d8.cc | 437
-rw-r--r--  deps/v8/src/d8.gyp | 3
-rw-r--r--  deps/v8/src/d8.h | 91
-rw-r--r--  deps/v8/src/debug.cc | 7
-rw-r--r--  deps/v8/src/debug.h | 3
-rw-r--r--  deps/v8/src/deoptimizer.cc | 52
-rw-r--r--  deps/v8/src/deoptimizer.h | 36
-rw-r--r--  deps/v8/src/flag-definitions.h | 19
-rw-r--r--  deps/v8/src/frames.cc | 4
-rw-r--r--  deps/v8/src/frames.h | 2
-rw-r--r--  deps/v8/src/full-codegen.h | 2
-rw-r--r--  deps/v8/src/handles.cc | 5
-rw-r--r--  deps/v8/src/heap-profiler.cc | 11
-rw-r--r--  deps/v8/src/heap-profiler.h | 8
-rw-r--r--  deps/v8/src/heap.cc | 74
-rw-r--r--  deps/v8/src/heap.h | 10
-rw-r--r--  deps/v8/src/hydrogen-instructions.cc | 15
-rw-r--r--  deps/v8/src/hydrogen-instructions.h | 14
-rw-r--r--  deps/v8/src/ia32/code-stubs-ia32.cc | 86
-rw-r--r--  deps/v8/src/ia32/code-stubs-ia32.h | 53
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.h | 2
-rw-r--r--  deps/v8/src/ia32/full-codegen-ia32.cc | 18
-rw-r--r--  deps/v8/src/ia32/ic-ia32.cc | 118
-rw-r--r--  deps/v8/src/ia32/lithium-codegen-ia32.cc | 1
-rw-r--r--  deps/v8/src/ia32/lithium-gap-resolver-ia32.cc | 9
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.cc | 98
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.h | 9
-rw-r--r--  deps/v8/src/ia32/regexp-macro-assembler-ia32.h | 3
-rw-r--r--  deps/v8/src/ia32/stub-cache-ia32.cc | 69
-rw-r--r--  deps/v8/src/ic.cc | 41
-rw-r--r--  deps/v8/src/ic.h | 21
-rw-r--r--  deps/v8/src/isolate.cc | 9
-rw-r--r--  deps/v8/src/isolate.h | 41
-rw-r--r--  deps/v8/src/jsregexp.cc | 1
-rw-r--r--  deps/v8/src/jsregexp.h | 1
-rw-r--r--  deps/v8/src/lithium-allocator.cc | 2
-rw-r--r--  deps/v8/src/log-inl.h | 4
-rw-r--r--  deps/v8/src/log-utils.cc | 168
-rw-r--r--  deps/v8/src/log-utils.h | 105
-rw-r--r--  deps/v8/src/log.cc | 132
-rw-r--r--  deps/v8/src/log.h | 16
-rw-r--r--  deps/v8/src/mark-compact.cc | 53
-rw-r--r--  deps/v8/src/messages.js | 1
-rw-r--r--  deps/v8/src/mips/assembler-mips.h | 9
-rw-r--r--  deps/v8/src/mips/builtins-mips.cc | 5
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc | 162
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.h | 63
-rw-r--r--  deps/v8/src/mips/codegen-mips.h | 2
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc | 67
-rw-r--r--  deps/v8/src/mips/ic-mips.cc | 116
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.cc | 542
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.h | 116
-rw-r--r--  deps/v8/src/mips/regexp-macro-assembler-mips.cc | 4
-rw-r--r--  deps/v8/src/mips/regexp-macro-assembler-mips.h | 7
-rw-r--r--  deps/v8/src/mips/stub-cache-mips.cc | 58
-rw-r--r--  deps/v8/src/mksnapshot.cc | 3
-rw-r--r--  deps/v8/src/objects-visiting.h | 16
-rw-r--r--  deps/v8/src/objects.cc | 119
-rw-r--r--  deps/v8/src/objects.h | 7
-rw-r--r--  deps/v8/src/parser.cc | 38
-rw-r--r--  deps/v8/src/platform-cygwin.cc | 23
-rw-r--r--  deps/v8/src/platform-freebsd.cc | 21
-rw-r--r--  deps/v8/src/platform-linux.cc | 24
-rw-r--r--  deps/v8/src/platform-macos.cc | 19
-rw-r--r--  deps/v8/src/platform-nullos.cc | 16
-rw-r--r--  deps/v8/src/platform-openbsd.cc | 21
-rw-r--r--  deps/v8/src/platform-posix.cc | 15
-rw-r--r--  deps/v8/src/platform-solaris.cc | 21
-rw-r--r--  deps/v8/src/platform-tls.h | 2
-rw-r--r--  deps/v8/src/platform-win32.cc | 81
-rw-r--r--  deps/v8/src/platform.h | 12
-rw-r--r--  deps/v8/src/profile-generator-inl.h | 4
-rw-r--r--  deps/v8/src/profile-generator.cc | 4
-rw-r--r--  deps/v8/src/profile-generator.h | 4
-rw-r--r--  deps/v8/src/property.h | 1
-rw-r--r--  deps/v8/src/proxy.js | 12
-rw-r--r--  deps/v8/src/runtime-profiler.cc | 12
-rw-r--r--  deps/v8/src/runtime.cc | 50
-rw-r--r--  deps/v8/src/runtime.h | 16
-rw-r--r--  deps/v8/src/scopeinfo.cc | 2
-rw-r--r--  deps/v8/src/scopes.cc | 2
-rw-r--r--  deps/v8/src/serialize.cc | 1
-rw-r--r--  deps/v8/src/spaces-inl.h | 29
-rw-r--r--  deps/v8/src/spaces.cc | 81
-rw-r--r--  deps/v8/src/spaces.h | 51
-rw-r--r--  deps/v8/src/string-stream.cc | 4
-rw-r--r--  deps/v8/src/stub-cache.cc | 35
-rw-r--r--  deps/v8/src/stub-cache.h | 8
-rw-r--r--  deps/v8/src/type-info.h | 1
-rw-r--r--  deps/v8/src/v8globals.h | 11
-rw-r--r--  deps/v8/src/v8natives.js | 121
-rw-r--r--  deps/v8/src/v8utils.cc | 52
-rw-r--r--  deps/v8/src/v8utils.h | 3
-rw-r--r--  deps/v8/src/version.cc | 4
-rw-r--r--  deps/v8/src/vm-state-inl.h | 41
-rw-r--r--  deps/v8/src/vm-state.h | 11
-rw-r--r--  deps/v8/src/x64/code-stubs-x64.cc | 86
-rw-r--r--  deps/v8/src/x64/code-stubs-x64.h | 53
-rw-r--r--  deps/v8/src/x64/codegen-x64.h | 2
-rw-r--r--  deps/v8/src/x64/full-codegen-x64.cc | 16
-rw-r--r--  deps/v8/src/x64/ic-x64.cc | 108
-rw-r--r--  deps/v8/src/x64/lithium-codegen-x64.cc | 1
-rw-r--r--  deps/v8/src/x64/macro-assembler-x64.cc | 103
-rw-r--r--  deps/v8/src/x64/macro-assembler-x64.h | 9
-rw-r--r--  deps/v8/src/x64/regexp-macro-assembler-x64.h | 6
-rw-r--r--  deps/v8/src/x64/stub-cache-x64.cc | 53
-rw-r--r--  deps/v8/src/zone.h | 2
-rw-r--r--  deps/v8/test/cctest/SConscript | 5
-rw-r--r--  deps/v8/test/cctest/cctest.gyp | 1
-rw-r--r--  deps/v8/test/cctest/log-eq-of-logging-and-traversal.js | 191
-rw-r--r--  deps/v8/test/cctest/test-api.cc | 10
-rw-r--r--  deps/v8/test/cctest/test-cpu-profiler.cc | 4
-rw-r--r--  deps/v8/test/cctest/test-heap-profiler.cc | 4
-rw-r--r--  deps/v8/test/cctest/test-lockers.cc | 12
-rw-r--r--  deps/v8/test/cctest/test-log-stack-tracer.cc | 4
-rw-r--r--  deps/v8/test/cctest/test-log-utils.cc | 140
-rw-r--r--  deps/v8/test/cctest/test-log.cc | 867
-rw-r--r--  deps/v8/test/cctest/test-profile-generator.cc | 4
-rw-r--r--  deps/v8/test/cctest/test-regexp.cc | 6
-rw-r--r--  deps/v8/test/message/regress/regress-1527.js | 33
-rw-r--r--  deps/v8/test/message/regress/regress-1527.out | 32
-rw-r--r--  deps/v8/test/mjsunit/debug-backtrace.js | 3
-rw-r--r--  deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js | 33
-rw-r--r--  deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js | 31
-rw-r--r--  deps/v8/test/mjsunit/harmony/proxies.js | 236
-rw-r--r--  deps/v8/test/mjsunit/polymorph-arrays.js | 177
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-1531.js | 49
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-88591.js | 42
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-88858.js | 65
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-regexp-codeflush.js | 55
-rw-r--r--  deps/v8/test/mjsunit/tools/profile_view.js | 3
-rw-r--r--  deps/v8/test/test262/README | 4
-rw-r--r--  deps/v8/test/test262/harness-adapt.js | 2
-rw-r--r--  deps/v8/test/test262/test262.status | 286
-rw-r--r--  deps/v8/tools/codemap.js | 8
-rw-r--r--  deps/v8/tools/gcmole/gcmole.lua | 2
-rw-r--r--  deps/v8/tools/gyp/v8.gyp | 15
-rw-r--r--  deps/v8/tools/profile.js | 48
-rw-r--r--  deps/v8/tools/splaytree.js | 11
165 files changed, 3693 insertions, 4290 deletions
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index ad57e3317d..7e7df06809 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,36 @@
+2011-07-13: Version 3.4.12
+
+ Added --prof profiling option to d8 shell.
+
+ Fixed a bug where reading a directory in d8 shell hangs (issue 1533).
+
+ Fixed a potential assertion failure in const declarations.
+
+ Fixed an assertion failure in descriptor arrays (issue 1526).
+
+ Enabled fast thread-local storage by default on supported platforms.
+
+ Improved reporting of source position for global variable loads
+ (issue 1527).
+
+
+2011-07-11: Version 3.4.11
+
+ Fixed MinGW32 build.
+
+ Fixed a GC bug with RegExp code flushing.
+
+ Implemented Object.defineProperty for proxies.
+
+ Fixed a bug in for/in iteration of arguments objects (issue 1531).
+
+ Added debugger support for inspecting optimized frames (issue 1140).
+
+ Allowed JSObject::PreventExtensions to work for arguments objects.
+
+ Bugfixes and performance work.
+
+
2011-07-06: Version 3.4.10
Fixed debugger not breaking on certain "if" statements (issue 1523).
@@ -36,7 +69,7 @@
Ensure 16-byte stack alignment on Solaris (issue 1505).
- Fix "illegal access" when calling parseInt with a radix
+ Fix "illegal access" when calling parseInt with a radix
that is not a smi. (issue 1246).
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 7ee9f136ab..5276ce2cae 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -60,26 +60,17 @@ LIBRARY_FLAGS = {
'mode:debug': {
'CPPDEFINES': ['V8_ENABLE_CHECKS', 'OBJECT_PRINT']
},
- 'vmstate:on': {
- 'CPPDEFINES': ['ENABLE_VMSTATE_TRACKING'],
- },
'objectprint:on': {
'CPPDEFINES': ['OBJECT_PRINT'],
},
- 'protectheap:on': {
- 'CPPDEFINES': ['ENABLE_VMSTATE_TRACKING', 'ENABLE_HEAP_PROTECTION'],
- },
- 'profilingsupport:on': {
- 'CPPDEFINES': ['ENABLE_VMSTATE_TRACKING', 'ENABLE_LOGGING_AND_PROFILING'],
- },
'debuggersupport:on': {
'CPPDEFINES': ['ENABLE_DEBUGGER_SUPPORT'],
},
'inspector:on': {
'CPPDEFINES': ['INSPECTOR'],
},
- 'fasttls:on': {
- 'CPPDEFINES': ['V8_FAST_TLS'],
+ 'fasttls:off': {
+ 'CPPDEFINES': ['V8_NO_FAST_TLS'],
},
'liveobjectlist:on': {
'CPPDEFINES': ['ENABLE_DEBUGGER_SUPPORT', 'INSPECTOR',
@@ -929,21 +920,11 @@ SIMPLE_OPTIONS = {
'default': 'static',
'help': 'the type of library to produce'
},
- 'vmstate': {
- 'values': ['on', 'off'],
- 'default': 'off',
- 'help': 'enable VM state tracking'
- },
'objectprint': {
'values': ['on', 'off'],
'default': 'off',
'help': 'enable object printing'
},
- 'protectheap': {
- 'values': ['on', 'off'],
- 'default': 'off',
- 'help': 'enable heap protection'
- },
'profilingsupport': {
'values': ['on', 'off'],
'default': 'on',
@@ -1425,7 +1406,7 @@ def BuildSpecific(env, mode, env_overrides, tools):
preparser_object = preparser_env.SConscript(
join('preparser', 'SConscript'),
build_dir=join('obj', 'preparser', target_id),
- exports='context',
+ exports='context tools',
duplicate=False
)
preparser_name = join('obj', 'preparser', target_id, 'preparser')
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index fb10c71576..0872411067 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -2984,31 +2984,6 @@ class V8EXPORT V8 {
static bool IsProfilerPaused();
/**
- * If logging is performed into a memory buffer (via --logfile=*), allows to
- * retrieve previously written messages. This can be used for retrieving
- * profiler log data in the application. This function is thread-safe.
- *
- * Caller provides a destination buffer that must exist during GetLogLines
- * call. Only whole log lines are copied into the buffer.
- *
- * \param from_pos specified a point in a buffer to read from, 0 is the
- * beginning of a buffer. It is assumed that caller updates its current
- * position using returned size value from the previous call.
- * \param dest_buf destination buffer for log data.
- * \param max_size size of the destination buffer.
- * \returns actual size of log data copied into buffer.
- */
- static int GetLogLines(int from_pos, char* dest_buf, int max_size);
-
- /**
- * The minimum allowed size for a log lines buffer. If the size of
- * the buffer given will not be enough to hold a line of the maximum
- * length, an attempt to find a log line end in GetLogLines will
- * fail, and an empty result will be returned.
- */
- static const int kMinimumSizeForLogLinesBuffer = 2048;
-
- /**
* Retrieve the V8 thread id of the calling thread.
*
* The thread id for a thread should only be retrieved after the V8
diff --git a/deps/v8/preparser/SConscript b/deps/v8/preparser/SConscript
index 1d51e826cc..10b3953193 100644
--- a/deps/v8/preparser/SConscript
+++ b/deps/v8/preparser/SConscript
@@ -26,10 +26,10 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from os.path import join
-Import('context')
+Import('context tools')
def ConfigureObjectFiles():
- env = Environment()
+ env = Environment(tools=tools)
env.Replace(**context.flags['preparser'])
context.ApplyEnvOverrides(env)
return env.Object('preparser-process.cc')
diff --git a/deps/v8/samples/SConscript b/deps/v8/samples/SConscript
index 31990b681b..84c48c9047 100644
--- a/deps/v8/samples/SConscript
+++ b/deps/v8/samples/SConscript
@@ -26,10 +26,10 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from os.path import join
-Import('sample context')
+Import('sample context tools')
def ConfigureObjectFiles():
- env = Environment()
+ env = Environment(tools=tools)
env.Replace(**context.flags['sample'])
context.ApplyEnvOverrides(env)
return env.Object(sample + '.cc')
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index 4b0ba16341..6b3059aea6 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -231,15 +231,11 @@ SOURCES = {
PREPARSER_SOURCES = {
'all': Split("""
allocation.cc
- bignum.cc
- cached-powers.cc
- conversions.cc
hashmap.cc
preparse-data.cc
preparser.cc
preparser-api.cc
scanner-base.cc
- strtod.cc
token.cc
unicode.cc
utils.cc
@@ -317,10 +313,7 @@ def ConfigureObjectFiles():
env.Replace(**context.flags['v8'])
context.ApplyEnvOverrides(env)
env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C)
- if 'ENABLE_LOGGING_AND_PROFILING' in env['CPPDEFINES']:
- env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')
- else:
- env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET')
+ env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')
def BuildJS2CEnv(type):
js2c_env = { 'TYPE': type, 'COMPRESSION': 'off' }
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 71a715c1bb..dc1f90c0e2 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -54,16 +54,11 @@
#define LOG_API(isolate, expr) LOG(isolate, ApiEntryCall(expr))
-#ifdef ENABLE_VMSTATE_TRACKING
#define ENTER_V8(isolate) \
ASSERT((isolate)->IsInitialized()); \
i::VMState __state__((isolate), i::OTHER)
#define LEAVE_V8(isolate) \
i::VMState __state__((isolate), i::EXTERNAL)
-#else
-#define ENTER_V8(isolate) ((void) 0)
-#define LEAVE_V8(isolate) ((void) 0)
-#endif
namespace v8 {
@@ -114,9 +109,7 @@ namespace v8 {
static void DefaultFatalErrorHandler(const char* location,
const char* message) {
-#ifdef ENABLE_VMSTATE_TRACKING
i::VMState __state__(i::Isolate::Current(), i::OTHER);
-#endif
API_Fatal(location, message);
}
@@ -4832,37 +4825,20 @@ void V8::RemoveMemoryAllocationCallback(MemoryAllocationCallback callback) {
void V8::PauseProfiler() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
isolate->logger()->PauseProfiler();
-#endif
}
void V8::ResumeProfiler() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
isolate->logger()->ResumeProfiler();
-#endif
}
bool V8::IsProfilerPaused() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
return isolate->logger()->IsProfilerPaused();
-#else
- return true;
-#endif
-}
-
-
-int V8::GetLogLines(int from_pos, char* dest_buf, int max_size) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
- ASSERT(max_size >= kMinimumSizeForLogLinesBuffer);
- return LOGGER->GetLogLines(from_pos, dest_buf, max_size);
-#endif
- return 0;
}
@@ -5327,7 +5303,6 @@ Local<Context> Debug::GetDebugContext() {
Handle<String> CpuProfileNode::GetFunctionName() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetFunctionName");
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
@@ -5340,117 +5315,77 @@ Handle<String> CpuProfileNode::GetFunctionName() const {
isolate->factory()->LookupAsciiSymbol(entry->name_prefix()),
isolate->factory()->LookupAsciiSymbol(entry->name()))));
}
-#else
- return v8::String::Empty();
-#endif
}
Handle<String> CpuProfileNode::GetScriptResourceName() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetScriptResourceName");
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
return Handle<String>(ToApi<String>(isolate->factory()->LookupAsciiSymbol(
node->entry()->resource_name())));
-#else
- return v8::String::Empty();
-#endif
}
int CpuProfileNode::GetLineNumber() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetLineNumber");
return reinterpret_cast<const i::ProfileNode*>(this)->entry()->line_number();
-#else
- return 0;
-#endif
}
double CpuProfileNode::GetTotalTime() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetTotalTime");
return reinterpret_cast<const i::ProfileNode*>(this)->GetTotalMillis();
-#else
- return 0.0;
-#endif
}
double CpuProfileNode::GetSelfTime() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetSelfTime");
return reinterpret_cast<const i::ProfileNode*>(this)->GetSelfMillis();
-#else
- return 0.0;
-#endif
}
double CpuProfileNode::GetTotalSamplesCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetTotalSamplesCount");
return reinterpret_cast<const i::ProfileNode*>(this)->total_ticks();
-#else
- return 0.0;
-#endif
}
double CpuProfileNode::GetSelfSamplesCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetSelfSamplesCount");
return reinterpret_cast<const i::ProfileNode*>(this)->self_ticks();
-#else
- return 0.0;
-#endif
}
unsigned CpuProfileNode::GetCallUid() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetCallUid");
return reinterpret_cast<const i::ProfileNode*>(this)->entry()->GetCallUid();
-#else
- return 0;
-#endif
}
int CpuProfileNode::GetChildrenCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetChildrenCount");
return reinterpret_cast<const i::ProfileNode*>(this)->children()->length();
-#else
- return 0;
-#endif
}
const CpuProfileNode* CpuProfileNode::GetChild(int index) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfileNode::GetChild");
const i::ProfileNode* child =
reinterpret_cast<const i::ProfileNode*>(this)->children()->at(index);
return reinterpret_cast<const CpuProfileNode*>(child);
-#else
- return NULL;
-#endif
}
void CpuProfile::Delete() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfile::Delete");
i::CpuProfiler::DeleteProfile(reinterpret_cast<i::CpuProfile*>(this));
@@ -5459,153 +5394,109 @@ void CpuProfile::Delete() {
// If this was the last profile, clean up all accessory data as well.
i::CpuProfiler::DeleteAllProfiles();
}
-#endif
}
unsigned CpuProfile::GetUid() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfile::GetUid");
return reinterpret_cast<const i::CpuProfile*>(this)->uid();
-#else
- return 0;
-#endif
}
Handle<String> CpuProfile::GetTitle() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfile::GetTitle");
const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
return Handle<String>(ToApi<String>(isolate->factory()->LookupAsciiSymbol(
profile->title())));
-#else
- return v8::String::Empty();
-#endif
}
const CpuProfileNode* CpuProfile::GetBottomUpRoot() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfile::GetBottomUpRoot");
const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
return reinterpret_cast<const CpuProfileNode*>(profile->bottom_up()->root());
-#else
- return NULL;
-#endif
}
const CpuProfileNode* CpuProfile::GetTopDownRoot() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfile::GetTopDownRoot");
const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
return reinterpret_cast<const CpuProfileNode*>(profile->top_down()->root());
-#else
- return NULL;
-#endif
}
int CpuProfiler::GetProfilesCount() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::GetProfilesCount");
return i::CpuProfiler::GetProfilesCount();
-#else
- return 0;
-#endif
}
const CpuProfile* CpuProfiler::GetProfile(int index,
Handle<Value> security_token) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::GetProfile");
return reinterpret_cast<const CpuProfile*>(
i::CpuProfiler::GetProfile(
security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
index));
-#else
- return NULL;
-#endif
}
const CpuProfile* CpuProfiler::FindProfile(unsigned uid,
Handle<Value> security_token) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::FindProfile");
return reinterpret_cast<const CpuProfile*>(
i::CpuProfiler::FindProfile(
security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
uid));
-#else
- return NULL;
-#endif
}
void CpuProfiler::StartProfiling(Handle<String> title) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::StartProfiling");
i::CpuProfiler::StartProfiling(*Utils::OpenHandle(*title));
-#endif
}
const CpuProfile* CpuProfiler::StopProfiling(Handle<String> title,
Handle<Value> security_token) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::StopProfiling");
return reinterpret_cast<const CpuProfile*>(
i::CpuProfiler::StopProfiling(
security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
*Utils::OpenHandle(*title)));
-#else
- return NULL;
-#endif
}
void CpuProfiler::DeleteAllProfiles() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::CpuProfiler::DeleteAllProfiles");
i::CpuProfiler::DeleteAllProfiles();
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static i::HeapGraphEdge* ToInternal(const HeapGraphEdge* edge) {
return const_cast<i::HeapGraphEdge*>(
reinterpret_cast<const i::HeapGraphEdge*>(edge));
}
-#endif
HeapGraphEdge::Type HeapGraphEdge::GetType() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphEdge::GetType");
return static_cast<HeapGraphEdge::Type>(ToInternal(this)->type());
-#else
- return static_cast<HeapGraphEdge::Type>(0);
-#endif
}
Handle<Value> HeapGraphEdge::GetName() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphEdge::GetName");
i::HeapGraphEdge* edge = ToInternal(this);
@@ -5622,166 +5513,112 @@ Handle<Value> HeapGraphEdge::GetName() const {
edge->index())));
default: UNREACHABLE();
}
-#endif
return v8::Undefined();
}
const HeapGraphNode* HeapGraphEdge::GetFromNode() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode");
const i::HeapEntry* from = ToInternal(this)->From();
return reinterpret_cast<const HeapGraphNode*>(from);
-#else
- return NULL;
-#endif
}
const HeapGraphNode* HeapGraphEdge::GetToNode() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphEdge::GetToNode");
const i::HeapEntry* to = ToInternal(this)->to();
return reinterpret_cast<const HeapGraphNode*>(to);
-#else
- return NULL;
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static i::HeapEntry* ToInternal(const HeapGraphNode* entry) {
return const_cast<i::HeapEntry*>(
reinterpret_cast<const i::HeapEntry*>(entry));
}
-#endif
HeapGraphNode::Type HeapGraphNode::GetType() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetType");
return static_cast<HeapGraphNode::Type>(ToInternal(this)->type());
-#else
- return static_cast<HeapGraphNode::Type>(0);
-#endif
}
Handle<String> HeapGraphNode::GetName() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetName");
return Handle<String>(ToApi<String>(isolate->factory()->LookupAsciiSymbol(
ToInternal(this)->name())));
-#else
- return v8::String::Empty();
-#endif
}
uint64_t HeapGraphNode::GetId() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
return ToInternal(this)->id();
-#else
- return 0;
-#endif
}
int HeapGraphNode::GetSelfSize() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetSelfSize");
return ToInternal(this)->self_size();
-#else
- return 0;
-#endif
}
int HeapGraphNode::GetRetainedSize(bool exact) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
return ToInternal(this)->RetainedSize(exact);
-#else
- return 0;
-#endif
}
int HeapGraphNode::GetChildrenCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetChildrenCount");
return ToInternal(this)->children().length();
-#else
- return 0;
-#endif
}
const HeapGraphEdge* HeapGraphNode::GetChild(int index) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetChild");
return reinterpret_cast<const HeapGraphEdge*>(
&ToInternal(this)->children()[index]);
-#else
- return NULL;
-#endif
}
int HeapGraphNode::GetRetainersCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainersCount");
return ToInternal(this)->retainers().length();
-#else
- return 0;
-#endif
}
const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainer");
return reinterpret_cast<const HeapGraphEdge*>(
ToInternal(this)->retainers()[index]);
-#else
- return NULL;
-#endif
}
const HeapGraphNode* HeapGraphNode::GetDominatorNode() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetDominatorNode");
return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->dominator());
-#else
- return NULL;
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) {
return const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot));
}
-#endif
void HeapSnapshot::Delete() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::Delete");
if (i::HeapProfiler::GetSnapshotsCount() > 1) {
@@ -5790,93 +5627,63 @@ void HeapSnapshot::Delete() {
// If this is the last snapshot, clean up all accessory data as well.
i::HeapProfiler::DeleteAllSnapshots();
}
-#endif
}
HeapSnapshot::Type HeapSnapshot::GetType() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetType");
return static_cast<HeapSnapshot::Type>(ToInternal(this)->type());
-#else
- return static_cast<HeapSnapshot::Type>(0);
-#endif
}
unsigned HeapSnapshot::GetUid() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetUid");
return ToInternal(this)->uid();
-#else
- return 0;
-#endif
}
Handle<String> HeapSnapshot::GetTitle() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetTitle");
return Handle<String>(ToApi<String>(isolate->factory()->LookupAsciiSymbol(
ToInternal(this)->title())));
-#else
- return v8::String::Empty();
-#endif
}
const HeapGraphNode* HeapSnapshot::GetRoot() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetHead");
return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->root());
-#else
- return 0;
-#endif
}
const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById");
return reinterpret_cast<const HeapGraphNode*>(
ToInternal(this)->GetEntryById(id));
-#else
- return NULL;
-#endif
}
int HeapSnapshot::GetNodesCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodesCount");
return ToInternal(this)->entries()->length();
-#else
- return 0;
-#endif
}
const HeapGraphNode* HeapSnapshot::GetNode(int index) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNode");
return reinterpret_cast<const HeapGraphNode*>(
ToInternal(this)->entries()->at(index));
-#else
- return 0;
-#endif
}
void HeapSnapshot::Serialize(OutputStream* stream,
HeapSnapshot::SerializationFormat format) const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::Serialize");
ApiCheck(format == kJSON,
@@ -5890,49 +5697,35 @@ void HeapSnapshot::Serialize(OutputStream* stream,
"Invalid stream chunk size");
i::HeapSnapshotJSONSerializer serializer(ToInternal(this));
serializer.Serialize(stream);
-#endif
}
int HeapProfiler::GetSnapshotsCount() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotsCount");
return i::HeapProfiler::GetSnapshotsCount();
-#else
- return 0;
-#endif
}
const HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshot");
return reinterpret_cast<const HeapSnapshot*>(
i::HeapProfiler::GetSnapshot(index));
-#else
- return NULL;
-#endif
}
const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::FindSnapshot");
return reinterpret_cast<const HeapSnapshot*>(
i::HeapProfiler::FindSnapshot(uid));
-#else
- return NULL;
-#endif
}
const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
HeapSnapshot::Type type,
ActivityControl* control) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::TakeSnapshot");
i::HeapSnapshot::Type internal_type = i::HeapSnapshot::kFull;
@@ -5946,27 +5739,20 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
return reinterpret_cast<const HeapSnapshot*>(
i::HeapProfiler::TakeSnapshot(
*Utils::OpenHandle(*title), internal_type, control));
-#else
- return NULL;
-#endif
}
void HeapProfiler::DeleteAllSnapshots() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::DeleteAllSnapshots");
i::HeapProfiler::DeleteAllSnapshots();
-#endif
}
void HeapProfiler::DefineWrapperClass(uint16_t class_id,
WrapperInfoCallback callback) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate::Current()->heap_profiler()->DefineWrapperClass(class_id,
callback);
-#endif
}
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index 94572c92e9..ab7c6f247a 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -304,12 +304,6 @@ class ConvertToDoubleStub : public CodeStub {
}
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "ConvertToDoubleStub"; }
-
-#ifdef DEBUG
- void Print() { PrintF("ConvertToDoubleStub\n"); }
-#endif
};
@@ -1689,25 +1683,17 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
}
-const char* UnaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "UnaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- UnaryOpIC::GetName(operand_type_));
- return name_;
+ stream->Add("UnaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ UnaryOpIC::GetName(operand_type_));
}
@@ -2043,12 +2029,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
}
-const char* BinaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
switch (mode_) {
@@ -2057,13 +2038,10 @@ const char* BinaryOpStub::GetName() {
case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
default: overwrite_name = "UnknownOverwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- return name_;
+ stream->Add("BinaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ BinaryOpIC::GetName(operands_type_));
}
@@ -3568,7 +3546,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Setup frame pointer for the frame to be pushed.
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
-#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
Label non_outermost_js;
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
@@ -3584,7 +3561,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
__ bind(&cont);
__ push(ip);
-#endif
// Call a faked try-block that does the invoke.
__ bl(&invoke);
@@ -3645,7 +3621,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ PopTryHandler();
__ bind(&exit); // r0 holds result
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Check if the current stack frame is marked as the outermost JS frame.
Label non_outermost_js_2;
__ pop(r5);
@@ -3655,7 +3630,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ str(r6, MemOperand(r5));
__ bind(&non_outermost_js_2);
-#endif
// Restore the top frame descriptors from the stack.
__ pop(r3);
@@ -4755,16 +4729,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
-const char* CompareStub::GetName() {
+void CompareStub::PrintName(StringStream* stream) {
ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
(lhs_.is(r1) && rhs_.is(r0)));
-
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
const char* cc_name;
switch (cc_) {
case lt: cc_name = "LT"; break;
@@ -4775,40 +4742,14 @@ const char* CompareStub::GetName() {
case ne: cc_name = "NE"; break;
default: cc_name = "UnknownCondition"; break;
}
-
- const char* lhs_name = lhs_.is(r0) ? "_r0" : "_r1";
- const char* rhs_name = rhs_.is(r0) ? "_r0" : "_r1";
-
- const char* strict_name = "";
- if (strict_ && (cc_ == eq || cc_ == ne)) {
- strict_name = "_STRICT";
- }
-
- const char* never_nan_nan_name = "";
- if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
- never_nan_nan_name = "_NO_NAN";
- }
-
- const char* include_number_compare_name = "";
- if (!include_number_compare_) {
- include_number_compare_name = "_NO_NUMBER";
- }
-
- const char* include_smi_compare_name = "";
- if (!include_smi_compare_) {
- include_smi_compare_name = "_NO_SMI";
- }
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "CompareStub_%s%s%s%s%s%s",
- cc_name,
- lhs_name,
- rhs_name,
- strict_name,
- never_nan_nan_name,
- include_number_compare_name,
- include_smi_compare_name);
- return name_;
+ bool is_equality = cc_ == eq || cc_ == ne;
+ stream->Add("CompareStub_%s", cc_name);
+ stream->Add(lhs_.is(r0) ? "_r0" : "_r1");
+ stream->Add(rhs_.is(r0) ? "_r0" : "_r1");
+ if (strict_ && is_equality) stream->Add("_STRICT");
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
}
diff --git a/deps/v8/src/arm/code-stubs-arm.h b/deps/v8/src/arm/code-stubs-arm.h
index 7427351308..557f7e6d41 100644
--- a/deps/v8/src/arm/code-stubs-arm.h
+++ b/deps/v8/src/arm/code-stubs-arm.h
@@ -65,8 +65,7 @@ class UnaryOpStub: public CodeStub {
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
- operand_type_(operand_type),
- name_(NULL) {
+ operand_type_(operand_type) {
}
private:
@@ -76,19 +75,7 @@ class UnaryOpStub: public CodeStub {
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("UnaryOpStub %d (op %s), (mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- UnaryOpIC::GetName(operand_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -142,8 +129,7 @@ class BinaryOpStub: public CodeStub {
: op_(op),
mode_(mode),
operands_type_(BinaryOpIC::UNINITIALIZED),
- result_type_(BinaryOpIC::UNINITIALIZED),
- name_(NULL) {
+ result_type_(BinaryOpIC::UNINITIALIZED) {
use_vfp3_ = CpuFeatures::IsSupported(VFP3);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -156,8 +142,7 @@ class BinaryOpStub: public CodeStub {
mode_(ModeBits::decode(key)),
use_vfp3_(VFP3Bits::decode(key)),
operands_type_(operands_type),
- result_type_(result_type),
- name_(NULL) { }
+ result_type_(result_type) { }
private:
enum SmiCodeGenerateHeapNumberResults {
@@ -173,20 +158,7 @@ class BinaryOpStub: public CodeStub {
BinaryOpIC::TypeInfo operands_type_;
BinaryOpIC::TypeInfo result_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("BinaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- BinaryOpIC::GetName(operands_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
// Minor key encoding in 16 bits RRRTTTVOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -370,12 +342,6 @@ class WriteInt32ToHeapNumberStub : public CodeStub {
}
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "WriteInt32ToHeapNumberStub"; }
-
-#ifdef DEBUG
- void Print() { PrintF("WriteInt32ToHeapNumberStub\n"); }
-#endif
};
@@ -402,8 +368,6 @@ class NumberToStringStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "NumberToStringStub"; }
};
@@ -421,8 +385,6 @@ class RegExpCEntryStub: public CodeStub {
int MinorKey() { return 0; }
bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "RegExpCEntryStub"; }
};
@@ -443,8 +405,6 @@ class DirectCEntryStub: public CodeStub {
int MinorKey() { return 0; }
bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "DirectCEntryStub"; }
};
@@ -627,13 +587,6 @@ class StringDictionaryLookupStub: public CodeStub {
StringDictionary::kHeaderSize +
StringDictionary::kElementsStartIndex * kPointerSize;
-
-#ifdef DEBUG
- void Print() {
- PrintF("StringDictionaryLookupStub\n");
- }
-#endif
-
Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
diff --git a/deps/v8/src/arm/codegen-arm.h b/deps/v8/src/arm/codegen-arm.h
index 01aa8052e1..d27982abac 100644
--- a/deps/v8/src/arm/codegen-arm.h
+++ b/deps/v8/src/arm/codegen-arm.h
@@ -58,9 +58,7 @@ class CodeGenerator: public AstVisitor {
// Print the code after compiling it.
static void PrintCode(Handle<Code> code, CompilationInfo* info);
-#ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type);
-#endif
static void SetFunctionInfo(Handle<JSFunction> fun,
FunctionLiteral* lit,
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index 4b55915e91..c3440eb3ea 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -776,7 +776,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ EmitVariableLoad(prop->obj()->AsVariableProxy());
}
__ push(r0);
@@ -1113,7 +1113,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
- EmitVariableLoad(expr->var());
+ EmitVariableLoad(expr);
}
@@ -1262,7 +1262,11 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
}
-void FullCodeGenerator::EmitVariableLoad(Variable* var) {
+void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
+ // Record position before possible IC call.
+ SetSourcePosition(proxy->position());
+ Variable* var = proxy->var();
+
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
Slot* slot = var->AsSlot();
@@ -1593,7 +1597,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->target()->AsVariableProxy());
PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
@@ -2772,13 +2776,12 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
// with '%2s' (see Logger::LogRuntime for all the formats).
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
}
-#endif
+
// Finally, we're expected to leave a value on the top of the stack.
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
context()->Plug(r0);
@@ -3816,7 +3819,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
AccumulatorValueContext context(this);
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc
index dea875bad4..6038153a1a 100644
--- a/deps/v8/src/arm/ic-arm.cc
+++ b/deps/v8/src/arm/ic-arm.cc
@@ -212,101 +212,6 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
}
-static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
- Label* miss,
- Register elements,
- Register key,
- Register result,
- Register t0,
- Register t1,
- Register t2) {
- // Register use:
- //
- // elements - holds the slow-case elements of the receiver on entry.
- // Unchanged unless 'result' is the same register.
- //
- // key - holds the smi key on entry.
- // Unchanged unless 'result' is the same register.
- //
- // result - holds the result on exit if the load succeeded.
- // Allowed to be the same as 'key' or 'result'.
- // Unchanged on bailout so 'key' or 'result' can be used
- // in further computation.
- //
- // Scratch registers:
- //
- // t0 - holds the untagged key on entry and holds the hash once computed.
- //
- // t1 - used to hold the capacity mask of the dictionary
- //
- // t2 - used for the index into the dictionary.
- Label done;
-
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- __ mvn(t1, Operand(t0));
- __ add(t0, t1, Operand(t0, LSL, 15));
- // hash = hash ^ (hash >> 12);
- __ eor(t0, t0, Operand(t0, LSR, 12));
- // hash = hash + (hash << 2);
- __ add(t0, t0, Operand(t0, LSL, 2));
- // hash = hash ^ (hash >> 4);
- __ eor(t0, t0, Operand(t0, LSR, 4));
- // hash = hash * 2057;
- __ mov(t1, Operand(2057));
- __ mul(t0, t0, t1);
- // hash = hash ^ (hash >> 16);
- __ eor(t0, t0, Operand(t0, LSR, 16));
-
- // Compute the capacity mask.
- __ ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
- __ mov(t1, Operand(t1, ASR, kSmiTagSize)); // convert smi to int
- __ sub(t1, t1, Operand(1));
-
- // Generate an unrolled loop that performs a few probes before giving up.
- static const int kProbes = 4;
- for (int i = 0; i < kProbes; i++) {
- // Use t2 for index calculations and keep the hash intact in t0.
- __ mov(t2, t0);
- // Compute the masked index: (hash + i + i * i) & mask.
- if (i > 0) {
- __ add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
- }
- __ and_(t2, t2, Operand(t1));
-
- // Scale the index by multiplying by the element size.
- ASSERT(NumberDictionary::kEntrySize == 3);
- __ add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
-
- // Check if the key is identical to the name.
- __ add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
- __ ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
- __ cmp(key, Operand(ip));
- if (i != kProbes - 1) {
- __ b(eq, &done);
- } else {
- __ b(ne, miss);
- }
- }
-
- __ bind(&done);
- // Check that the value is a normal property.
- // t2: elements + (index * kPointerSize)
- const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
- __ ldr(t1, FieldMemOperand(t2, kDetailsOffset));
- __ tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
- __ b(ne, miss);
-
- // Get the value at the masked, scaled index and return.
- const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
- __ ldr(result, FieldMemOperand(t2, kValueOffset));
-}
-
-
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r2 : name
@@ -738,7 +643,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
__ b(ne, &slow_load);
__ mov(r0, Operand(r2, ASR, kSmiTagSize));
// r0: untagged index
- GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
+ __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
__ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
__ jmp(&do_call);
@@ -1127,7 +1032,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ cmp(r3, ip);
__ b(ne, &slow);
__ mov(r2, Operand(r0, ASR, kSmiTagSize));
- GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5);
+ __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
__ Ret();
// Slow case, key and receiver still in r0 and r1.
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index ee36314209..dc93aea346 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -551,6 +551,13 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
RecordPosition(pointers->position());
__ Call(code, mode);
RegisterLazyDeoptimization(instr, safepoint_mode);
+
+ // Signal that we don't inline smi code before these stubs in the
+ // optimizing code generator.
+ if (code->kind() == Code::BINARY_OP_IC ||
+ code->kind() == Code::COMPARE_IC) {
+ __ nop();
+ }
}
@@ -1506,6 +1513,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ __ nop(); // Signals no inlined code.
}
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index 08a1cb9453..320879a627 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -1343,6 +1343,100 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register result,
+ Register t0,
+ Register t1,
+ Register t2) {
+ // Register use:
+ //
+ // elements - holds the slow-case elements of the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // result - holds the result on exit if the load succeeded.
+ // Allowed to be the same as 'key' or 'result'.
+ // Unchanged on bailout so 'key' or 'result' can be used
+ // in further computation.
+ //
+ // Scratch registers:
+ //
+ // t0 - holds the untagged key on entry and holds the hash once computed.
+ //
+ // t1 - used to hold the capacity mask of the dictionary
+ //
+ // t2 - used for the index into the dictionary.
+ Label done;
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ mvn(t1, Operand(t0));
+ add(t0, t1, Operand(t0, LSL, 15));
+ // hash = hash ^ (hash >> 12);
+ eor(t0, t0, Operand(t0, LSR, 12));
+ // hash = hash + (hash << 2);
+ add(t0, t0, Operand(t0, LSL, 2));
+ // hash = hash ^ (hash >> 4);
+ eor(t0, t0, Operand(t0, LSR, 4));
+ // hash = hash * 2057;
+ mov(t1, Operand(2057));
+ mul(t0, t0, t1);
+ // hash = hash ^ (hash >> 16);
+ eor(t0, t0, Operand(t0, LSR, 16));
+
+ // Compute the capacity mask.
+ ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
+ mov(t1, Operand(t1, ASR, kSmiTagSize)); // convert smi to int
+ sub(t1, t1, Operand(1));
+
+ // Generate an unrolled loop that performs a few probes before giving up.
+ static const int kProbes = 4;
+ for (int i = 0; i < kProbes; i++) {
+ // Use t2 for index calculations and keep the hash intact in t0.
+ mov(t2, t0);
+ // Compute the masked index: (hash + i + i * i) & mask.
+ if (i > 0) {
+ add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
+ }
+ and_(t2, t2, Operand(t1));
+
+ // Scale the index by multiplying by the element size.
+ ASSERT(NumberDictionary::kEntrySize == 3);
+ add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
+
+ // Check if the key is identical to the name.
+ add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
+ ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
+ cmp(key, Operand(ip));
+ if (i != kProbes - 1) {
+ b(eq, &done);
+ } else {
+ b(ne, miss);
+ }
+ }
+
+ bind(&done);
+ // Check that the value is a normal property.
+ // t2: elements + (index * kPointerSize)
+ const int kDetailsOffset =
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ ldr(t1, FieldMemOperand(t2, kDetailsOffset));
+ tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
+ b(ne, miss);
+
+ // Get the value at the masked, scaled index and return.
+ const int kValueOffset =
+ NumberDictionary::kElementsStartOffset + kPointerSize;
+ ldr(result, FieldMemOperand(t2, kValueOffset));
+}
+
+
void MacroAssembler::AllocateInNewSpace(int object_size,
Register result,
Register scratch1,
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 1918858ebe..07281a7caf 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -433,6 +433,16 @@ class MacroAssembler: public Assembler {
Register scratch,
Label* miss);
+
+ void LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register result,
+ Register t0,
+ Register t1,
+ Register t2);
+
+
inline void MarkCode(NopMarkerTypes type) {
nop(type);
}
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h
index d771e4033f..0e653868b6 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.h
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h
@@ -28,6 +28,9 @@
#ifndef V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_
+#include "arm/assembler-arm.h"
+#include "arm/assembler-arm-inl.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index caa6a0eef9..86e49716d3 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -3100,7 +3100,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
// -- r1 : receiver
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedLoadElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(r1,
r2,
@@ -3193,7 +3194,10 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
// -- r3 : scratch
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedStoreElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ MaybeObject* maybe_stub =
+ KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(r2,
r3,
@@ -3388,6 +3392,53 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
#define __ ACCESS_MASM(masm)
+void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+ MacroAssembler* masm) {
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label slow, miss_force_generic;
+
+ Register key = r0;
+ Register receiver = r1;
+
+ __ JumpIfNotSmi(key, &miss_force_generic);
+ __ mov(r2, Operand(key, ASR, kSmiTagSize));
+ __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
+ __ Ret();
+
+ __ bind(&slow);
+ __ IncrementCounter(
+ masm->isolate()->counters()->keyed_load_external_array_slow(),
+ 1, r2, r3);
+
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ Jump(slow_ic, RelocInfo::CODE_TARGET);
+
+ // Miss case, call the runtime.
+ __ bind(&miss_force_generic);
+
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+
+ Handle<Code> miss_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+}
+
+
static bool IsElementTypeSigned(JSObject::ElementsKind elements_kind) {
switch (elements_kind) {
case JSObject::EXTERNAL_BYTE_ELEMENTS:
diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc
index db57280f41..5c0ef5a4a5 100644
--- a/deps/v8/src/code-stubs.cc
+++ b/deps/v8/src/code-stubs.cc
@@ -61,21 +61,29 @@ void CodeStub::GenerateCode(MacroAssembler* masm) {
}
+SmartPointer<const char> CodeStub::GetName() {
+ char buffer[100];
+ NoAllocationStringAllocator allocator(buffer,
+ static_cast<unsigned>(sizeof(buffer)));
+ StringStream stream(&allocator);
+ PrintName(&stream);
+ return stream.ToCString();
+}
+
+
void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) {
code->set_major_key(MajorKey());
Isolate* isolate = masm->isolate();
- PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
- GDBJIT(AddCode(GDBJITInterface::STUB, GetName(), code));
+ SmartPointer<const char> name = GetName();
+ PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, *name));
+ GDBJIT(AddCode(GDBJITInterface::STUB, *name, code));
Counters* counters = isolate->counters();
counters->total_stubs_code_size()->Increment(code->instruction_size());
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code_stubs) {
-#ifdef DEBUG
- Print();
-#endif
- code->Disassemble(GetName());
+ code->Disassemble(*name);
PrintF("\n");
}
#endif
@@ -170,7 +178,7 @@ MaybeObject* CodeStub::TryGetCode() {
const char* CodeStub::MajorName(CodeStub::Major major_key,
bool allow_unknown_keys) {
switch (major_key) {
-#define DEF_CASE(name) case name: return #name;
+#define DEF_CASE(name) case name: return #name "Stub";
CODE_STUB_LIST(DEF_CASE)
#undef DEF_CASE
default:
@@ -213,13 +221,7 @@ void ICCompareStub::Generate(MacroAssembler* masm) {
}
-const char* InstanceofStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
+void InstanceofStub::PrintName(StringStream* stream) {
const char* args = "";
if (HasArgsInRegisters()) {
args = "_REGS";
@@ -235,33 +237,95 @@ const char* InstanceofStub::GetName() {
return_true_false_object = "_TRUEFALSE";
}
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "InstanceofStub%s%s%s",
- args,
- inline_check,
- return_true_false_object);
- return name_;
+ stream->Add("InstanceofStub%s%s%s",
+ args,
+ inline_check,
+ return_true_false_object);
}
-void KeyedLoadFastElementStub::Generate(MacroAssembler* masm) {
- KeyedLoadStubCompiler::GenerateLoadFastElement(masm);
+void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
+ switch (elements_kind_) {
+ case JSObject::FAST_ELEMENTS:
+ KeyedLoadStubCompiler::GenerateLoadFastElement(masm);
+ break;
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ UNIMPLEMENTED();
+ break;
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ KeyedLoadStubCompiler::GenerateLoadExternalArray(masm, elements_kind_);
+ break;
+ case JSObject::DICTIONARY_ELEMENTS:
+ KeyedLoadStubCompiler::GenerateLoadDictionaryElement(masm);
+ break;
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
+ UNREACHABLE();
+ break;
+ }
}
-void KeyedStoreFastElementStub::Generate(MacroAssembler* masm) {
- KeyedStoreStubCompiler::GenerateStoreFastElement(masm, is_js_array_);
+void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
+ switch (elements_kind_) {
+ case JSObject::FAST_ELEMENTS:
+ KeyedStoreStubCompiler::GenerateStoreFastElement(masm, is_js_array_);
+ break;
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ UNIMPLEMENTED();
+ break;
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ KeyedStoreStubCompiler::GenerateStoreExternalArray(masm, elements_kind_);
+ break;
+ case JSObject::DICTIONARY_ELEMENTS:
+ KeyedStoreStubCompiler::GenerateStoreDictionaryElement(masm);
+ break;
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
+ UNREACHABLE();
+ break;
+ }
}
-void KeyedLoadExternalArrayStub::Generate(MacroAssembler* masm) {
- KeyedLoadStubCompiler::GenerateLoadExternalArray(masm, elements_kind_);
+void ArgumentsAccessStub::PrintName(StringStream* stream) {
+ const char* type_name = NULL; // Make g++ happy.
+ switch (type_) {
+ case READ_ELEMENT: type_name = "ReadElement"; break;
+ case NEW_NON_STRICT_FAST: type_name = "NewNonStrictFast"; break;
+ case NEW_NON_STRICT_SLOW: type_name = "NewNonStrictSlow"; break;
+ case NEW_STRICT: type_name = "NewStrict"; break;
+ }
+ stream->Add("ArgumentsAccessStub_%s", type_name);
}
-void KeyedStoreExternalArrayStub::Generate(MacroAssembler* masm) {
- KeyedStoreStubCompiler::GenerateStoreExternalArray(masm, elements_kind_);
+void CallFunctionStub::PrintName(StringStream* stream) {
+ const char* in_loop_name = NULL; // Make g++ happy.
+ switch (in_loop_) {
+ case NOT_IN_LOOP: in_loop_name = ""; break;
+ case IN_LOOP: in_loop_name = "_InLoop"; break;
+ }
+ const char* flags_name = NULL; // Make g++ happy.
+ switch (flags_) {
+ case NO_CALL_FUNCTION_FLAGS: flags_name = ""; break;
+ case RECEIVER_MIGHT_BE_IMPLICIT: flags_name = "_Implicit"; break;
+ }
+ stream->Add("CallFunctionStub_Args%d%s%s", argc_, in_loop_name, flags_name);
}
-
} } // namespace v8::internal
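
A minimal sketch of how a stub hooks into the PrintName/GetName pattern introduced above: the base CodeStub::GetName() streams into a fixed 100-byte stack buffer through a NoAllocationStringAllocator, so overrides only add to the stream. FooStub and its major key are hypothetical and exist only for illustration.

    class FooStub : public CodeStub {
     public:
      explicit FooStub(bool fast) : fast_(fast) { }
      void Generate(MacroAssembler* masm);

     private:
      Major MajorKey() { return Foo; }   // hypothetical CODE_STUB_LIST entry
      int MinorKey() { return fast_ ? 1 : 0; }

      // Called by CodeStub::GetName(); must not allocate on the heap since
      // the StringStream is backed by a stack buffer.
      virtual void PrintName(StringStream* stream) {
        stream->Add("FooStub%s", fast_ ? "_Fast" : "");
      }

      bool fast_;
    };
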
diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h
index 3a756585e5..17c245c80e 100644
--- a/deps/v8/src/code-stubs.h
+++ b/deps/v8/src/code-stubs.h
@@ -70,10 +70,8 @@ namespace internal {
V(NumberToString) \
V(CEntry) \
V(JSEntry) \
- V(KeyedLoadFastElement) \
- V(KeyedStoreFastElement) \
- V(KeyedLoadExternalArray) \
- V(KeyedStoreExternalArray) \
+ V(KeyedLoadElement) \
+ V(KeyedStoreElement) \
V(DebuggerStatement) \
V(StringDictionaryNegativeLookup)
@@ -183,16 +181,15 @@ class CodeStub BASE_EMBEDDED {
}
// Returns a name for logging/debugging purposes.
- virtual const char* GetName() { return MajorName(MajorKey(), false); }
+ SmartPointer<const char> GetName();
+ virtual void PrintName(StringStream* stream) {
+ stream->Add("%s", MajorName(MajorKey(), false));
+ }
// Returns whether the code generated for this stub needs to be allocated as
// a fixed (non-moveable) code object.
virtual bool NeedsImmovableCode() { return false; }
- #ifdef DEBUG
- virtual void Print() { PrintF("%s\n", GetName()); }
-#endif
-
// Computes the key based on major and minor.
uint32_t GetKey() {
ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
@@ -274,8 +271,6 @@ class StackCheckStub : public CodeStub {
void Generate(MacroAssembler* masm);
private:
- const char* GetName() { return "StackCheckStub"; }
-
Major MajorKey() { return StackCheck; }
int MinorKey() { return 0; }
};
@@ -290,7 +285,6 @@ class ToNumberStub: public CodeStub {
private:
Major MajorKey() { return ToNumber; }
int MinorKey() { return 0; }
- const char* GetName() { return "ToNumberStub"; }
};
@@ -302,7 +296,6 @@ class FastNewClosureStub : public CodeStub {
void Generate(MacroAssembler* masm);
private:
- const char* GetName() { return "FastNewClosureStub"; }
Major MajorKey() { return FastNewClosure; }
int MinorKey() { return strict_mode_; }
@@ -323,7 +316,6 @@ class FastNewContextStub : public CodeStub {
private:
int slots_;
- const char* GetName() { return "FastNewContextStub"; }
Major MajorKey() { return FastNewContext; }
int MinorKey() { return slots_; }
};
@@ -352,7 +344,6 @@ class FastCloneShallowArrayStub : public CodeStub {
Mode mode_;
int length_;
- const char* GetName() { return "FastCloneShallowArrayStub"; }
Major MajorKey() { return FastCloneShallowArray; }
int MinorKey() {
ASSERT(mode_ == 0 || mode_ == 1);
@@ -370,7 +361,7 @@ class InstanceofStub: public CodeStub {
kReturnTrueFalseObject = 1 << 2
};
- explicit InstanceofStub(Flags flags) : flags_(flags), name_(NULL) { }
+ explicit InstanceofStub(Flags flags) : flags_(flags) { }
static Register left();
static Register right();
@@ -393,10 +384,9 @@ class InstanceofStub: public CodeStub {
return (flags_ & kReturnTrueFalseObject) != 0;
}
- const char* GetName();
+ virtual void PrintName(StringStream* stream);
Flags flags_;
- char* name_;
};
@@ -408,8 +398,6 @@ class MathPowStub: public CodeStub {
private:
virtual CodeStub::Major MajorKey() { return MathPow; }
virtual int MinorKey() { return 0; }
-
- const char* GetName() { return "MathPowStub"; }
};
@@ -476,8 +464,7 @@ class CompareStub: public CodeStub {
include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
lhs_(lhs),
- rhs_(rhs),
- name_(NULL) { }
+ rhs_(rhs) { }
CompareStub(Condition cc,
bool strict,
@@ -488,8 +475,7 @@ class CompareStub: public CodeStub {
include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
lhs_(no_reg),
- rhs_(no_reg),
- name_(NULL) { }
+ rhs_(no_reg) { }
void Generate(MacroAssembler* masm);
@@ -543,26 +529,7 @@ class CompareStub: public CodeStub {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
- char* name_;
- const char* GetName();
-#ifdef DEBUG
- void Print() {
- PrintF("CompareStub (minor %d) (cc %d), (strict %s), "
- "(never_nan_nan %s), (smi_compare %s) (number_compare %s) ",
- MinorKey(),
- static_cast<int>(cc_),
- strict_ ? "true" : "false",
- never_nan_nan_ ? "true" : "false",
- include_smi_compare_ ? "inluded" : "not included",
- include_number_compare_ ? "included" : "not included");
-
- if (!lhs_.is(no_reg) && !rhs_.is(no_reg)) {
- PrintF("(lhs r%d), (rhs r%d)\n", lhs_.code(), rhs_.code());
- } else {
- PrintF("\n");
- }
- }
-#endif
+ virtual void PrintName(StringStream* stream);
};
@@ -593,8 +560,6 @@ class CEntryStub : public CodeStub {
int MinorKey();
bool NeedsImmovableCode();
-
- const char* GetName() { return "CEntryStub"; }
};
@@ -610,8 +575,6 @@ class JSEntryStub : public CodeStub {
private:
Major MajorKey() { return JSEntry; }
int MinorKey() { return 0; }
-
- const char* GetName() { return "JSEntryStub"; }
};
@@ -624,7 +587,9 @@ class JSConstructEntryStub : public JSEntryStub {
private:
int MinorKey() { return 1; }
- const char* GetName() { return "JSConstructEntryStub"; }
+ virtual void PrintName(StringStream* stream) {
+ stream->Add("JSConstructEntryStub");
+ }
};
@@ -651,13 +616,7 @@ class ArgumentsAccessStub: public CodeStub {
void GenerateNewNonStrictFast(MacroAssembler* masm);
void GenerateNewNonStrictSlow(MacroAssembler* masm);
- const char* GetName() { return "ArgumentsAccessStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("ArgumentsAccessStub (type %d)\n", type_);
- }
-#endif
+ virtual void PrintName(StringStream* stream);
};
@@ -670,14 +629,6 @@ class RegExpExecStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "RegExpExecStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("RegExpExecStub\n");
- }
-#endif
};
@@ -690,14 +641,6 @@ class RegExpConstructResultStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "RegExpConstructResultStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("RegExpConstructResultStub\n");
- }
-#endif
};
@@ -717,14 +660,7 @@ class CallFunctionStub: public CodeStub {
InLoopFlag in_loop_;
CallFunctionFlags flags_;
-#ifdef DEBUG
- void Print() {
- PrintF("CallFunctionStub (args %d, in_loop %d, flags %d)\n",
- argc_,
- static_cast<int>(in_loop_),
- static_cast<int>(flags_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
// Minor key encoding in 32 bits with Bitfield <Type, shift, size>.
class InLoopBits: public BitField<InLoopFlag, 0, 1> {};
@@ -921,83 +857,44 @@ class AllowStubCallsScope {
DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
};
-#ifdef DEBUG
-#define DECLARE_ARRAY_STUB_PRINT(name) void Print() { PrintF(#name); }
-#else
-#define DECLARE_ARRAY_STUB_PRINT(name)
-#endif
-
-class KeyedLoadFastElementStub : public CodeStub {
+class KeyedLoadElementStub : public CodeStub {
public:
- explicit KeyedLoadFastElementStub() {
- }
+ explicit KeyedLoadElementStub(JSObject::ElementsKind elements_kind)
+ : elements_kind_(elements_kind)
+ { }
- Major MajorKey() { return KeyedLoadFastElement; }
- int MinorKey() { return 0; }
+ Major MajorKey() { return KeyedLoadElement; }
+ int MinorKey() { return elements_kind_; }
void Generate(MacroAssembler* masm);
- const char* GetName() { return "KeyedLoadFastElementStub"; }
+ private:
+ JSObject::ElementsKind elements_kind_;
- DECLARE_ARRAY_STUB_PRINT(KeyedLoadFastElementStub)
+ DISALLOW_COPY_AND_ASSIGN(KeyedLoadElementStub);
};
-class KeyedStoreFastElementStub : public CodeStub {
+class KeyedStoreElementStub : public CodeStub {
public:
- explicit KeyedStoreFastElementStub(bool is_js_array)
- : is_js_array_(is_js_array) { }
+ KeyedStoreElementStub(bool is_js_array,
+ JSObject::ElementsKind elements_kind)
+ : is_js_array_(is_js_array),
+ elements_kind_(elements_kind) { }
- Major MajorKey() { return KeyedStoreFastElement; }
- int MinorKey() { return is_js_array_ ? 1 : 0; }
+ Major MajorKey() { return KeyedStoreElement; }
+ int MinorKey() {
+ return (is_js_array_ ? 0 : JSObject::kElementsKindCount) + elements_kind_;
+ }
void Generate(MacroAssembler* masm);
- const char* GetName() { return "KeyedStoreFastElementStub"; }
-
- DECLARE_ARRAY_STUB_PRINT(KeyedStoreFastElementStub)
-
private:
bool is_js_array_;
-};
-
-
-class KeyedLoadExternalArrayStub : public CodeStub {
- public:
- explicit KeyedLoadExternalArrayStub(JSObject::ElementsKind elements_kind)
- : elements_kind_(elements_kind) { }
-
- Major MajorKey() { return KeyedLoadExternalArray; }
- int MinorKey() { return elements_kind_; }
-
- void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "KeyedLoadExternalArrayStub"; }
-
- DECLARE_ARRAY_STUB_PRINT(KeyedLoadExternalArrayStub)
-
- protected:
JSObject::ElementsKind elements_kind_;
-};
-
-
-class KeyedStoreExternalArrayStub : public CodeStub {
- public:
- explicit KeyedStoreExternalArrayStub(JSObject::ElementsKind elements_kind)
- : elements_kind_(elements_kind) { }
-
- Major MajorKey() { return KeyedStoreExternalArray; }
- int MinorKey() { return elements_kind_; }
-
- void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "KeyedStoreExternalArrayStub"; }
- DECLARE_ARRAY_STUB_PRINT(KeyedStoreExternalArrayStub)
-
- protected:
- JSObject::ElementsKind elements_kind_;
+ DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
};
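
A worked example of the KeyedStoreElementStub minor-key layout above. The real value of JSObject::kElementsKindCount is whatever the enum defines; the count 9 and the elements-kind value 2 are used purely for illustration.

    // JS-array stubs occupy keys [0, kind_count);
    // non-JS-array stubs occupy [kind_count, 2 * kind_count).
    static int EncodeKeyedStoreMinorKey(bool is_js_array,
                                        int elements_kind,
                                        int kind_count) {
      return (is_js_array ? 0 : kind_count) + elements_kind;
    }
    // EncodeKeyedStoreMinorKey(true,  2, 9) ==  2
    // EncodeKeyedStoreMinorKey(false, 2, 9) == 11
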
diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc
index 4e5c781361..fb723a3bcc 100644
--- a/deps/v8/src/codegen.cc
+++ b/deps/v8/src/codegen.cc
@@ -169,8 +169,6 @@ void CodeGenerator::PrintCode(Handle<Code> code, CompilationInfo* info) {
#endif // ENABLE_DISASSEMBLER
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
static Vector<const char> kRegexp = CStrVector("regexp");
bool CodeGenerator::ShouldGenerateLog(Expression* type) {
@@ -187,8 +185,6 @@ bool CodeGenerator::ShouldGenerateLog(Expression* type) {
return false;
}
-#endif
-
bool CodeGenerator::RecordPositions(MacroAssembler* masm,
int pos,
diff --git a/deps/v8/src/conversions-inl.h b/deps/v8/src/conversions-inl.h
index bb24a9c2b7..f1f526ffc0 100644
--- a/deps/v8/src/conversions-inl.h
+++ b/deps/v8/src/conversions-inl.h
@@ -43,6 +43,11 @@
namespace v8 {
namespace internal {
+static inline double JunkStringValue() {
+ return std::numeric_limits<double>::quiet_NaN();
+}
+
+
// The fast double-to-unsigned-int conversion routine does not guarantee
// rounding towards zero, or any reasonable value if the argument is larger
// than what fits in an unsigned 32-bit integer.
@@ -151,7 +156,7 @@ static double InternalStringToIntDouble(UnicodeCache* unicode_cache,
!AdvanceToNonspace(unicode_cache, &current, end)) {
break;
} else {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
}
@@ -181,7 +186,7 @@ static double InternalStringToIntDouble(UnicodeCache* unicode_cache,
if (!allow_trailing_junk &&
AdvanceToNonspace(unicode_cache, &current, end)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
int middle_value = (1 << (overflow_bits_count - 1));
@@ -229,7 +234,7 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
EndMark end,
int radix) {
const bool allow_trailing_junk = true;
- const double empty_string_val = JUNK_STRING_VALUE;
+ const double empty_string_val = JunkStringValue();
if (!AdvanceToNonspace(unicode_cache, &current, end)) {
return empty_string_val;
@@ -242,12 +247,12 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
// Ignore leading sign; skip following spaces.
++current;
if (current == end) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
} else if (*current == '-') {
++current;
if (current == end) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
negative = true;
}
@@ -260,7 +265,7 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
if (*current == 'x' || *current == 'X') {
radix = 16;
++current;
- if (current == end) return JUNK_STRING_VALUE;
+ if (current == end) return JunkStringValue();
} else {
radix = 8;
leading_zero = true;
@@ -275,14 +280,14 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
if (current == end) return SignedZero(negative);
if (*current == 'x' || *current == 'X') {
++current;
- if (current == end) return JUNK_STRING_VALUE;
+ if (current == end) return JunkStringValue();
} else {
leading_zero = true;
}
}
}
- if (radix < 2 || radix > 36) return JUNK_STRING_VALUE;
+ if (radix < 2 || radix > 36) return JunkStringValue();
// Skip leading zeros.
while (*current == '0') {
@@ -292,7 +297,7 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
}
if (!leading_zero && !isDigit(*current, radix)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
if (IsPowerOf2(radix)) {
@@ -340,7 +345,7 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
if (!allow_trailing_junk &&
AdvanceToNonspace(unicode_cache, &current, end)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
ASSERT(buffer_pos < kBufferSize);
@@ -406,7 +411,7 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
if (!allow_trailing_junk &&
AdvanceToNonspace(unicode_cache, &current, end)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
return negative ? -v : v;
@@ -456,22 +461,22 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
if (*current == '+') {
// Ignore leading sign.
++current;
- if (current == end) return JUNK_STRING_VALUE;
+ if (current == end) return JunkStringValue();
} else if (*current == '-') {
++current;
- if (current == end) return JUNK_STRING_VALUE;
+ if (current == end) return JunkStringValue();
negative = true;
}
static const char kInfinitySymbol[] = "Infinity";
if (*current == kInfinitySymbol[0]) {
if (!SubStringEquals(&current, end, kInfinitySymbol)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
if (!allow_trailing_junk &&
AdvanceToNonspace(unicode_cache, &current, end)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
ASSERT(buffer_pos == 0);
@@ -489,7 +494,7 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
if ((flags & ALLOW_HEX) && (*current == 'x' || *current == 'X')) {
++current;
if (current == end || !isDigit(*current, 16)) {
- return JUNK_STRING_VALUE; // "0x".
+ return JunkStringValue(); // "0x".
}
return InternalStringToIntDouble<4>(unicode_cache,
@@ -529,13 +534,13 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
}
if (*current == '.') {
- if (octal && !allow_trailing_junk) return JUNK_STRING_VALUE;
+ if (octal && !allow_trailing_junk) return JunkStringValue();
if (octal) goto parsing_done;
++current;
if (current == end) {
if (significant_digits == 0 && !leading_zero) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
} else {
goto parsing_done;
}
@@ -576,18 +581,18 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
// If exponent < 0 then string was [+-]\.0*...
// If significant_digits != 0 the string is not equal to 0.
// Otherwise there are no digits in the string.
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
// Parse exponential part.
if (*current == 'e' || *current == 'E') {
- if (octal) return JUNK_STRING_VALUE;
+ if (octal) return JunkStringValue();
++current;
if (current == end) {
if (allow_trailing_junk) {
goto parsing_done;
} else {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
}
char sign = '+';
@@ -598,7 +603,7 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
if (allow_trailing_junk) {
goto parsing_done;
} else {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
}
}
@@ -607,7 +612,7 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
if (allow_trailing_junk) {
goto parsing_done;
} else {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
}
@@ -631,7 +636,7 @@ static double InternalStringToDouble(UnicodeCache* unicode_cache,
if (!allow_trailing_junk &&
AdvanceToNonspace(unicode_cache, &current, end)) {
- return JUNK_STRING_VALUE;
+ return JunkStringValue();
}
parsing_done:
diff --git a/deps/v8/src/conversions.cc b/deps/v8/src/conversions.cc
index 232eda08c9..c34fe519c4 100644
--- a/deps/v8/src/conversions.cc
+++ b/deps/v8/src/conversions.cc
@@ -430,24 +430,4 @@ char* DoubleToRadixCString(double value, int radix) {
return builder.Finalize();
}
-
-static Mutex* dtoa_lock_one = OS::CreateMutex();
-static Mutex* dtoa_lock_zero = OS::CreateMutex();
-
-
} } // namespace v8::internal
-
-
-extern "C" {
-void ACQUIRE_DTOA_LOCK(int n) {
- ASSERT(n == 0 || n == 1);
- (n == 0 ? v8::internal::dtoa_lock_zero : v8::internal::dtoa_lock_one)->Lock();
-}
-
-
-void FREE_DTOA_LOCK(int n) {
- ASSERT(n == 0 || n == 1);
- (n == 0 ? v8::internal::dtoa_lock_zero : v8::internal::dtoa_lock_one)->
- Unlock();
-}
-}
diff --git a/deps/v8/src/conversions.h b/deps/v8/src/conversions.h
index c3e27b2025..7b02c47f6a 100644
--- a/deps/v8/src/conversions.h
+++ b/deps/v8/src/conversions.h
@@ -44,8 +44,6 @@ namespace internal {
// we don't need to preserve all the digits.
const int kMaxSignificantDigits = 772;
-static const double JUNK_STRING_VALUE =
- std::numeric_limits<double>::quiet_NaN();
static bool isDigit(int x, int radix) {
return (x >= '0' && x <= '9' && x < '0' + radix)
diff --git a/deps/v8/src/cpu-profiler-inl.h b/deps/v8/src/cpu-profiler-inl.h
index d7a23a518a..938b632214 100644
--- a/deps/v8/src/cpu-profiler-inl.h
+++ b/deps/v8/src/cpu-profiler-inl.h
@@ -30,8 +30,6 @@
#include "cpu-profiler.h"
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include <new>
#include "circular-queue-inl.h"
#include "profile-generator-inl.h"
@@ -83,6 +81,4 @@ bool ProfilerEventsProcessor::FilterOutCodeCreateEvent(
} } // namespace v8::internal
-#endif // ENABLE_LOGGING_AND_PROFILING
-
#endif // V8_CPU_PROFILER_INL_H_
diff --git a/deps/v8/src/cpu-profiler.cc b/deps/v8/src/cpu-profiler.cc
index 8b10e8188c..bb480fc345 100644
--- a/deps/v8/src/cpu-profiler.cc
+++ b/deps/v8/src/cpu-profiler.cc
@@ -29,8 +29,6 @@
#include "cpu-profiler-inl.h"
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "frames-inl.h"
#include "hashmap.h"
#include "log-inl.h"
@@ -574,31 +572,21 @@ void CpuProfiler::StopProcessor() {
logger->logging_nesting_ = saved_logging_nesting_;
}
-} } // namespace v8::internal
-
-#endif // ENABLE_LOGGING_AND_PROFILING
-
-namespace v8 {
-namespace internal {
void CpuProfiler::Setup() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
Isolate* isolate = Isolate::Current();
if (isolate->cpu_profiler() == NULL) {
isolate->set_cpu_profiler(new CpuProfiler());
}
-#endif
}
void CpuProfiler::TearDown() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
Isolate* isolate = Isolate::Current();
if (isolate->cpu_profiler() != NULL) {
delete isolate->cpu_profiler();
}
isolate->set_cpu_profiler(NULL);
-#endif
}
} } // namespace v8::internal
diff --git a/deps/v8/src/cpu-profiler.h b/deps/v8/src/cpu-profiler.h
index 42d79a578e..4175e8f680 100644
--- a/deps/v8/src/cpu-profiler.h
+++ b/deps/v8/src/cpu-profiler.h
@@ -28,8 +28,6 @@
#ifndef V8_CPU_PROFILER_H_
#define V8_CPU_PROFILER_H_
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "allocation.h"
#include "atomicops.h"
#include "circular-queue.h"
@@ -206,9 +204,6 @@ class ProfilerEventsProcessor : public Thread {
v8::internal::CpuProfiler::Call; \
} \
} while (false)
-#else
-#define PROFILE(isolate, Call) LOG(isolate, Call)
-#endif // ENABLE_LOGGING_AND_PROFILING
namespace v8 {
@@ -221,7 +216,6 @@ class CpuProfiler {
static void Setup();
static void TearDown();
-#ifdef ENABLE_LOGGING_AND_PROFILING
static void StartProfiling(const char* title);
static void StartProfiling(String* title);
static CpuProfile* StopProfiling(const char* title);
@@ -289,10 +283,6 @@ class CpuProfiler {
bool need_to_stop_sampler_;
Atomic32 is_profiling_;
-#else
- static INLINE(bool is_profiling(Isolate* isolate)) { return false; }
-#endif // ENABLE_LOGGING_AND_PROFILING
-
private:
DISALLOW_COPY_AND_ASSIGN(CpuProfiler);
};
diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc
index 6f948c6e5c..5f57350093 100644
--- a/deps/v8/src/d8.cc
+++ b/deps/v8/src/d8.cc
@@ -41,6 +41,9 @@
#include "natives.h"
#include "platform.h"
+#if !defined(_WIN32) && !defined(_WIN64)
+#include <unistd.h> // NOLINT
+#endif
namespace v8 {
@@ -97,6 +100,8 @@ CounterCollection Shell::local_counters_;
CounterCollection* Shell::counters_ = &local_counters_;
Persistent<Context> Shell::utility_context_;
Persistent<Context> Shell::evaluation_context_;
+i::Mutex* Shell::context_mutex_(i::OS::CreateMutex());
+ShellOptions Shell::options;
bool CounterMap::Match(void* key1, void* key2) {
@@ -119,6 +124,7 @@ bool Shell::ExecuteString(Handle<String> source,
bool report_exceptions) {
HandleScope handle_scope;
TryCatch try_catch;
+ options.script_executed = true;
if (i::FLAG_debugger) {
// When debugging make exceptions appear to be uncaught.
try_catch.SetVerbose(true);
@@ -238,7 +244,7 @@ Handle<Value> Shell::CreateExternalArray(const Arguments& args,
if (raw_length < 0) {
return ThrowException(String::New("Array length must not be negative."));
}
- if (raw_length > v8::internal::ExternalArray::kMaxLength) {
+ if (raw_length > i::ExternalArray::kMaxLength) {
return ThrowException(
String::New("Array length exceeds maximum length."));
}
@@ -246,7 +252,7 @@ Handle<Value> Shell::CreateExternalArray(const Arguments& args,
} else {
return ThrowException(String::New("Array length must be a number."));
}
- if (length > static_cast<size_t>(internal::ExternalArray::kMaxLength)) {
+ if (length > static_cast<size_t>(i::ExternalArray::kMaxLength)) {
return ThrowException(String::New("Array length exceeds maximum length."));
}
void* data = calloc(length, element_size);
@@ -540,7 +546,6 @@ void Shell::InstallUtilityScript() {
shell_source_name.length());
Handle<Script> script = Script::Compile(source, name);
script->Run();
-
// Mark the d8 shell script as native to avoid it showing up as normal source
// in the debugger.
i::Handle<i::Object> compiled_script = Utils::OpenHandle(*script);
@@ -550,6 +555,13 @@ void Shell::InstallUtilityScript() {
: i::Handle<i::Script>(i::Script::cast(
i::SharedFunctionInfo::cast(*compiled_script)->script()));
script_object->set_type(i::Smi::FromInt(i::Script::TYPE_NATIVE));
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Start the in-process debugger if requested.
+ if (i::FLAG_debugger && !i::FLAG_debugger_agent) {
+ v8::Debug::SetDebugEventListener(HandleDebugEvent);
+ }
+#endif
}
@@ -625,7 +637,7 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
}
-void Shell::Initialize(bool test_shell) {
+void Shell::Initialize() {
#ifdef COMPRESS_STARTUP_DATA_BZ2
BZip2Decompressor startup_data_decompressor;
int bz2_result = startup_data_decompressor.Decompress();
@@ -645,7 +657,7 @@ void Shell::Initialize(bool test_shell) {
V8::SetAddHistogramSampleFunction(AddHistogramSample);
}
- if (test_shell) return;
+ if (options.test_shell) return;
Locker lock;
HandleScope scope;
@@ -657,26 +669,17 @@ void Shell::Initialize(bool test_shell) {
if (i::FLAG_debugger_agent) {
v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true);
}
-
- // Start the in-process debugger if requested.
- if (i::FLAG_debugger && !i::FLAG_debugger_agent) {
- v8::Debug::SetDebugEventListener(HandleDebugEvent);
- }
#endif
}
-void Shell::RenewEvaluationContext() {
+Persistent<Context> Shell::CreateEvaluationContext() {
+ // This needs to be a critical section since creating a context is not thread-safe.
+ i::ScopedLock lock(context_mutex_);
// Initialize the global objects
- HandleScope scope;
Handle<ObjectTemplate> global_template = CreateGlobalTemplate();
-
- // (Re-)create the evaluation context
- if (!evaluation_context_.IsEmpty()) {
- evaluation_context_.Dispose();
- }
- evaluation_context_ = Context::New(NULL, global_template);
- Context::Scope utility_scope(evaluation_context_);
+ Persistent<Context> context = Context::New(NULL, global_template);
+ Context::Scope scope(context);
i::JSArguments js_args = i::FLAG_js_arguments;
i::Handle<i::FixedArray> arguments_array =
@@ -688,28 +691,27 @@ void Shell::RenewEvaluationContext() {
}
i::Handle<i::JSArray> arguments_jsarray =
FACTORY->NewJSArrayWithElements(arguments_array);
- evaluation_context_->Global()->Set(String::New("arguments"),
+ context->Global()->Set(String::New("arguments"),
Utils::ToLocal(arguments_jsarray));
+ return context;
}
void Shell::OnExit() {
if (i::FLAG_dump_counters) {
- ::printf("+----------------------------------------+-------------+\n");
- ::printf("| Name | Value |\n");
- ::printf("+----------------------------------------+-------------+\n");
+ printf("+----------------------------------------+-------------+\n");
+ printf("| Name | Value |\n");
+ printf("+----------------------------------------+-------------+\n");
for (CounterMap::Iterator i(counter_map_); i.More(); i.Next()) {
Counter* counter = i.CurrentValue();
if (counter->is_histogram()) {
- ::printf("| c:%-36s | %11i |\n", i.CurrentKey(), counter->count());
- ::printf("| t:%-36s | %11i |\n",
- i.CurrentKey(),
- counter->sample_total());
+ printf("| c:%-36s | %11i |\n", i.CurrentKey(), counter->count());
+ printf("| t:%-36s | %11i |\n", i.CurrentKey(), counter->sample_total());
} else {
- ::printf("| %-38s | %11i |\n", i.CurrentKey(), counter->count());
+ printf("| %-38s | %11i |\n", i.CurrentKey(), counter->count());
}
}
- ::printf("+----------------------------------------+-------------+\n");
+ printf("+----------------------------------------+-------------+\n");
}
if (counters_file_ != NULL)
delete counters_file_;
@@ -717,7 +719,8 @@ void Shell::OnExit() {
static char* ReadChars(const char* name, int* size_out) {
- v8::Unlocker unlocker; // Release the V8 lock while reading files.
+ // Release the V8 lock while reading files.
+ v8::Unlocker unlocker(Isolate::GetCurrent());
FILE* file = i::OS::FOpen(name, "rb");
if (file == NULL) return NULL;
@@ -806,11 +809,6 @@ class ShellThread : public i::Thread {
void ShellThread::Run() {
- // Prepare the context for this thread.
- Locker locker;
- HandleScope scope;
- Handle<ObjectTemplate> global_template = Shell::CreateGlobalTemplate();
-
char* ptr = const_cast<char*>(files_.start());
while ((ptr != NULL) && (*ptr != '\0')) {
// For each newline-separated line.
@@ -822,7 +820,10 @@ void ShellThread::Run() {
continue;
}
- Persistent<Context> thread_context = Context::New(NULL, global_template);
+ // Prepare the context for this thread.
+ Locker locker;
+ HandleScope scope;
+ Persistent<Context> thread_context = Shell::CreateEvaluationContext();
Context::Scope context_scope(thread_context);
while ((ptr != NULL) && (*ptr != '\0')) {
@@ -848,153 +849,296 @@ void ShellThread::Run() {
}
}
-int Shell::RunMain(int argc, char* argv[], bool* executed) {
- // Default use preemption if threads are created.
- bool use_preemption = true;
- // Default to use lowest possible thread preemption interval to test as many
- // edgecases as possible.
- int preemption_interval = 1;
+void SourceGroup::ExitShell(int exit_code) {
+ // Use _exit instead of exit to avoid races between isolate
+ // threads and static destructors.
+ fflush(stdout);
+ fflush(stderr);
+ _exit(exit_code);
+}
- i::List<i::Thread*> threads(1);
- {
- // Since the thread below may spawn new threads accessing V8 holding the
- // V8 lock here is mandatory.
- Locker locker;
- RenewEvaluationContext();
- Context::Scope context_scope(evaluation_context_);
- for (int i = 1; i < argc; i++) {
- char* str = argv[i];
- if (strcmp(str, "--preemption") == 0) {
- use_preemption = true;
- } else if (strcmp(str, "--no-preemption") == 0) {
- use_preemption = false;
- } else if (strcmp(str, "--preemption-interval") == 0) {
- if (i + 1 < argc) {
- char* end = NULL;
- preemption_interval = strtol(argv[++i], &end, 10); // NOLINT
- if (preemption_interval <= 0 || *end != '\0' || errno == ERANGE) {
- printf("Invalid value for --preemption-interval '%s'\n", argv[i]);
- return 1;
- }
- } else {
- printf("Missing value for --preemption-interval\n");
- return 1;
- }
- } else if (strcmp(str, "-f") == 0) {
- // Ignore any -f flags for compatibility with other stand-alone
- // JavaScript engines.
- continue;
- } else if (strncmp(str, "--", 2) == 0) {
- printf("Warning: unknown flag %s.\nTry --help for options\n", str);
- } else if (strcmp(str, "-e") == 0 && i + 1 < argc) {
- // Execute argument given to -e option directly.
- v8::HandleScope handle_scope;
- v8::Handle<v8::String> file_name = v8::String::New("unnamed");
- v8::Handle<v8::String> source = v8::String::New(argv[++i]);
- (*executed) = true;
- if (!ExecuteString(source, file_name, false, true)) {
- OnExit();
- return 1;
- }
- } else if (strcmp(str, "-p") == 0 && i + 1 < argc) {
- int size = 0;
- const char* files = ReadChars(argv[++i], &size);
- if (files == NULL) return 1;
- ShellThread* thread =
- new ShellThread(threads.length(),
- i::Vector<const char>(files, size));
- thread->Start();
- threads.Add(thread);
- (*executed) = true;
- } else {
- // Use all other arguments as names of files to load and run.
- HandleScope handle_scope;
- Handle<String> file_name = v8::String::New(str);
- Handle<String> source = ReadFile(str);
- (*executed) = true;
- if (source.IsEmpty()) {
- printf("Error reading '%s'\n", str);
- return 1;
- }
- if (!ExecuteString(source, file_name, false, true)) {
- OnExit();
- return 1;
- }
+void SourceGroup::Execute() {
+ for (int i = begin_offset_; i < end_offset_; ++i) {
+ const char* arg = argv_[i];
+ if (strcmp(arg, "-e") == 0 && i + 1 < end_offset_) {
+ // Execute argument given to -e option directly.
+ HandleScope handle_scope;
+ Handle<String> file_name = String::New("unnamed");
+ Handle<String> source = String::New(argv_[i + 1]);
+ if (!Shell::ExecuteString(source, file_name, false, true)) {
+ ExitShell(1);
+ return;
+ }
+ ++i;
+ } else if (arg[0] == '-') {
+ // Ignore other options. They have been parsed already.
+ } else {
+ // Use all other arguments as names of files to load and run.
+ HandleScope handle_scope;
+ Handle<String> file_name = String::New(arg);
+ Handle<String> source = ReadFile(arg);
+ if (source.IsEmpty()) {
+ printf("Error reading '%s'\n", arg);
+ ExitShell(1);
+ return;
+ }
+ if (!Shell::ExecuteString(source, file_name, false, true)) {
+ ExitShell(1);
+ return;
}
}
+ }
+}
- // Start preemption if threads have been created and preemption is enabled.
- if (threads.length() > 0 && use_preemption) {
- Locker::StartPreemption(preemption_interval);
- }
+
+Handle<String> SourceGroup::ReadFile(const char* name) {
+ FILE* file = fopen(name, "rb");
+ if (file == NULL) return Handle<String>();
+
+ fseek(file, 0, SEEK_END);
+ int size = ftell(file);
+ rewind(file);
+
+ char* chars = new char[size + 1];
+ chars[size] = '\0';
+ for (int i = 0; i < size;) {
+ int read = fread(&chars[i], 1, size - i, file);
+ i += read;
}
+ fclose(file);
+ Handle<String> result = String::New(chars, size);
+ delete[] chars;
+ return result;
+}
- for (int i = 0; i < threads.length(); i++) {
- i::Thread* thread = threads[i];
- thread->Join();
- delete thread;
+
+i::Thread::Options SourceGroup::GetThreadOptions() {
+ i::Thread::Options options;
+ options.name = "IsolateThread";
+ // On some systems (OS X 10.6) the default stack size is 0.5 MB or less,
+ // which is not enough to parse the big literal expressions used in tests.
+ // The stack size should be at least StackGuard::kLimitSize + some
+ // OS-specific padding for thread startup code.
+ options.stack_size = 2 << 20; // 2 MB seems to be enough.
+ return options;
+}
+
+
+void SourceGroup::ExecuteInThread() {
+ Isolate* isolate = Isolate::New();
+ do {
+ if (next_semaphore_ != NULL) next_semaphore_->Wait();
+ {
+ Isolate::Scope iscope(isolate);
+ Locker lock(isolate);
+ HandleScope scope;
+ Persistent<Context> context = Shell::CreateEvaluationContext();
+ {
+ Context::Scope cscope(context);
+ Execute();
+ }
+ context.Dispose();
+ }
+ if (done_semaphore_ != NULL) done_semaphore_->Signal();
+ } while (!Shell::options.last_run);
+ isolate->Dispose();
+}
+
+
+void SourceGroup::StartExecuteInThread() {
+ if (thread_ == NULL) {
+ thread_ = new IsolateThread(this);
+ thread_->Start();
}
- OnExit();
- return 0;
+ next_semaphore_->Signal();
}
-int Shell::Main(int argc, char* argv[]) {
- // Figure out if we're requested to stress the optimization
- // infrastructure by running tests multiple times and forcing
- // optimization in the last run.
- bool FLAG_stress_opt = false;
- bool FLAG_stress_deopt = false;
- bool FLAG_interactive_shell = false;
- bool FLAG_test_shell = false;
- bool script_executed = false;
+void SourceGroup::WaitForThread() {
+ if (thread_ == NULL) return;
+ if (Shell::options.last_run) {
+ thread_->Join();
+ thread_ = NULL;
+ } else {
+ done_semaphore_->Wait();
+ }
+}
+
+bool Shell::SetOptions(int argc, char* argv[]) {
for (int i = 0; i < argc; i++) {
if (strcmp(argv[i], "--stress-opt") == 0) {
- FLAG_stress_opt = true;
+ options.stress_opt = true;
argv[i] = NULL;
} else if (strcmp(argv[i], "--stress-deopt") == 0) {
- FLAG_stress_deopt = true;
+ options.stress_deopt = true;
argv[i] = NULL;
} else if (strcmp(argv[i], "--noalways-opt") == 0) {
// No support for stressing if we can't use --always-opt.
- FLAG_stress_opt = false;
- FLAG_stress_deopt = false;
+ options.stress_opt = false;
+ options.stress_deopt = false;
} else if (strcmp(argv[i], "--shell") == 0) {
- FLAG_interactive_shell = true;
+ options.interactive_shell = true;
argv[i] = NULL;
} else if (strcmp(argv[i], "--test") == 0) {
- FLAG_test_shell = true;
+ options.test_shell = true;
+ argv[i] = NULL;
+ } else if (strcmp(argv[i], "--preemption") == 0) {
+ options.use_preemption = true;
argv[i] = NULL;
+ } else if (strcmp(argv[i], "--no-preemption") == 0) {
+ options.use_preemption = false;
+ argv[i] = NULL;
+ } else if (strcmp(argv[i], "--preemption-interval") == 0) {
+ if (++i < argc) {
+ argv[i-1] = NULL;
+ char* end = NULL;
+ options.preemption_interval = strtol(argv[i], &end, 10); // NOLINT
+ if (options.preemption_interval <= 0
+ || *end != '\0'
+ || errno == ERANGE) {
+ printf("Invalid value for --preemption-interval '%s'\n", argv[i]);
+ return false;
+ }
+ argv[i] = NULL;
+ } else {
+ printf("Missing value for --preemption-interval\n");
+ return false;
+ }
+ } else if (strcmp(argv[i], "-f") == 0) {
+ // Ignore any -f flags for compatibility with other stand-alone
+ // JavaScript engines.
+ continue;
+ } else if (strcmp(argv[i], "--isolate") == 0) {
+ options.num_isolates++;
+ }
+ }
+
+ // Run parallel threads if we are not using --isolate
+ for (int i = 1; i < argc; i++) {
+ if (argv[i] == NULL) continue;
+ if (strcmp(argv[i], "-p") == 0 && i + 1 < argc) {
+ if (options.num_isolates > 1) {
+ printf("-p is not compatible with --isolate\n");
+ return false;
+ }
+ argv[i] = NULL;
+ if (options.parallel_files == NULL) {
+ options.parallel_files = new i::List<i::Vector<const char> >();
+ }
+ int size = 0;
+ const char* files = ReadChars(argv[++i], &size);
+ if (files == NULL) {
+ printf("-p option incomplete\n");
+ return false;
+ }
+ argv[i] = NULL;
+ options.parallel_files->Add(i::Vector<const char>(files, size));
}
}
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
- Initialize(FLAG_test_shell);
+ // Set up isolated source groups.
+ options.isolate_sources = new SourceGroup[options.num_isolates];
+ SourceGroup* current = options.isolate_sources;
+ current->Begin(argv, 1);
+ for (int i = 1; i < argc; i++) {
+ const char* str = argv[i];
+ if (strcmp(str, "--isolate") == 0) {
+ current->End(i);
+ current++;
+ current->Begin(argv, i + 1);
+ } else if (strncmp(argv[i], "--", 2) == 0) {
+ printf("Warning: unknown flag %s.\nTry --help for options\n", argv[i]);
+ }
+ }
+ current->End(argc);
+
+ return true;
+}
+
+
+int Shell::RunMain(int argc, char* argv[]) {
+ i::List<i::Thread*> threads(1);
+
+ {
+ if (options.parallel_files != NULL)
+ for (int i = 0; i < options.parallel_files->length(); i++) {
+ i::Vector<const char> files = options.parallel_files->at(i);
+ ShellThread* thread = new ShellThread(threads.length(), files);
+ thread->Start();
+ threads.Add(thread);
+ }
+
+ for (int i = 1; i < options.num_isolates; ++i) {
+ options.isolate_sources[i].StartExecuteInThread();
+ }
+
+ Locker lock;
+ HandleScope scope;
+ Persistent<Context> context = CreateEvaluationContext();
+ {
+ Context::Scope cscope(context);
+ options.isolate_sources[0].Execute();
+ }
+ if (options.last_run) {
+ // Keep using the same context in the interactive shell
+ evaluation_context_ = context;
+ } else {
+ context.Dispose();
+ }
+ // Start preemption if threads have been created and preemption is enabled.
+ if (options.parallel_files != NULL
+ && threads.length() > 0
+ && options.use_preemption) {
+ Locker::StartPreemption(options.preemption_interval);
+ }
+ }
+
+ for (int i = 1; i < options.num_isolates; ++i) {
+ options.isolate_sources[i].WaitForThread();
+ }
+
+ if (options.parallel_files != NULL)
+ for (int i = 0; i < threads.length(); i++) {
+ i::Thread* thread = threads[i];
+ thread->Join();
+ delete thread;
+ }
+
+ OnExit();
+ return 0;
+}
+
+
+int Shell::Main(int argc, char* argv[]) {
+ if (!SetOptions(argc, argv)) return 1;
+ Initialize();
int result = 0;
- if (FLAG_stress_opt || FLAG_stress_deopt) {
- v8::Testing::SetStressRunType(
- FLAG_stress_opt ? v8::Testing::kStressTypeOpt
- : v8::Testing::kStressTypeDeopt);
- int stress_runs = v8::Testing::GetStressRuns();
+ if (options.stress_opt || options.stress_deopt) {
+ Testing::SetStressRunType(
+ options.stress_opt ? Testing::kStressTypeOpt
+ : Testing::kStressTypeDeopt);
+ int stress_runs = Testing::GetStressRuns();
for (int i = 0; i < stress_runs && result == 0; i++) {
printf("============ Stress %d/%d ============\n", i + 1, stress_runs);
- v8::Testing::PrepareStressRun(i);
- result = RunMain(argc, argv, &script_executed);
+ Testing::PrepareStressRun(i);
+ options.last_run = (i == stress_runs - 1);
+ result = RunMain(argc, argv);
}
printf("======== Full Deoptimization =======\n");
- v8::Testing::DeoptimizeAll();
+ Testing::DeoptimizeAll();
} else {
- result = RunMain(argc, argv, &script_executed);
+ result = RunMain(argc, argv);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
// Run remote debugger if requested, but never on --test
- if (i::FLAG_remote_debugger && !FLAG_test_shell) {
+ if (i::FLAG_remote_debugger && !options.test_shell) {
InstallUtilityScript();
RunRemoteDebugger(i::FLAG_debugger_port);
return 0;
@@ -1003,12 +1147,15 @@ int Shell::Main(int argc, char* argv[]) {
// Run interactive shell if explicitly requested or if no script has been
// executed, but never on --test
- if ((FLAG_interactive_shell || !script_executed) && !FLAG_test_shell) {
+
+ if ((options.interactive_shell || !options.script_executed)
+     && !options.test_shell) {
InstallUtilityScript();
RunShell();
}
- v8::V8::Dispose();
+ V8::Dispose();
return result;
}
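
Tying the d8 changes above together, one stress run coordinates the main thread and the --isolate threads roughly as follows (a sketch, not literal code; N is the number of SourceGroups):

    // Shell::Main:
    //   options.last_run = (i == stress_runs - 1);
    //   Shell::RunMain:
    //     groups[1..N-1].StartExecuteInThread();  // signals next_semaphore_
    //     groups[0].Execute();                    // runs on the main thread
    //     groups[1..N-1].WaitForThread();         // waits on done_semaphore_,
    //                                             // or Join()s on the last run
    // Each IsolateThread loops in ExecuteInThread():
    //   wait on next_semaphore_, run its scripts in a fresh context,
    //   signal done_semaphore_, and repeat until Shell::options.last_run.
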
diff --git a/deps/v8/src/d8.gyp b/deps/v8/src/d8.gyp
index 8b52ed9a9a..85914ec672 100644
--- a/deps/v8/src/d8.gyp
+++ b/deps/v8/src/d8.gyp
@@ -38,10 +38,7 @@
'../src',
],
'defines': [
- 'ENABLE_LOGGING_AND_PROFILING',
'ENABLE_DEBUGGER_SUPPORT',
- 'ENABLE_VMSTATE_TRACKING',
- 'V8_FAST_TLS',
],
'sources': [
'd8.cc',
diff --git a/deps/v8/src/d8.h b/deps/v8/src/d8.h
index e225469993..7f0272710b 100644
--- a/deps/v8/src/d8.h
+++ b/deps/v8/src/d8.h
@@ -112,6 +112,87 @@ class CounterMap {
};
+class SourceGroup {
+ public:
+ SourceGroup()
+ : next_semaphore_(v8::internal::OS::CreateSemaphore(0)),
+ done_semaphore_(v8::internal::OS::CreateSemaphore(0)),
+ thread_(NULL),
+ argv_(NULL),
+ begin_offset_(0),
+ end_offset_(0) { }
+
+ void Begin(char** argv, int offset) {
+ argv_ = const_cast<const char**>(argv);
+ begin_offset_ = offset;
+ }
+
+ void End(int offset) { end_offset_ = offset; }
+
+ void Execute();
+
+ void StartExecuteInThread();
+ void WaitForThread();
+
+ private:
+ class IsolateThread : public i::Thread {
+ public:
+ explicit IsolateThread(SourceGroup* group)
+ : i::Thread(GetThreadOptions()), group_(group) {}
+
+ virtual void Run() {
+ group_->ExecuteInThread();
+ }
+
+ private:
+ SourceGroup* group_;
+ };
+
+ static i::Thread::Options GetThreadOptions();
+ void ExecuteInThread();
+
+ i::Semaphore* next_semaphore_;
+ i::Semaphore* done_semaphore_;
+ i::Thread* thread_;
+
+ void ExitShell(int exit_code);
+ Handle<String> ReadFile(const char* name);
+
+ const char** argv_;
+ int begin_offset_;
+ int end_offset_;
+};
+
+
+class ShellOptions {
+ public:
+ ShellOptions()
+ : script_executed(false),
+ last_run(true),
+ stress_opt(false),
+ stress_deopt(false),
+ interactive_shell(false),
+ test_shell(false),
+ use_preemption(true),
+ preemption_interval(10),
+ num_isolates(1),
+ isolate_sources(NULL),
+ parallel_files(NULL) { }
+
+ bool script_executed;
+ bool last_run;
+ bool stress_opt;
+ bool stress_deopt;
+ bool interactive_shell;
+ bool test_shell;
+ bool use_preemption;
+ int preemption_interval;
+ int num_isolates;
+ SourceGroup* isolate_sources;
+ i::List< i::Vector<const char> >* parallel_files;
+};
+
+
class Shell: public i::AllStatic {
public:
static bool ExecuteString(Handle<String> source,
@@ -129,12 +210,13 @@ class Shell: public i::AllStatic {
static void AddHistogramSample(void* histogram, int sample);
static void MapCounters(const char* name);
static Handle<String> ReadFile(const char* name);
- static void Initialize(bool test_shell);
- static void RenewEvaluationContext();
+ static void Initialize();
+ static Persistent<Context> CreateEvaluationContext();
static void InstallUtilityScript();
static void RunShell();
+ static bool SetOptions(int argc, char* argv[]);
static int RunScript(char* filename);
- static int RunMain(int argc, char* argv[], bool* executed);
+ static int RunMain(int argc, char* argv[]);
static int Main(int argc, char* argv[]);
static Handle<ObjectTemplate> CreateGlobalTemplate();
static Handle<Array> GetCompletions(Handle<String> text,
@@ -205,6 +287,8 @@ class Shell: public i::AllStatic {
static const char* kHistoryFileName;
static const char* kPrompt;
+ static ShellOptions options;
+
private:
static Persistent<Context> utility_context_;
static Persistent<Context> evaluation_context_;
@@ -214,6 +298,7 @@ class Shell: public i::AllStatic {
static CounterCollection local_counters_;
static CounterCollection* counters_;
static i::OS::MemoryMappedFile* counters_file_;
+ static i::Mutex* context_mutex_;
static Counter* GetCounter(const char* name, bool is_histogram);
static Handle<Value> CreateExternalArray(const Arguments& args,
ExternalArrayType type,
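
To make the argv partitioning concrete: SetOptions() starts a new SourceGroup at every --isolate flag, and every group after the first is run on its own IsolateThread. The file names below are made up for illustration.

    // Hypothetical command line:  d8 first.js --isolate second.js third.js
    //   group 0: Begin(argv, 1), End(2) -> first.js on the main thread
    //   group 1: Begin(argv, 3), End(5) -> second.js and third.js on an
    //                                      IsolateThread with its own isolate
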
diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc
index f341fc6f1f..c48e514ab8 100644
--- a/deps/v8/src/debug.cc
+++ b/deps/v8/src/debug.cc
@@ -1821,6 +1821,13 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
HandleScope scope(isolate_);
+ // If there are no break points, this cannot be a break at return, as
+ // the debugger statement and stack guard debug break cannot be at
+ // return.
+ if (!has_break_points_) {
+ return false;
+ }
+
// Get the executing function in which the debug break occurred.
Handle<SharedFunctionInfo> shared =
Handle<SharedFunctionInfo>(JSFunction::cast(frame->function())->shared());
diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h
index c4d3c7e373..c614844ab5 100644
--- a/deps/v8/src/debug.h
+++ b/deps/v8/src/debug.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,6 +35,7 @@
#include "execution.h"
#include "factory.h"
#include "flags.h"
+#include "frames-inl.h"
#include "hashmap.h"
#include "platform.h"
#include "string-stream.h"
diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc
index e8c659718f..175ee6e1fb 100644
--- a/deps/v8/src/deoptimizer.cc
+++ b/deps/v8/src/deoptimizer.cc
@@ -161,8 +161,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
// Get the "simulated" top and size for the requested frame.
Address top =
reinterpret_cast<Address>(deoptimizer->output_[frame_index]->GetTop());
- unsigned size =
- deoptimizer->output_[frame_index]->GetFrameSize() / kPointerSize;
+ uint32_t size = deoptimizer->output_[frame_index]->GetFrameSize();
// Done with the GC-unsafe frame descriptions. This re-enables allocation.
deoptimizer->DeleteFrameDescriptions();
@@ -547,7 +546,7 @@ void Deoptimizer::MaterializeHeapNumbers() {
#ifdef ENABLE_DEBUGGER_SUPPORT
void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
- Address top, intptr_t size, DeoptimizedFrameInfo* info) {
+ Address top, uint32_t size, DeoptimizedFrameInfo* info) {
ASSERT_EQ(DEBUGGER, bailout_type_);
for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
@@ -557,17 +556,29 @@ void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
Address slot = d.slot_address();
if (top <= slot && slot < top + size) {
Handle<Object> num = isolate_->factory()->NewNumber(d.value());
- int expression_index = static_cast<int>(
+ // Calculate the index with the bottom of the expression stack
+ // at index 0, and the fixed part (including incoming arguments)
+ // at negative indexes.
+ int index = static_cast<int>(
info->expression_count_ - (slot - top) / kPointerSize - 1);
if (FLAG_trace_deopt) {
PrintF("Materializing a new heap number %p [%e] in slot %p"
- "for expression stack index %d\n",
+ "for stack index %d\n",
reinterpret_cast<void*>(*num),
d.value(),
d.slot_address(),
- expression_index);
+ index);
+ }
+ if (index >= 0) {
+ info->SetExpression(index, *num);
+ } else {
+ // Calculate the parameter index, subtracting one for the receiver.
+ int parameter_index =
+ index +
+ static_cast<int>(size) / kPointerSize -
+ info->expression_count_ - 1;
+ info->SetParameter(parameter_index, *num);
}
- info->SetExpression(expression_index, *num);
}
}
}
@@ -1126,6 +1137,22 @@ unsigned FrameDescription::GetOffsetFromSlotIndex(Deoptimizer* deoptimizer,
}
+int FrameDescription::ComputeParametersCount() {
+ return function_->shared()->formal_parameter_count();
+}
+
+
+Object* FrameDescription::GetParameter(Deoptimizer* deoptimizer, int index) {
+ ASSERT_EQ(Code::FUNCTION, kind_);
+ ASSERT(index >= 0);
+ ASSERT(index < ComputeParametersCount());
+ // The slot indexes for incoming arguments are negative.
+ unsigned offset = GetOffsetFromSlotIndex(deoptimizer,
+ index - ComputeParametersCount());
+ return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
+}
+
+
unsigned FrameDescription::GetExpressionCount(Deoptimizer* deoptimizer) {
ASSERT_EQ(Code::FUNCTION, kind_);
unsigned size = GetFrameSize() - deoptimizer->ComputeFixedSize(GetFunction());
@@ -1415,7 +1442,13 @@ void SlotRef::ComputeSlotMappingForArguments(JavaScriptFrame* frame,
DeoptimizedFrameInfo::DeoptimizedFrameInfo(
Deoptimizer* deoptimizer, int frame_index) {
FrameDescription* output_frame = deoptimizer->output_[frame_index];
+ SetFunction(output_frame->GetFunction());
expression_count_ = output_frame->GetExpressionCount(deoptimizer);
+ parameters_count_ = output_frame->ComputeParametersCount();
+ parameters_ = new Object*[parameters_count_];
+ for (int i = 0; i < parameters_count_; i++) {
+ SetParameter(i, output_frame->GetParameter(deoptimizer, i));
+ }
expression_stack_ = new Object*[expression_count_];
for (int i = 0; i < expression_count_; i++) {
SetExpression(i, output_frame->GetExpression(deoptimizer, i));
@@ -1424,10 +1457,13 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(
DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
- delete expression_stack_;
+ delete[] expression_stack_;
+ delete[] parameters_;
}
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
+ v->VisitPointer(reinterpret_cast<Object**>(&function_));
+ v->VisitPointers(parameters_, parameters_ + parameters_count_);
v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}
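
A worked example of the slot-index arithmetic above; all numbers are invented for illustration.

    // kPointerSize = 4, size = 40 bytes (10 slots), expression_count_ = 2
    //
    // index = expression_count_ - (slot - top) / kPointerSize - 1
    //   slot == top       -> index =  1   (top of the expression stack)
    //   slot == top + 4   -> index =  0   (bottom of the expression stack)
    //   slot == top + 32  -> index = -7   (inside the fixed part)
    //
    // A negative index is mapped onto the incoming arguments:
    // parameter_index = index + size / kPointerSize - expression_count_ - 1
    //   index == -7       -> parameter_index = -7 + 10 - 2 - 1 = 0
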
diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h
index 64823183a5..9265905366 100644
--- a/deps/v8/src/deoptimizer.h
+++ b/deps/v8/src/deoptimizer.h
@@ -194,7 +194,7 @@ class Deoptimizer : public Malloced {
void MaterializeHeapNumbers();
#ifdef ENABLE_DEBUGGER_SUPPORT
void MaterializeHeapNumbersForDebuggerInspectableFrame(
- Address top, intptr_t size, DeoptimizedFrameInfo* info);
+ Address top, uint32_t size, DeoptimizedFrameInfo* info);
#endif
static void ComputeOutputFrames(Deoptimizer* deoptimizer);
@@ -400,6 +400,12 @@ class FrameDescription {
void SetKind(Code::Kind kind) { kind_ = kind; }
#endif
+ // Get the incoming arguments count.
+ int ComputeParametersCount();
+
+ // Get a parameter value for an unoptimized frame.
+ Object* GetParameter(Deoptimizer* deoptimizer, int index);
+
// Get the expression stack height for an unoptimized frame.
unsigned GetExpressionCount(Deoptimizer* deoptimizer);
@@ -662,9 +668,23 @@ class DeoptimizedFrameInfo : public Malloced {
// GC support.
void Iterate(ObjectVisitor* v);
+ // Return the number of incoming arguments.
+ int parameters_count() { return parameters_count_; }
+
// Return the height of the expression stack.
int expression_count() { return expression_count_; }
+ // Get the frame function.
+ JSFunction* GetFunction() {
+ return function_;
+ }
+
+ // Get an incoming argument.
+ Object* GetParameter(int index) {
+ ASSERT(0 <= index && index < parameters_count());
+ return parameters_[index];
+ }
+
// Get an expression from the expression stack.
Object* GetExpression(int index) {
ASSERT(0 <= index && index < expression_count());
@@ -672,13 +692,27 @@ class DeoptimizedFrameInfo : public Malloced {
}
private:
+ // Set the frame function.
+ void SetFunction(JSFunction* function) {
+ function_ = function;
+ }
+
+ // Set an incoming argument.
+ void SetParameter(int index, Object* obj) {
+ ASSERT(0 <= index && index < parameters_count());
+ parameters_[index] = obj;
+ }
+
// Set an expression on the expression stack.
void SetExpression(int index, Object* obj) {
ASSERT(0 <= index && index < expression_count());
expression_stack_[index] = obj;
}
+ JSFunction* function_;
+ int parameters_count_;
int expression_count_;
+ Object** parameters_;
Object** expression_stack_;
friend class Deoptimizer;
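
A made-up debugger-side helper showing how the new DeoptimizedFrameInfo accessors fit together; obtaining the info object (via Deoptimizer::DebuggerInspectableFrame) is outside this sketch.

    static void DumpInspectableFrame(DeoptimizedFrameInfo* info) {
      PrintF("function: ");
      info->GetFunction()->ShortPrint();
      PrintF("\n");
      for (int i = 0; i < info->parameters_count(); i++) {
        PrintF("  param %d: ", i);
        info->GetParameter(i)->ShortPrint();
        PrintF("\n");
      }
      for (int i = 0; i < info->expression_count(); i++) {
        PrintF("  expr %d: ", i);
        info->GetExpression(i)->ShortPrint();
        PrintF("\n");
      }
    }
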
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h
index 6e13dd2d0a..2db44c3067 100644
--- a/deps/v8/src/flag-definitions.h
+++ b/deps/v8/src/flag-definitions.h
@@ -452,14 +452,10 @@ DEFINE_bool(trace_regexp_assembler,
"trace regexp macro assembler calls.")
//
-// Logging and profiling only flags
+// Logging and profiling flags
//
#undef FLAG
-#ifdef ENABLE_LOGGING_AND_PROFILING
#define FLAG FLAG_FULL
-#else
-#define FLAG FLAG_READONLY
-#endif
// log.cc
DEFINE_bool(log, false,
@@ -491,19 +487,6 @@ DEFINE_string(logfile, "v8.log", "Specify the name of the log file.")
DEFINE_bool(ll_prof, false, "Enable low-level linux profiler.")
//
-// Heap protection flags
-// Using heap protection requires ENABLE_LOGGING_AND_PROFILING as well.
-//
-#ifdef ENABLE_HEAP_PROTECTION
-#undef FLAG
-#define FLAG FLAG_FULL
-
-DEFINE_bool(protect_heap, false,
- "Protect/unprotect V8's heap when leaving/entring the VM.")
-
-#endif
-
-//
// Disassembler only flags
//
#undef FLAG
diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc
index 4e67463f1f..eaf09ebaf5 100644
--- a/deps/v8/src/frames.cc
+++ b/deps/v8/src/frames.cc
@@ -36,6 +36,8 @@
#include "scopeinfo.h"
#include "string-stream.h"
+#include "allocation-inl.h"
+
namespace v8 {
namespace internal {
@@ -346,7 +348,6 @@ void SafeStackFrameIterator::Reset() {
// -------------------------------------------------------------------------
-#ifdef ENABLE_LOGGING_AND_PROFILING
SafeStackTraceFrameIterator::SafeStackTraceFrameIterator(
Isolate* isolate,
Address fp, Address sp, Address low_bound, Address high_bound) :
@@ -362,7 +363,6 @@ void SafeStackTraceFrameIterator::Advance() {
if (frame()->is_java_script()) return;
}
}
-#endif
Code* StackFrame::GetSafepointData(Isolate* isolate,
diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h
index 9e93daef3d..f542a92d96 100644
--- a/deps/v8/src/frames.h
+++ b/deps/v8/src/frames.h
@@ -843,7 +843,6 @@ class SafeStackFrameIterator BASE_EMBEDDED {
};
-#ifdef ENABLE_LOGGING_AND_PROFILING
typedef JavaScriptFrameIteratorTemp<SafeStackFrameIterator>
SafeJavaScriptFrameIterator;
@@ -855,7 +854,6 @@ class SafeStackTraceFrameIterator: public SafeJavaScriptFrameIterator {
Address low_bound, Address high_bound);
void Advance();
};
-#endif
class StackFrameLocator BASE_EMBEDDED {
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index d25ca490f6..6b174f7427 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -444,7 +444,7 @@ class FullCodeGenerator: public AstVisitor {
TypeofState typeof_state,
Label* slow,
Label* done);
- void EmitVariableLoad(Variable* expr);
+ void EmitVariableLoad(VariableProxy* proxy);
enum ResolveEvalFlag {
SKIP_CONTEXT_LOOKUP,
diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc
index d8cc742576..d73aaf0fca 100644
--- a/deps/v8/src/handles.cc
+++ b/deps/v8/src/handles.cc
@@ -543,11 +543,6 @@ Handle<Object> SetAccessor(Handle<JSObject> obj, Handle<AccessorInfo> info) {
// associated with the wrapper and get rid of both the wrapper and the
// handle.
static void ClearWrapperCache(Persistent<v8::Value> handle, void*) {
-#ifdef ENABLE_HEAP_PROTECTION
- // Weak reference callbacks are called as if from outside V8. We
- // need to reeenter to unprotect the heap.
- VMState state(OTHER);
-#endif
Handle<Object> cache = Utils::OpenHandle(*handle);
JSValue* wrapper = JSValue::cast(*cache);
Foreign* foreign = Script::cast(wrapper->value())->wrapper();
diff --git a/deps/v8/src/heap-profiler.cc b/deps/v8/src/heap-profiler.cc
index fb1ea8a641..7e613e9173 100644
--- a/deps/v8/src/heap-profiler.cc
+++ b/deps/v8/src/heap-profiler.cc
@@ -34,7 +34,6 @@ namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
HeapProfiler::HeapProfiler()
: snapshots_(new HeapSnapshotsCollection()),
next_snapshot_uid_(1) {
@@ -52,29 +51,21 @@ void HeapProfiler::ResetSnapshots() {
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
void HeapProfiler::Setup() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
Isolate* isolate = Isolate::Current();
if (isolate->heap_profiler() == NULL) {
isolate->set_heap_profiler(new HeapProfiler());
}
-#endif
}
void HeapProfiler::TearDown() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
Isolate* isolate = Isolate::Current();
delete isolate->heap_profiler();
isolate->set_heap_profiler(NULL);
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
int type,
v8::ActivityControl* control) {
@@ -179,7 +170,5 @@ void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
snapshots_->ObjectMoveEvent(from, to);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
diff --git a/deps/v8/src/heap-profiler.h b/deps/v8/src/heap-profiler.h
index c32f4c425f..b1bc91c307 100644
--- a/deps/v8/src/heap-profiler.h
+++ b/deps/v8/src/heap-profiler.h
@@ -33,8 +33,6 @@
namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
class HeapSnapshot;
class HeapSnapshotsCollection;
@@ -45,9 +43,6 @@ class HeapSnapshotsCollection;
profiler->call; \
} \
} while (false)
-#else
-#define HEAP_PROFILE(heap, call) ((void) 0)
-#endif // ENABLE_LOGGING_AND_PROFILING
// The HeapProfiler writes data to the log files, which can be postprocessed
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
@@ -56,7 +51,6 @@ class HeapProfiler {
static void Setup();
static void TearDown();
-#ifdef ENABLE_LOGGING_AND_PROFILING
static HeapSnapshot* TakeSnapshot(const char* name,
int type,
v8::ActivityControl* control);
@@ -93,8 +87,6 @@ class HeapProfiler {
HeapSnapshotsCollection* snapshots_;
unsigned next_snapshot_uid_;
List<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;
-
-#endif // ENABLE_LOGGING_AND_PROFILING
};
} } // namespace v8::internal
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index 508bdf3c49..98a2d3374b 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -293,12 +293,11 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
// TODO(1238405): Combine the infrastructure for --heap-stats and
// --log-gc to avoid the complicated preprocessor and flag testing.
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::ReportStatisticsBeforeGC() {
// Heap::ReportHeapStatistics will also log NewSpace statistics when
- // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set. The
- // following logic is used to avoid double logging.
-#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
+  // --log-gc is set. The following logic is used to avoid
+ // double logging.
+#ifdef DEBUG
if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
if (FLAG_heap_stats) {
ReportHeapStatistics("Before GC");
@@ -306,23 +305,16 @@ void Heap::ReportStatisticsBeforeGC() {
new_space_.ReportStatistics();
}
if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
-#elif defined(DEBUG)
- if (FLAG_heap_stats) {
- new_space_.CollectStatistics();
- ReportHeapStatistics("Before GC");
- new_space_.ClearHistograms();
- }
-#elif defined(ENABLE_LOGGING_AND_PROFILING)
+#else
if (FLAG_log_gc) {
new_space_.CollectStatistics();
new_space_.ReportStatistics();
new_space_.ClearHistograms();
}
-#endif
+#endif // DEBUG
}
-#if defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::PrintShortHeapStatistics() {
if (!FLAG_trace_gc_verbose) return;
PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
@@ -368,7 +360,6 @@ void Heap::PrintShortHeapStatistics() {
lo_space_->Size(),
lo_space_->Available());
}
-#endif
// TODO(1238405): Combine the infrastructure for --heap-stats and
@@ -376,20 +367,17 @@ void Heap::PrintShortHeapStatistics() {
void Heap::ReportStatisticsAfterGC() {
  // Similar to before GC, we use some complicated logic to ensure that
// NewSpace statistics are logged exactly once when --log-gc is turned on.
-#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
+#if defined(DEBUG)
if (FLAG_heap_stats) {
new_space_.CollectStatistics();
ReportHeapStatistics("After GC");
} else if (FLAG_log_gc) {
new_space_.ReportStatistics();
}
-#elif defined(DEBUG)
- if (FLAG_heap_stats) ReportHeapStatistics("After GC");
-#elif defined(ENABLE_LOGGING_AND_PROFILING)
+#else
if (FLAG_log_gc) new_space_.ReportStatistics();
-#endif
+#endif // DEBUG
}
-#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::GarbageCollectionPrologue() {
@@ -406,11 +394,11 @@ void Heap::GarbageCollectionPrologue() {
}
if (FLAG_gc_verbose) Print();
-#endif
+#endif // DEBUG
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+#if defined(DEBUG)
ReportStatisticsBeforeGC();
-#endif
+#endif // DEBUG
LiveObjectList::GCPrologue();
}
@@ -447,12 +435,10 @@ void Heap::GarbageCollectionEpilogue() {
symbol_table()->Capacity());
isolate_->counters()->number_of_symbols()->Set(
symbol_table()->NumberOfElements());
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+#if defined(DEBUG)
ReportStatisticsAfterGC();
-#endif
-#ifdef ENABLE_DEBUGGER_SUPPORT
+#endif // DEBUG
isolate_->debug()->AfterGarbageCollection();
-#endif
}
@@ -1335,15 +1321,12 @@ class ScavengingVisitor : public StaticVisitorBase {
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
bool should_record = false;
#ifdef DEBUG
should_record = FLAG_heap_stats;
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
should_record = should_record || FLAG_log_gc;
-#endif
if (should_record) {
if (heap->new_space()->Contains(obj)) {
heap->new_space()->RecordAllocation(obj);
@@ -1352,7 +1335,6 @@ class ScavengingVisitor : public StaticVisitorBase {
}
}
}
-#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// Helper function used by CopyObject to copy a source object to an
// allocated target object and update the forwarding pointer in the source
@@ -1368,12 +1350,9 @@ class ScavengingVisitor : public StaticVisitorBase {
source->set_map_word(MapWord::FromForwardingAddress(target));
if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// Update NewSpace stats if necessary.
RecordCopiedObject(heap, target);
-#endif
HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
-#if defined(ENABLE_LOGGING_AND_PROFILING)
Isolate* isolate = heap->isolate();
if (isolate->logger()->is_logging() ||
CpuProfiler::is_profiling(isolate)) {
@@ -1382,7 +1361,6 @@ class ScavengingVisitor : public StaticVisitorBase {
source->address(), target->address()));
}
}
-#endif
}
return target;
@@ -1558,7 +1536,6 @@ static void InitializeScavengingVisitorsTables() {
void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
// Table was already updated by some isolate.
return;
@@ -1584,7 +1561,6 @@ void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
Release_Store(&scavenging_visitors_table_mode_,
LOGGING_AND_PROFILING_ENABLED);
}
-#endif
}
@@ -5213,28 +5189,6 @@ void Heap::Shrink() {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void Heap::Protect() {
- if (HasBeenSetup()) {
- AllSpaces spaces;
- for (Space* space = spaces.next(); space != NULL; space = spaces.next())
- space->Protect();
- }
-}
-
-
-void Heap::Unprotect() {
- if (HasBeenSetup()) {
- AllSpaces spaces;
- for (Space* space = spaces.next(); space != NULL; space = spaces.next())
- space->Unprotect();
- }
-}
-
-#endif
-
-
void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
ASSERT(callback != NULL);
GCPrologueCallbackPair pair(callback, gc_type);
@@ -5930,9 +5884,7 @@ GCTracer::~GCTracer() {
PrintF("\n");
}
-#if defined(ENABLE_LOGGING_AND_PROFILING)
heap_->PrintShortHeapStatistics();
-#endif
}
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index 5aba05d9e7..d90a681d43 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -409,12 +409,6 @@ class Heap {
// Uncommit unused semi space.
bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the heap by marking all spaces read-only/writable.
- void Protect();
- void Unprotect();
-#endif
-
// Allocates and initializes a new JavaScript object based on a
// constructor.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -1052,10 +1046,8 @@ class Heap {
void ZapFromSpace();
#endif
-#if defined(ENABLE_LOGGING_AND_PROFILING)
// Print short heap statistics.
void PrintShortHeapStatistics();
-#endif
// Makes a new symbol object
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -1514,11 +1506,9 @@ class Heap {
// around a GC).
inline void CompletelyClearInstanceofCache();
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
-#endif
// Slow part of scavenge object.
static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc
index e28b9efd8f..50f2f6df6c 100644
--- a/deps/v8/src/hydrogen-instructions.cc
+++ b/deps/v8/src/hydrogen-instructions.cc
@@ -784,6 +784,21 @@ void HChange::PrintDataTo(StringStream* stream) {
}
+HValue* HCheckInstanceType::Canonicalize() {
+ if (check_ == IS_STRING &&
+ !value()->type().IsUninitialized() &&
+ value()->type().IsString()) {
+ return NULL;
+ }
+ if (check_ == IS_SYMBOL &&
+ value()->IsConstant() &&
+ HConstant::cast(value())->handle()->IsSymbol()) {
+ return NULL;
+ }
+ return this;
+}
+
+
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
InstanceType* last) {
ASSERT(is_interval_check());
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index cc32b93831..401c2e4a03 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -2003,14 +2003,7 @@ class HCheckInstanceType: public HUnaryOperation {
virtual void Verify();
#endif
- virtual HValue* Canonicalize() {
- if (!value()->type().IsUninitialized() &&
- value()->type().IsString() &&
- check_ == IS_STRING) {
- return NULL;
- }
- return this;
- }
+ virtual HValue* Canonicalize();
bool is_interval_check() const { return check_ <= LAST_INTERVAL_CHECK; }
void GetCheckInterval(InstanceType* first, InstanceType* last);
@@ -3362,8 +3355,9 @@ class HLoadContextSlot: public HUnaryOperation {
static inline bool StoringValueNeedsWriteBarrier(HValue* value) {
- return !value->type().IsSmi() &&
- !(value->IsConstant() && HConstant::cast(value)->InOldSpace());
+ return !value->type().IsBoolean()
+ && !value->type().IsSmi()
+ && !(value->IsConstant() && HConstant::cast(value)->InOldSpace());
}
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index e3eb122bc6..48bd8b1a57 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -511,25 +511,17 @@ static void IntegerConvert(MacroAssembler* masm,
}
-const char* UnaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "UnaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- UnaryOpIC::GetName(operand_type_));
- return name_;
+ stream->Add("UnaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ UnaryOpIC::GetName(operand_type_));
}
@@ -914,12 +906,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
}
-const char* BinaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
switch (mode_) {
@@ -928,13 +915,10 @@ const char* BinaryOpStub::GetName() {
case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
default: overwrite_name = "UnknownOverwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- return name_;
+ stream->Add("BinaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ BinaryOpIC::GetName(operands_type_));
}
@@ -4380,9 +4364,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
Label invoke, exit;
-#ifdef ENABLE_LOGGING_AND_PROFILING
Label not_outermost_js, not_outermost_js_2;
-#endif
// Setup frame.
__ push(ebp);
@@ -4401,7 +4383,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
__ push(Operand::StaticVariable(c_entry_fp));
-#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
masm->isolate());
@@ -4414,7 +4395,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ bind(&not_outermost_js);
__ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
__ bind(&cont);
-#endif
// Call a faked try-block that does the invoke.
__ call(&invoke);
@@ -4462,7 +4442,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ PopTryHandler();
__ bind(&exit);
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Check if the current stack frame is marked as the outermost JS frame.
__ pop(ebx);
__ cmp(Operand(ebx),
@@ -4470,7 +4449,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ j(not_equal, &not_outermost_js_2);
__ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ bind(&not_outermost_js_2);
-#endif
// Restore the top frame descriptor from the stack.
__ pop(Operand::StaticVariable(ExternalReference(
@@ -4732,15 +4710,8 @@ int CompareStub::MinorKey() {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
-const char* CompareStub::GetName() {
+void CompareStub::PrintName(StringStream* stream) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
-
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
const char* cc_name;
switch (cc_) {
case less: cc_name = "LT"; break;
@@ -4751,35 +4722,12 @@ const char* CompareStub::GetName() {
case not_equal: cc_name = "NE"; break;
default: cc_name = "UnknownCondition"; break;
}
-
- const char* strict_name = "";
- if (strict_ && (cc_ == equal || cc_ == not_equal)) {
- strict_name = "_STRICT";
- }
-
- const char* never_nan_nan_name = "";
- if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
- never_nan_nan_name = "_NO_NAN";
- }
-
- const char* include_number_compare_name = "";
- if (!include_number_compare_) {
- include_number_compare_name = "_NO_NUMBER";
- }
-
- const char* include_smi_compare_name = "";
- if (!include_smi_compare_) {
- include_smi_compare_name = "_NO_SMI";
- }
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "CompareStub_%s%s%s%s%s",
- cc_name,
- strict_name,
- never_nan_nan_name,
- include_number_compare_name,
- include_smi_compare_name);
- return name_;
+ bool is_equality = cc_ == equal || cc_ == not_equal;
+ stream->Add("CompareStub_%s", cc_name);
+ if (strict_ && is_equality) stream->Add("_STRICT");
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
}
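The GetName() bodies above, which allocated a name buffer through the bootstrapper and filled it with OS::SNPrintF, are replaced by PrintName(StringStream*), which appends a base name plus conditional suffixes to a caller-owned stream. A minimal sketch of that naming pattern, using std::ostringstream and a free function as stand-ins for V8's StringStream and the stub method:

#include <iostream>
#include <sstream>
#include <string>

// Same suffix logic as CompareStub::PrintName above; the stream type and
// the free function are illustrative stand-ins only.
std::string CompareStubName(const char* cc_name, bool is_equality,
                            bool strict, bool never_nan_nan,
                            bool include_number_compare,
                            bool include_smi_compare) {
  std::ostringstream stream;
  stream << "CompareStub_" << cc_name;
  if (strict && is_equality) stream << "_STRICT";
  if (never_nan_nan && is_equality) stream << "_NO_NAN";
  if (!include_number_compare) stream << "_NO_NUMBER";
  if (!include_smi_compare) stream << "_NO_SMI";
  return stream.str();
}

int main() {
  // Prints "CompareStub_EQ_STRICT_NO_SMI".
  std::cout << CompareStubName("EQ", true, true, false, true, false)
            << std::endl;
  return 0;
}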
diff --git a/deps/v8/src/ia32/code-stubs-ia32.h b/deps/v8/src/ia32/code-stubs-ia32.h
index d02aa01d7b..fa255da1fd 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.h
+++ b/deps/v8/src/ia32/code-stubs-ia32.h
@@ -67,8 +67,7 @@ class UnaryOpStub: public CodeStub {
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
- operand_type_(operand_type),
- name_(NULL) {
+ operand_type_(operand_type) {
}
private:
@@ -78,19 +77,7 @@ class UnaryOpStub: public CodeStub {
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("UnaryOpStub %d (op %s), (mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- UnaryOpIC::GetName(operand_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -153,8 +140,7 @@ class BinaryOpStub: public CodeStub {
: op_(op),
mode_(mode),
operands_type_(BinaryOpIC::UNINITIALIZED),
- result_type_(BinaryOpIC::UNINITIALIZED),
- name_(NULL) {
+ result_type_(BinaryOpIC::UNINITIALIZED) {
use_sse3_ = CpuFeatures::IsSupported(SSE3);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -167,8 +153,7 @@ class BinaryOpStub: public CodeStub {
mode_(ModeBits::decode(key)),
use_sse3_(SSE3Bits::decode(key)),
operands_type_(operands_type),
- result_type_(result_type),
- name_(NULL) { }
+ result_type_(result_type) { }
private:
enum SmiCodeGenerateHeapNumberResults {
@@ -184,20 +169,7 @@ class BinaryOpStub: public CodeStub {
BinaryOpIC::TypeInfo operands_type_;
BinaryOpIC::TypeInfo result_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("BinaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- BinaryOpIC::GetName(operands_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
// Minor key encoding in 16 bits RRRTTTSOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -415,14 +387,6 @@ class NumberToStringStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "NumberToStringStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("NumberToStringStub\n");
- }
-#endif
};
@@ -466,13 +430,6 @@ class StringDictionaryLookupStub: public CodeStub {
StringDictionary::kHeaderSize +
StringDictionary::kElementsStartIndex * kPointerSize;
-
-#ifdef DEBUG
- void Print() {
- PrintF("StringDictionaryLookupStub\n");
- }
-#endif
-
Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h
index 8f090b124e..c85fa83e9e 100644
--- a/deps/v8/src/ia32/codegen-ia32.h
+++ b/deps/v8/src/ia32/codegen-ia32.h
@@ -53,9 +53,7 @@ class CodeGenerator {
// Print the code after compiling it.
static void PrintCode(Handle<Code> code, CompilationInfo* info);
-#ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type);
-#endif
static bool RecordPositions(MacroAssembler* masm,
int pos,
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index 75cc4b8608..f9f63a70ed 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -744,7 +744,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ EmitVariableLoad(prop->obj()->AsVariableProxy());
}
__ push(eax);
@@ -1064,7 +1064,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
- EmitVariableLoad(expr->var());
+ EmitVariableLoad(expr);
}
@@ -1214,7 +1214,11 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
}
-void FullCodeGenerator::EmitVariableLoad(Variable* var) {
+void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
+ // Record position before possible IC call.
+ SetSourcePosition(proxy->position());
+ Variable* var = proxy->var();
+
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
Slot* slot = var->AsSlot();
@@ -1540,7 +1544,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->target()->AsVariableProxy());
PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
@@ -1769,7 +1773,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
{ AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ EmitVariableLoad(prop->obj()->AsVariableProxy());
}
__ mov(edx, eax);
__ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
@@ -2701,13 +2705,11 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
// with '%2s' (see Logger::LogRuntime for all the formats).
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
}
-#endif
// Finally, we're expected to leave a value on the top of the stack.
__ mov(eax, isolate()->factory()->undefined_value());
context()->Plug(eax);
@@ -3768,7 +3770,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
AccumulatorValueContext context(this);
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index be5910a124..5f143b104f 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -216,105 +216,6 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
}
-static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
- Label* miss,
- Register elements,
- Register key,
- Register r0,
- Register r1,
- Register r2,
- Register result) {
- // Register use:
- //
- // elements - holds the slow-case elements of the receiver and is unchanged.
- //
- // key - holds the smi key on entry and is unchanged.
- //
- // Scratch registers:
- //
- // r0 - holds the untagged key on entry and holds the hash once computed.
- //
- // r1 - used to hold the capacity mask of the dictionary
- //
- // r2 - used for the index into the dictionary.
- //
- // result - holds the result on exit if the load succeeds and we fall through.
-
- Label done;
-
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- __ mov(r1, r0);
- __ not_(r0);
- __ shl(r1, 15);
- __ add(r0, Operand(r1));
- // hash = hash ^ (hash >> 12);
- __ mov(r1, r0);
- __ shr(r1, 12);
- __ xor_(r0, Operand(r1));
- // hash = hash + (hash << 2);
- __ lea(r0, Operand(r0, r0, times_4, 0));
- // hash = hash ^ (hash >> 4);
- __ mov(r1, r0);
- __ shr(r1, 4);
- __ xor_(r0, Operand(r1));
- // hash = hash * 2057;
- __ imul(r0, r0, 2057);
- // hash = hash ^ (hash >> 16);
- __ mov(r1, r0);
- __ shr(r1, 16);
- __ xor_(r0, Operand(r1));
-
- // Compute capacity mask.
- __ mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
- __ shr(r1, kSmiTagSize); // convert smi to int
- __ dec(r1);
-
- // Generate an unrolled loop that performs a few probes before giving up.
- const int kProbes = 4;
- for (int i = 0; i < kProbes; i++) {
- // Use r2 for index calculations and keep the hash intact in r0.
- __ mov(r2, r0);
- // Compute the masked index: (hash + i + i * i) & mask.
- if (i > 0) {
- __ add(Operand(r2), Immediate(NumberDictionary::GetProbeOffset(i)));
- }
- __ and_(r2, Operand(r1));
-
- // Scale the index by multiplying by the entry size.
- ASSERT(NumberDictionary::kEntrySize == 3);
- __ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
-
- // Check if the key matches.
- __ cmp(key, FieldOperand(elements,
- r2,
- times_pointer_size,
- NumberDictionary::kElementsStartOffset));
- if (i != (kProbes - 1)) {
- __ j(equal, &done);
- } else {
- __ j(not_equal, miss);
- }
- }
-
- __ bind(&done);
- // Check that the value is a normal propety.
- const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
- ASSERT_EQ(NORMAL, 0);
- __ test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
- Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
- __ j(not_zero, miss);
-
- // Get the value at the masked, scaled index.
- const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
- __ mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
-}
-
-
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : receiver
@@ -591,14 +492,13 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// Push receiver on the stack to free up a register for the dictionary
// probing.
__ push(edx);
- GenerateNumberDictionaryLoad(masm,
- &slow_pop_receiver,
- ecx,
- eax,
- ebx,
- edx,
- edi,
- eax);
+ __ LoadFromNumberDictionary(&slow_pop_receiver,
+ ecx,
+ eax,
+ ebx,
+ edx,
+ edi,
+ eax);
// Pop receiver before returning.
__ pop(edx);
__ ret(0);
@@ -1200,8 +1100,8 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
__ SmiUntag(ebx);
// ebx: untagged index
// Receiver in edx will be clobbered, need to reload it on miss.
- GenerateNumberDictionaryLoad(
- masm, &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
+ __ LoadFromNumberDictionary(
+ &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
__ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
__ jmp(&do_call);
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 2f1b88e789..6293718f62 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -1345,6 +1345,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ __ nop(); // Signals no inlined code.
}
diff --git a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
index 9d91c61840..fcf1f91378 100644
--- a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
@@ -305,8 +305,13 @@ void LGapResolver::EmitMove(int index) {
} else if (source->IsConstantOperand()) {
ASSERT(destination->IsRegister() || destination->IsStackSlot());
Immediate src = cgen_->ToImmediate(source);
- Operand dst = cgen_->ToOperand(destination);
- __ mov(dst, src);
+ if (destination->IsRegister()) {
+ Register dst = cgen_->ToRegister(destination);
+ __ Set(dst, src);
+ } else {
+ Operand dst = cgen_->ToOperand(destination);
+ __ Set(dst, src);
+ }
} else if (source->IsDoubleRegister()) {
XMMRegister src = cgen_->ToDoubleRegister(source);
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index 020acded7d..136b24c981 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -734,6 +734,104 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register result) {
+ // Register use:
+ //
+ // elements - holds the slow-case elements of the receiver and is unchanged.
+ //
+ // key - holds the smi key on entry and is unchanged.
+ //
+ // Scratch registers:
+ //
+ // r0 - holds the untagged key on entry and holds the hash once computed.
+ //
+ // r1 - used to hold the capacity mask of the dictionary
+ //
+ // r2 - used for the index into the dictionary.
+ //
+ // result - holds the result on exit if the load succeeds and we fall through.
+
+ Label done;
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ mov(r1, r0);
+ not_(r0);
+ shl(r1, 15);
+ add(r0, Operand(r1));
+ // hash = hash ^ (hash >> 12);
+ mov(r1, r0);
+ shr(r1, 12);
+ xor_(r0, Operand(r1));
+ // hash = hash + (hash << 2);
+ lea(r0, Operand(r0, r0, times_4, 0));
+ // hash = hash ^ (hash >> 4);
+ mov(r1, r0);
+ shr(r1, 4);
+ xor_(r0, Operand(r1));
+ // hash = hash * 2057;
+ imul(r0, r0, 2057);
+ // hash = hash ^ (hash >> 16);
+ mov(r1, r0);
+ shr(r1, 16);
+ xor_(r0, Operand(r1));
+
+ // Compute capacity mask.
+ mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
+ shr(r1, kSmiTagSize); // convert smi to int
+ dec(r1);
+
+ // Generate an unrolled loop that performs a few probes before giving up.
+ const int kProbes = 4;
+ for (int i = 0; i < kProbes; i++) {
+ // Use r2 for index calculations and keep the hash intact in r0.
+ mov(r2, r0);
+ // Compute the masked index: (hash + i + i * i) & mask.
+ if (i > 0) {
+ add(Operand(r2), Immediate(NumberDictionary::GetProbeOffset(i)));
+ }
+ and_(r2, Operand(r1));
+
+ // Scale the index by multiplying by the entry size.
+ ASSERT(NumberDictionary::kEntrySize == 3);
+ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
+
+ // Check if the key matches.
+ cmp(key, FieldOperand(elements,
+ r2,
+ times_pointer_size,
+ NumberDictionary::kElementsStartOffset));
+ if (i != (kProbes - 1)) {
+ j(equal, &done);
+ } else {
+ j(not_equal, miss);
+ }
+ }
+
+ bind(&done);
+  // Check that the value is a normal property.
+ const int kDetailsOffset =
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ ASSERT_EQ(NORMAL, 0);
+ test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
+ Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
+ j(not_zero, miss);
+
+ // Get the value at the masked, scaled index.
+ const int kValueOffset =
+ NumberDictionary::kElementsStartOffset + kPointerSize;
+ mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
+}
+
+
void MacroAssembler::LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags) {
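The LoadFromNumberDictionary code above implements, in assembly, the integer hash and quadratic probing spelled out in its comments. A plain C++ reference of the same computation, written from those comments as a sketch in the spirit of the "keep in sync with ComputeIntegerHash in utils.h" note, not a quotation of utils.h; the key, mask, and probe count are example values.

#include <cstdint>
#include <cstdio>

// Same bit-mixing steps as the commented assembly above.
static uint32_t ComputeIntegerHashSketch(uint32_t hash) {
  hash = ~hash + (hash << 15);  // hash = (hash << 15) - hash - 1
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;           // hash = hash + (hash << 3) + (hash << 11)
  hash = hash ^ (hash >> 16);
  return hash;
}

int main() {
  const uint32_t key = 42;
  const uint32_t mask = 0x7;  // capacity - 1, capacity being a power of two
  uint32_t hash = ComputeIntegerHashSketch(key);
  // Unrolled probe sequence generated above: (hash + i + i * i) & mask.
  for (int i = 0; i < 4; i++) {
    std::printf("probe %d -> entry %u\n", i, (hash + i + i * i) & mask);
  }
  return 0;
}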
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index 837c500e9a..dac22731a9 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -352,6 +352,15 @@ class MacroAssembler: public Assembler {
Label* miss);
+ void LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register result);
+
+
// ---------------------------------------------------------------------------
// Allocation support
diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
index 21c86d050a..d504470280 100644
--- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
@@ -28,6 +28,9 @@
#ifndef V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_
+#include "ia32/assembler-ia32.h"
+#include "ia32/assembler-ia32-inl.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index e53cc0839b..2660850889 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -2679,7 +2679,10 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
// -- esp[0] : return address
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedStoreElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ MaybeObject* maybe_stub =
+ KeyedStoreElementStub(is_jsarray, elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(edx,
Handle<Map>(receiver_map),
@@ -3137,7 +3140,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
// -- esp[0] : return address
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedLoadElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(edx,
Handle<Map>(receiver_map),
@@ -3321,6 +3325,64 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
#define __ ACCESS_MASM(masm)
+void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+ MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : key
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label slow, miss_force_generic;
+
+  // This stub is meant to be tail-jumped to; the receiver must already
+  // have been verified by the caller not to be a smi.
+ __ JumpIfNotSmi(eax, &miss_force_generic);
+ __ mov(ebx, eax);
+ __ SmiUntag(ebx);
+ __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
+
+ // Push receiver on the stack to free up a register for the dictionary
+ // probing.
+ __ push(edx);
+ __ LoadFromNumberDictionary(&slow,
+ ecx,
+ eax,
+ ebx,
+ edx,
+ edi,
+ eax);
+ // Pop receiver before returning.
+ __ pop(edx);
+ __ ret(0);
+
+ __ bind(&slow);
+ __ pop(edx);
+
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : key
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ jmp(slow_ic, RelocInfo::CODE_TARGET);
+
+ __ bind(&miss_force_generic);
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : key
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+
+ Handle<Code> miss_force_generic_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ jmp(miss_force_generic_ic, RelocInfo::CODE_TARGET);
+}
+
+
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
JSObject::ElementsKind elements_kind) {
@@ -3731,7 +3793,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
bool is_js_array) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- eax : value
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index eb0f12a394..f70f75a7f6 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -1097,15 +1097,10 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
}
-MaybeObject* KeyedLoadIC::GetFastElementStubWithoutMapCheck(
- bool is_js_array) {
- return KeyedLoadFastElementStub().TryGetCode();
-}
-
-
-MaybeObject* KeyedLoadIC::GetExternalArrayStubWithoutMapCheck(
+MaybeObject* KeyedLoadIC::GetElementStubWithoutMapCheck(
+ bool is_js_array,
JSObject::ElementsKind elements_kind) {
- return KeyedLoadExternalArrayStub(elements_kind).TryGetCode();
+ return KeyedLoadElementStub(elements_kind).TryGetCode();
}
@@ -1675,7 +1670,7 @@ MaybeObject* KeyedIC::ComputeStub(JSObject* receiver,
for (int i = 0; i < target_receiver_maps.length(); ++i) {
Map* receiver_map(target_receiver_maps.at(i));
MaybeObject* maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck(
- receiver_map, strict_mode, generic_stub);
+ receiver_map, strict_mode);
Code* cached_stub;
if (!maybe_cached_stub->To(&cached_stub)) return maybe_cached_stub;
handler_ics.Add(cached_stub);
@@ -1694,18 +1689,18 @@ MaybeObject* KeyedIC::ComputeStub(JSObject* receiver,
MaybeObject* KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
Map* receiver_map,
- StrictModeFlag strict_mode,
- Code* generic_stub) {
+ StrictModeFlag strict_mode) {
if ((receiver_map->instance_type() & kNotStringTag) == 0) {
ASSERT(string_stub() != NULL);
return string_stub();
- } else if (receiver_map->has_external_array_elements()) {
- return GetExternalArrayStubWithoutMapCheck(receiver_map->elements_kind());
- } else if (receiver_map->has_fast_elements()) {
- bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
- return GetFastElementStubWithoutMapCheck(is_js_array);
} else {
- return generic_stub;
+ ASSERT(receiver_map->has_dictionary_elements() ||
+ receiver_map->has_fast_elements() ||
+ receiver_map->has_fast_double_elements() ||
+ receiver_map->has_external_array_elements());
+ bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ return GetElementStubWithoutMapCheck(is_js_array,
+ receiver_map->elements_kind());
}
}
@@ -1717,6 +1712,7 @@ MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver,
Code* result = NULL;
if (receiver->HasFastElements() ||
receiver->HasExternalArrayElements() ||
+ receiver->HasFastDoubleElements() ||
receiver->HasDictionaryElements()) {
MaybeObject* maybe_stub =
isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
@@ -1729,15 +1725,10 @@ MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver,
}
-MaybeObject* KeyedStoreIC::GetFastElementStubWithoutMapCheck(
- bool is_js_array) {
- return KeyedStoreFastElementStub(is_js_array).TryGetCode();
-}
-
-
-MaybeObject* KeyedStoreIC::GetExternalArrayStubWithoutMapCheck(
+MaybeObject* KeyedStoreIC::GetElementStubWithoutMapCheck(
+ bool is_js_array,
JSObject::ElementsKind elements_kind) {
- return KeyedStoreExternalArrayStub(elements_kind).TryGetCode();
+ return KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
}
diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h
index 9a663ba6aa..11c2e3af45 100644
--- a/deps/v8/src/ic.h
+++ b/deps/v8/src/ic.h
@@ -345,10 +345,8 @@ class KeyedIC: public IC {
explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {}
virtual ~KeyedIC() {}
- virtual MaybeObject* GetFastElementStubWithoutMapCheck(
- bool is_js_array) = 0;
-
- virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
+ virtual MaybeObject* GetElementStubWithoutMapCheck(
+ bool is_js_array,
JSObject::ElementsKind elements_kind) = 0;
protected:
@@ -373,8 +371,7 @@ class KeyedIC: public IC {
MaybeObject* ComputeMonomorphicStubWithoutMapCheck(
Map* receiver_map,
- StrictModeFlag strict_mode,
- Code* generic_stub);
+ StrictModeFlag strict_mode);
MaybeObject* ComputeMonomorphicStub(JSObject* receiver,
bool is_store,
@@ -415,10 +412,8 @@ class KeyedLoadIC: public KeyedIC {
static const int kSlowCaseBitFieldMask =
(1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor);
- virtual MaybeObject* GetFastElementStubWithoutMapCheck(
- bool is_js_array);
-
- virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
+ virtual MaybeObject* GetElementStubWithoutMapCheck(
+ bool is_js_array,
JSObject::ElementsKind elements_kind);
protected:
@@ -568,10 +563,8 @@ class KeyedStoreIC: public KeyedIC {
static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
static void GenerateNonStrictArguments(MacroAssembler* masm);
- virtual MaybeObject* GetFastElementStubWithoutMapCheck(
- bool is_js_array);
-
- virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
+ virtual MaybeObject* GetElementStubWithoutMapCheck(
+ bool is_js_array,
JSObject::ElementsKind elements_kind);
protected:
diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc
index 7423274a1e..8a30e7924f 100644
--- a/deps/v8/src/isolate.cc
+++ b/deps/v8/src/isolate.cc
@@ -85,13 +85,9 @@ void ThreadLocalTop::InitializeInternal() {
#ifdef USE_SIMULATOR
simulator_ = NULL;
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
js_entry_sp_ = NULL;
external_callback_ = NULL;
-#endif
-#ifdef ENABLE_VMSTATE_TRACKING
current_vm_state_ = EXTERNAL;
-#endif
try_catch_handler_address_ = NULL;
context_ = NULL;
thread_id_ = ThreadId::Invalid();
@@ -1279,11 +1275,9 @@ Handle<Context> Isolate::GetCallingGlobalContext() {
char* Isolate::ArchiveThread(char* to) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
RuntimeProfiler::IsolateExitedJS(this);
}
-#endif
memcpy(to, reinterpret_cast<char*>(thread_local_top()),
sizeof(ThreadLocalTop));
InitializeThreadLocal();
@@ -1303,12 +1297,10 @@ char* Isolate::RestoreThread(char* from) {
thread_local_top()->simulator_ = Simulator::current(this);
#endif
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
RuntimeProfiler::IsolateEnteredJS(this);
}
ASSERT(context() == NULL || context()->IsContext());
-#endif
return from + sizeof(ThreadLocalTop);
}
@@ -1627,7 +1619,6 @@ bool Isolate::PreInit() {
#define C(name) isolate_addresses_[Isolate::k_##name] = \
reinterpret_cast<Address>(name());
ISOLATE_ADDRESS_LIST(C)
- ISOLATE_ADDRESS_LIST_PROF(C)
#undef C
string_tracker_ = new StringTracker();
diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h
index a4af1362a3..f2281aa418 100644
--- a/deps/v8/src/isolate.h
+++ b/deps/v8/src/isolate.h
@@ -125,14 +125,8 @@ typedef ZoneList<Handle<Object> > ZoneObjectList;
C(c_entry_fp_address) \
C(context_address) \
C(pending_exception_address) \
- C(external_caught_exception_address)
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-#define ISOLATE_ADDRESS_LIST_PROF(C) \
+ C(external_caught_exception_address) \
C(js_entry_sp_address)
-#else
-#define ISOLATE_ADDRESS_LIST_PROF(C)
-#endif
// Platform-independent, reliable thread identifier.
@@ -252,14 +246,9 @@ class ThreadLocalTop BASE_EMBEDDED {
#endif
#endif // USE_SIMULATOR
-#ifdef ENABLE_LOGGING_AND_PROFILING
Address js_entry_sp_; // the stack pointer of the bottom js entry frame
Address external_callback_; // the external callback we're currently in
-#endif
-
-#ifdef ENABLE_VMSTATE_TRACKING
StateTag current_vm_state_;
-#endif
// Generated code scratch locations.
int32_t formal_count_;
@@ -313,18 +302,6 @@ class HashMap;
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
-#define ISOLATE_LOGGING_INIT_LIST(V) \
- V(CpuProfiler*, cpu_profiler, NULL) \
- V(HeapProfiler*, heap_profiler, NULL)
-
-#else
-
-#define ISOLATE_LOGGING_INIT_LIST(V)
-
-#endif
-
#define ISOLATE_INIT_ARRAY_LIST(V) \
/* SerializerDeserializer state. */ \
V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
@@ -373,8 +350,9 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
/* SafeStackFrameIterator activations count. */ \
V(int, safe_stack_iterator_counter, 0) \
V(uint64_t, enabled_cpu_features, 0) \
+ V(CpuProfiler*, cpu_profiler, NULL) \
+ V(HeapProfiler*, heap_profiler, NULL) \
ISOLATE_PLATFORM_INIT_LIST(V) \
- ISOLATE_LOGGING_INIT_LIST(V) \
ISOLATE_DEBUGGER_INIT_LIST(V)
class Isolate {
@@ -445,7 +423,6 @@ class Isolate {
enum AddressId {
#define C(name) k_##name,
ISOLATE_ADDRESS_LIST(C)
- ISOLATE_ADDRESS_LIST_PROF(C)
#undef C
k_isolate_address_count
};
@@ -620,7 +597,6 @@ class Isolate {
}
inline Address* handler_address() { return &thread_local_top_.handler_; }
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Bottom JS entry (see StackTracer::Trace in log.cc).
static Address js_entry_sp(ThreadLocalTop* thread) {
return thread->js_entry_sp_;
@@ -628,7 +604,6 @@ class Isolate {
inline Address* js_entry_sp_address() {
return &thread_local_top_.js_entry_sp_;
}
-#endif
// Generated code scratch locations.
void* formal_count_address() { return &thread_local_top_.formal_count_; }
@@ -945,16 +920,13 @@ class Isolate {
static const int kJSRegexpStaticOffsetsVectorSize = 50;
-#ifdef ENABLE_LOGGING_AND_PROFILING
Address external_callback() {
return thread_local_top_.external_callback_;
}
void set_external_callback(Address callback) {
thread_local_top_.external_callback_ = callback;
}
-#endif
-#ifdef ENABLE_VMSTATE_TRACKING
StateTag current_vm_state() {
return thread_local_top_.current_vm_state_;
}
@@ -980,7 +952,6 @@ class Isolate {
}
thread_local_top_.current_vm_state_ = state;
}
-#endif
void SetData(void* data) { embedder_data_ = data; }
void* GetData() { return embedder_data_; }
@@ -1356,10 +1327,4 @@ inline void Context::mark_out_of_memory() {
} } // namespace v8::internal
-// TODO(isolates): Get rid of these -inl.h includes and place them only where
-// they're needed.
-#include "allocation-inl.h"
-#include "zone-inl.h"
-#include "frames-inl.h"
-
#endif // V8_ISOLATE_H_
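The ISOLATE_ADDRESS_LIST and ISOLATE_INIT_LIST changes above lean on the X-macro pattern: a single list is expanded with different per-entry macros to produce enum values, member declarations, and initializers, which is why folding the logging entries into the main list removes several expansion sites at once. A generic, self-contained illustration of the pattern; the list name and entries are invented for the sketch.

#include <cstdio>

// One list, expanded several ways -- the same trick the ISOLATE_* lists use.
#define THING_LIST(V)    \
  V(int, answer, 42)     \
  V(double, ratio, 0.5)

struct Things {
  // Expand once into member declarations.
#define DECLARE_MEMBER(type, name, initial) type name##_;
  THING_LIST(DECLARE_MEMBER)
#undef DECLARE_MEMBER

  Things() {
    // Expand a second time into initializers.
#define INIT_MEMBER(type, name, initial) name##_ = initial;
    THING_LIST(INIT_MEMBER)
#undef INIT_MEMBER
  }
};

int main() {
  Things t;
  std::printf("answer_ = %d, ratio_ = %f\n", t.answer_, t.ratio_);
  return 0;
}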
diff --git a/deps/v8/src/jsregexp.cc b/deps/v8/src/jsregexp.cc
index 3b521f648f..45a39ffbc2 100644
--- a/deps/v8/src/jsregexp.cc
+++ b/deps/v8/src/jsregexp.cc
@@ -491,6 +491,7 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
do {
bool is_ascii = subject->IsAsciiRepresentation();
+ EnsureCompiledIrregexp(regexp, is_ascii);
Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
NativeRegExpMacroAssembler::Result res =
NativeRegExpMacroAssembler::Match(code,
diff --git a/deps/v8/src/jsregexp.h b/deps/v8/src/jsregexp.h
index 58958d8513..13f9e2ea06 100644
--- a/deps/v8/src/jsregexp.h
+++ b/deps/v8/src/jsregexp.h
@@ -29,7 +29,6 @@
#define V8_JSREGEXP_H_
#include "allocation.h"
-#include "macro-assembler.h"
#include "zone-inl.h"
namespace v8 {
diff --git a/deps/v8/src/lithium-allocator.cc b/deps/v8/src/lithium-allocator.cc
index dcdc5d9b7d..466110678a 100644
--- a/deps/v8/src/lithium-allocator.cc
+++ b/deps/v8/src/lithium-allocator.cc
@@ -1024,7 +1024,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
operand = chunk_->DefineConstantOperand(constant);
} else {
ASSERT(!op->EmitAtUses());
- LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
+ LUnallocated* unalloc = new LUnallocated(LUnallocated::ANY);
unalloc->set_virtual_register(op->id());
operand = unalloc;
}
diff --git a/deps/v8/src/log-inl.h b/deps/v8/src/log-inl.h
index 02238fe921..8aebbc7dde 100644
--- a/deps/v8/src/log-inl.h
+++ b/deps/v8/src/log-inl.h
@@ -34,8 +34,6 @@
namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
Logger::LogEventsAndTags Logger::ToNativeByScript(Logger::LogEventsAndTags tag,
Script* script) {
if ((tag == FUNCTION_TAG || tag == LAZY_COMPILE_TAG || tag == SCRIPT_TAG)
@@ -51,8 +49,6 @@ Logger::LogEventsAndTags Logger::ToNativeByScript(Logger::LogEventsAndTags tag,
}
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
diff --git a/deps/v8/src/log-utils.cc b/deps/v8/src/log-utils.cc
index 1bba7cd54e..2d1ce23dc0 100644
--- a/deps/v8/src/log-utils.cc
+++ b/deps/v8/src/log-utils.cc
@@ -33,101 +33,14 @@
namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
-LogDynamicBuffer::LogDynamicBuffer(
- int block_size, int max_size, const char* seal, int seal_size)
- : block_size_(block_size),
- max_size_(max_size - (max_size % block_size_)),
- seal_(seal),
- seal_size_(seal_size),
- blocks_(max_size_ / block_size_ + 1),
- write_pos_(0), block_index_(0), block_write_pos_(0), is_sealed_(false) {
- ASSERT(BlocksCount() > 0);
- AllocateBlock(0);
- for (int i = 1; i < BlocksCount(); ++i) {
- blocks_[i] = NULL;
- }
-}
-
-LogDynamicBuffer::~LogDynamicBuffer() {
- for (int i = 0; i < BlocksCount(); ++i) {
- DeleteArray(blocks_[i]);
- }
-}
+const char* Log::kLogToTemporaryFile = "&";
-int LogDynamicBuffer::Read(int from_pos, char* dest_buf, int buf_size) {
- if (buf_size == 0) return 0;
- int read_pos = from_pos;
- int block_read_index = BlockIndex(from_pos);
- int block_read_pos = PosInBlock(from_pos);
- int dest_buf_pos = 0;
- // Read until dest_buf is filled, or write_pos_ encountered.
- while (read_pos < write_pos_ && dest_buf_pos < buf_size) {
- const int read_size = Min(write_pos_ - read_pos,
- Min(buf_size - dest_buf_pos, block_size_ - block_read_pos));
- memcpy(dest_buf + dest_buf_pos,
- blocks_[block_read_index] + block_read_pos, read_size);
- block_read_pos += read_size;
- dest_buf_pos += read_size;
- read_pos += read_size;
- if (block_read_pos == block_size_) {
- block_read_pos = 0;
- ++block_read_index;
- }
- }
- return dest_buf_pos;
-}
-
-
-int LogDynamicBuffer::Seal() {
- WriteInternal(seal_, seal_size_);
- is_sealed_ = true;
- return 0;
-}
-
-
-int LogDynamicBuffer::Write(const char* data, int data_size) {
- if (is_sealed_) {
- return 0;
- }
- if ((write_pos_ + data_size) <= (max_size_ - seal_size_)) {
- return WriteInternal(data, data_size);
- } else {
- return Seal();
- }
-}
-
-
-int LogDynamicBuffer::WriteInternal(const char* data, int data_size) {
- int data_pos = 0;
- while (data_pos < data_size) {
- const int write_size =
- Min(data_size - data_pos, block_size_ - block_write_pos_);
- memcpy(blocks_[block_index_] + block_write_pos_, data + data_pos,
- write_size);
- block_write_pos_ += write_size;
- data_pos += write_size;
- if (block_write_pos_ == block_size_) {
- block_write_pos_ = 0;
- AllocateBlock(++block_index_);
- }
- }
- write_pos_ += data_size;
- return data_size;
-}
-
-// Must be the same message as in Logger::PauseProfiler.
-const char* const Log::kDynamicBufferSeal = "profiler,\"pause\"\n";
-
Log::Log(Logger* logger)
- : write_to_file_(false),
- is_stopped_(false),
+ : is_stopped_(false),
output_handle_(NULL),
ll_output_handle_(NULL),
- output_buffer_(NULL),
mutex_(NULL),
message_buffer_(NULL),
logger_(logger) {
@@ -142,7 +55,6 @@ static void AddIsolateIdIfNeeded(StringStream* stream) {
void Log::Initialize() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
mutex_ = OS::CreateMutex();
message_buffer_ = NewArray<char>(kMessageBufferSize);
@@ -166,19 +78,19 @@ void Log::Initialize() {
FLAG_prof_auto = false;
}
- bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
+ bool open_log_file = FLAG_log || FLAG_log_runtime || FLAG_log_api
|| FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
|| FLAG_log_regexp || FLAG_log_state_changes || FLAG_ll_prof;
- bool open_log_file = start_logging || FLAG_prof_lazy;
-
// If we're logging anything, we need to open the log file.
if (open_log_file) {
if (strcmp(FLAG_logfile, "-") == 0) {
OpenStdout();
} else if (strcmp(FLAG_logfile, "*") == 0) {
- OpenMemoryBuffer();
- } else {
+ // Does nothing for now. Will be removed.
+ } else if (strcmp(FLAG_logfile, kLogToTemporaryFile) == 0) {
+ OpenTemporaryFile();
+ } else {
if (strchr(FLAG_logfile, '%') != NULL ||
!Isolate::Current()->IsDefaultIsolate()) {
// If there's a '%' in the log file name we have to expand
@@ -222,14 +134,18 @@ void Log::Initialize() {
}
}
}
-#endif
}
void Log::OpenStdout() {
ASSERT(!IsEnabled());
output_handle_ = stdout;
- write_to_file_ = true;
+}
+
+
+void Log::OpenTemporaryFile() {
+ ASSERT(!IsEnabled());
+ output_handle_ = i::OS::OpenTemporaryFile();
}
@@ -244,7 +160,6 @@ static const int kLowLevelLogBufferSize = 2 * MB;
void Log::OpenFile(const char* name) {
ASSERT(!IsEnabled());
output_handle_ = OS::FOpen(name, OS::LogFileOpenMode);
- write_to_file_ = true;
if (FLAG_ll_prof) {
// Open the low-level log file.
size_t len = strlen(name);
@@ -257,25 +172,18 @@ void Log::OpenFile(const char* name) {
}
-void Log::OpenMemoryBuffer() {
- ASSERT(!IsEnabled());
- output_buffer_ = new LogDynamicBuffer(
- kDynamicBufferBlockSize, kMaxDynamicBufferSize,
- kDynamicBufferSeal, StrLength(kDynamicBufferSeal));
- write_to_file_ = false;
-}
-
-
-void Log::Close() {
- if (write_to_file_) {
- if (output_handle_ != NULL) fclose(output_handle_);
- output_handle_ = NULL;
- if (ll_output_handle_ != NULL) fclose(ll_output_handle_);
- ll_output_handle_ = NULL;
- } else {
- delete output_buffer_;
- output_buffer_ = NULL;
+FILE* Log::Close() {
+ FILE* result = NULL;
+ if (output_handle_ != NULL) {
+ if (strcmp(FLAG_logfile, kLogToTemporaryFile) != 0) {
+ fclose(output_handle_);
+ } else {
+ result = output_handle_;
+ }
}
+ output_handle_ = NULL;
+ if (ll_output_handle_ != NULL) fclose(ll_output_handle_);
+ ll_output_handle_ = NULL;
DeleteArray(message_buffer_);
message_buffer_ = NULL;
@@ -284,27 +192,7 @@ void Log::Close() {
mutex_ = NULL;
is_stopped_ = false;
-}
-
-
-int Log::GetLogLines(int from_pos, char* dest_buf, int max_size) {
- if (write_to_file_) return 0;
- ASSERT(output_buffer_ != NULL);
- ASSERT(from_pos >= 0);
- ASSERT(max_size >= 0);
- int actual_size = output_buffer_->Read(from_pos, dest_buf, max_size);
- ASSERT(actual_size <= max_size);
- if (actual_size == 0) return 0;
-
- // Find previous log line boundary.
- char* end_pos = dest_buf + actual_size - 1;
- while (end_pos >= dest_buf && *end_pos != '\n') --end_pos;
- actual_size = static_cast<int>(end_pos - dest_buf + 1);
- // If the assertion below is hit, it means that there was no line end
- // found --- something wrong has happened.
- ASSERT(actual_size > 0);
- ASSERT(actual_size <= max_size);
- return actual_size;
+ return result;
}
@@ -413,9 +301,7 @@ void LogMessageBuilder::AppendStringPart(const char* str, int len) {
void LogMessageBuilder::WriteToLogFile() {
ASSERT(pos_ <= Log::kMessageBufferSize);
- const int written = log_->write_to_file_ ?
- log_->WriteToFile(log_->message_buffer_, pos_) :
- log_->WriteToMemory(log_->message_buffer_, pos_);
+ const int written = log_->WriteToFile(log_->message_buffer_, pos_);
if (written != pos_) {
log_->stop();
log_->logger_->LogFailure();
@@ -423,6 +309,4 @@ void LogMessageBuilder::WriteToLogFile() {
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
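The new kLogToTemporaryFile ("&") mode relies on ordinary temporary-file semantics: the file disappears when its stream is closed, so Close() hands the still-open FILE* back to the caller, which must read it before closing. A small sketch of that pattern with the standard C API, where std::tmpfile stands in for OS::OpenTemporaryFile and the log line is a made-up sample:

#include <cstdio>

int main() {
  // Like the temporary log above, a tmpfile() stream is deleted
  // automatically when it is closed, so it has to be read back first.
  FILE* log = std::tmpfile();
  if (log == NULL) return 1;
  std::fputs("sample log line\n", log);

  std::rewind(log);  // what a test would do with the FILE* returned by Close()
  char line[128];
  while (std::fgets(line, sizeof(line), log) != NULL) {
    std::fputs(line, stdout);
  }
  std::fclose(log);  // the temporary file is removed here
  return 0;
}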
diff --git a/deps/v8/src/log-utils.h b/deps/v8/src/log-utils.h
index 81bbf779f3..d336d714b9 100644
--- a/deps/v8/src/log-utils.h
+++ b/deps/v8/src/log-utils.h
@@ -33,69 +33,11 @@
namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
class Logger;
-// A memory buffer that increments its size as you write in it. Size
-// is incremented with 'block_size' steps, never exceeding 'max_size'.
-// During growth, memory contents are never copied. At the end of the
-// buffer an amount of memory specified in 'seal_size' is reserved.
-// When writing position reaches max_size - seal_size, buffer auto-seals
-// itself with 'seal' and allows no further writes. Data pointed by
-// 'seal' must be available during entire LogDynamicBuffer lifetime.
-//
-// An instance of this class is created dynamically by Log.
-class LogDynamicBuffer {
- public:
- LogDynamicBuffer(
- int block_size, int max_size, const char* seal, int seal_size);
-
- ~LogDynamicBuffer();
-
- // Reads contents of the buffer starting from 'from_pos'. Upon
- // return, 'dest_buf' is filled with the data. Actual amount of data
- // filled is returned, it is <= 'buf_size'.
- int Read(int from_pos, char* dest_buf, int buf_size);
-
- // Writes 'data' to the buffer, making it larger if necessary. If
- // data is too big to fit in the buffer, it doesn't get written at
- // all. In that case, buffer auto-seals itself and stops to accept
- // any incoming writes. Returns amount of data written (it is either
- // 'data_size', or 0, if 'data' is too big).
- int Write(const char* data, int data_size);
-
- private:
- void AllocateBlock(int index) {
- blocks_[index] = NewArray<char>(block_size_);
- }
-
- int BlockIndex(int pos) const { return pos / block_size_; }
-
- int BlocksCount() const { return BlockIndex(max_size_) + 1; }
-
- int PosInBlock(int pos) const { return pos % block_size_; }
-
- int Seal();
-
- int WriteInternal(const char* data, int data_size);
-
- const int block_size_;
- const int max_size_;
- const char* seal_;
- const int seal_size_;
- ScopedVector<char*> blocks_;
- int write_pos_;
- int block_index_;
- int block_write_pos_;
- bool is_sealed_;
-};
-
-
// Functions and data for performing output of log messages.
class Log {
public:
-
// Performs process-wide initialization.
void Initialize();
@@ -103,18 +45,21 @@ class Log {
void stop() { is_stopped_ = true; }
// Frees all resources acquired in Initialize and Open... functions.
- void Close();
-
- // See description in include/v8.h.
- int GetLogLines(int from_pos, char* dest_buf, int max_size);
+ // When a temporary file is used for the log, returns its stream descriptor,
+ // leaving the file open.
+ FILE* Close();
// Returns whether logging is enabled.
bool IsEnabled() {
- return !is_stopped_ && (output_handle_ != NULL || output_buffer_ != NULL);
+ return !is_stopped_ && output_handle_ != NULL;
}
// Size of buffer used for formatting log messages.
- static const int kMessageBufferSize = v8::V8::kMinimumSizeForLogLinesBuffer;
+ static const int kMessageBufferSize = 2048;
+
+ // This mode is only used in tests, as temporary files are automatically
+ // deleted on close and thus can't be accessed afterwards.
+ static const char* kLogToTemporaryFile;
private:
explicit Log(Logger* logger);
@@ -125,8 +70,8 @@ class Log {
// Opens file for logging.
void OpenFile(const char* name);
- // Opens memory buffer for logging.
- void OpenMemoryBuffer();
+ // Opens a temporary file for logging.
+ void OpenTemporaryFile();
// Implementation of writing to a log file.
int WriteToFile(const char* msg, int length) {
@@ -138,38 +83,16 @@ class Log {
return length;
}
- // Implementation of writing to a memory buffer.
- int WriteToMemory(const char* msg, int length) {
- ASSERT(output_buffer_ != NULL);
- return output_buffer_->Write(msg, length);
- }
-
- bool write_to_file_;
-
// Whether logging is stopped (e.g. due to insufficient resources).
bool is_stopped_;
- // When logging is active, either output_handle_ or output_buffer_ is used
- // to store a pointer to log destination. If logging was opened via OpenStdout
- // or OpenFile, then output_handle_ is used. If logging was opened
- // via OpenMemoryBuffer, then output_buffer_ is used.
- // mutex_ should be acquired before using output_handle_ or output_buffer_.
+ // When logging is active output_handle_ is used to store a pointer to log
+ // destination. mutex_ should be acquired before using output_handle_.
FILE* output_handle_;
// Used when low-level profiling is active.
FILE* ll_output_handle_;
- LogDynamicBuffer* output_buffer_;
-
- // Size of dynamic buffer block (and dynamic buffer initial size).
- static const int kDynamicBufferBlockSize = 65536;
-
- // Maximum size of dynamic buffer.
- static const int kMaxDynamicBufferSize = 50 * 1024 * 1024;
-
- // Message to "seal" dynamic buffer with.
- static const char* const kDynamicBufferSeal;
-
// mutex_ is a Mutex used for enforcing exclusive
// access to the formatting buffer and the log file or log memory buffer.
Mutex* mutex_;
@@ -224,8 +147,6 @@ class LogMessageBuilder BASE_EMBEDDED {
int pos_;
};
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
#endif // V8_LOG_UTILS_H_
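The log-utils changes above drop the in-memory log buffer and instead let Log::Close() hand back the still-open stream when the log was directed to a temporary file. A minimal sketch of how a caller could drain that stream once it receives the FILE* from Close() (or from Logger::TearDown() further down); the helper name, buffer size and error handling here are illustrative only:

    #include <cstdio>
    #include <string>

    // Reads the whole temporary log and closes it; returns an empty string
    // when the log was not directed to a temporary file.
    static std::string ReadAndCloseLog(FILE* log_file) {
      std::string contents;
      if (log_file == NULL) return contents;
      std::rewind(log_file);             // Close()/TearDown() leave the file open.
      char chunk[4096];
      size_t read;
      while ((read = std::fread(chunk, 1, sizeof(chunk), log_file)) > 0) {
        contents.append(chunk, read);
      }
      std::fclose(log_file);             // The temporary file is deleted on close.
      return contents;
    }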
diff --git a/deps/v8/src/log.cc b/deps/v8/src/log.cc
index 004e21a650..04fd22ef5c 100644
--- a/deps/v8/src/log.cc
+++ b/deps/v8/src/log.cc
@@ -43,8 +43,6 @@
namespace v8 {
namespace internal {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
//
// Sliding state window. Updates counters to keep track of the last
// window of kBufferSize states. This is useful to track where we
@@ -554,71 +552,54 @@ void Logger::ProfilerBeginEvent() {
msg.WriteToLogFile();
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
void Logger::StringEvent(const char* name, const char* value) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log) UncheckedStringEvent(name, value);
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedStringEvent(const char* name, const char* value) {
if (!log_->IsEnabled()) return;
LogMessageBuilder msg(this);
msg.Append("%s,\"%s\"\n", name, value);
msg.WriteToLogFile();
}
-#endif
void Logger::IntEvent(const char* name, int value) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log) UncheckedIntEvent(name, value);
-#endif
}
void Logger::IntPtrTEvent(const char* name, intptr_t value) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log) UncheckedIntPtrTEvent(name, value);
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedIntEvent(const char* name, int value) {
if (!log_->IsEnabled()) return;
LogMessageBuilder msg(this);
msg.Append("%s,%d\n", name, value);
msg.WriteToLogFile();
}
-#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
if (!log_->IsEnabled()) return;
LogMessageBuilder msg(this);
msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
msg.WriteToLogFile();
}
-#endif
void Logger::HandleEvent(const char* name, Object** location) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_handles) return;
LogMessageBuilder msg(this);
msg.Append("%s,0x%" V8PRIxPTR "\n", name, location);
msg.WriteToLogFile();
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
// ApiEvent is private so all the calls come from the Logger class. It is the
// caller's responsibility to ensure that log is enabled and that
// FLAG_log_api is true.
@@ -631,11 +612,9 @@ void Logger::ApiEvent(const char* format, ...) {
va_end(ap);
msg.WriteToLogFile();
}
-#endif
void Logger::ApiNamedSecurityCheck(Object* key) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_api) return;
if (key->IsString()) {
SmartPointer<char> str =
@@ -646,14 +625,12 @@ void Logger::ApiNamedSecurityCheck(Object* key) {
} else {
ApiEvent("api,check-security,['no-name']\n");
}
-#endif
}
void Logger::SharedLibraryEvent(const char* library_path,
uintptr_t start,
uintptr_t end) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_prof) return;
LogMessageBuilder msg(this);
msg.Append("shared-library,\"%s\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
@@ -661,14 +638,12 @@ void Logger::SharedLibraryEvent(const char* library_path,
start,
end);
msg.WriteToLogFile();
-#endif
}
void Logger::SharedLibraryEvent(const wchar_t* library_path,
uintptr_t start,
uintptr_t end) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_prof) return;
LogMessageBuilder msg(this);
msg.Append("shared-library,\"%ls\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
@@ -676,11 +651,9 @@ void Logger::SharedLibraryEvent(const wchar_t* library_path,
start,
end);
msg.WriteToLogFile();
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
// Prints "/" + re.source + "/" +
// (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"")
@@ -721,23 +694,19 @@ void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
msg.WriteToLogFile();
}
-#endif // ENABLE_LOGGING_AND_PROFILING
void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_regexp) return;
LogMessageBuilder msg(this);
msg.Append("regexp-compile,");
LogRegExpSource(regexp);
msg.Append(in_cache ? ",hit\n" : ",miss\n");
msg.WriteToLogFile();
-#endif
}
void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_runtime) return;
HandleScope scope;
LogMessageBuilder msg(this);
@@ -778,22 +747,18 @@ void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
}
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::ApiIndexedSecurityCheck(uint32_t index) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_api) return;
ApiEvent("api,check-security,%u\n", index);
-#endif
}
void Logger::ApiNamedPropertyAccess(const char* tag,
JSObject* holder,
Object* name) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
ASSERT(name->IsString());
if (!log_->IsEnabled() || !FLAG_log_api) return;
String* class_name_obj = holder->class_name();
@@ -802,58 +767,47 @@ void Logger::ApiNamedPropertyAccess(const char* tag,
SmartPointer<char> property_name =
String::cast(name)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
ApiEvent("api,%s,\"%s\",\"%s\"\n", tag, *class_name, *property_name);
-#endif
}
void Logger::ApiIndexedPropertyAccess(const char* tag,
JSObject* holder,
uint32_t index) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_api) return;
String* class_name_obj = holder->class_name();
SmartPointer<char> class_name =
class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
ApiEvent("api,%s,\"%s\",%u\n", tag, *class_name, index);
-#endif
}
void Logger::ApiObjectAccess(const char* tag, JSObject* object) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_api) return;
String* class_name_obj = object->class_name();
SmartPointer<char> class_name =
class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
ApiEvent("api,%s,\"%s\"\n", tag, *class_name);
-#endif
}
void Logger::ApiEntryCall(const char* name) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_api) return;
ApiEvent("api,%s\n", name);
-#endif
}
void Logger::NewEvent(const char* name, void* object, size_t size) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
LogMessageBuilder msg(this);
msg.Append("new,%s,0x%" V8PRIxPTR ",%u\n", name, object,
static_cast<unsigned int>(size));
msg.WriteToLogFile();
-#endif
}
void Logger::DeleteEvent(const char* name, void* object) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
LogMessageBuilder msg(this);
msg.Append("delete,%s,0x%" V8PRIxPTR "\n", name, object);
msg.WriteToLogFile();
-#endif
}
@@ -866,7 +820,6 @@ void Logger::DeleteEventStatic(const char* name, void* object) {
LOGGER->DeleteEvent(name, object);
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::CallbackEventInternal(const char* prefix, const char* name,
Address entry_point) {
if (!log_->IsEnabled() || !FLAG_log_code) return;
@@ -879,43 +832,35 @@ void Logger::CallbackEventInternal(const char* prefix, const char* name,
msg.Append('\n');
msg.WriteToLogFile();
}
-#endif
void Logger::CallbackEvent(String* name, Address entry_point) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_code) return;
SmartPointer<char> str =
name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
CallbackEventInternal("", *str, entry_point);
-#endif
}
void Logger::GetterCallbackEvent(String* name, Address entry_point) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_code) return;
SmartPointer<char> str =
name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
CallbackEventInternal("get ", *str, entry_point);
-#endif
}
void Logger::SetterCallbackEvent(String* name, Address entry_point) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_code) return;
SmartPointer<char> str =
name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
CallbackEventInternal("set ", *str, entry_point);
-#endif
}
void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
const char* comment) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -945,14 +890,12 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
msg.Append('"');
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
String* name) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -977,11 +920,9 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
msg.Append('"');
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
// ComputeMarker must only be used when SharedFunctionInfo is known.
static const char* ComputeMarker(Code* code) {
switch (code->kind()) {
@@ -990,14 +931,12 @@ static const char* ComputeMarker(Code* code) {
default: return "";
}
}
-#endif
void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
SharedFunctionInfo* shared,
String* name) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -1029,7 +968,6 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
msg.Append(",%s", ComputeMarker(code));
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
@@ -1040,7 +978,6 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
SharedFunctionInfo* shared,
String* source, int line) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -1078,12 +1015,10 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
msg.Append(",%s", ComputeMarker(code));
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -1106,21 +1041,17 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
msg.Append(",%d,\"args_count: %d\"", code->ExecutableSize(), args_count);
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::CodeMovingGCEvent() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_ll_prof) return;
LowLevelLogWriteBytes(&kCodeMovingGCTag, sizeof(kCodeMovingGCTag));
OS::SignalCodeMovingGC();
-#endif
}
void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof || Serializer::enabled()) {
name_buffer_->Reset();
@@ -1145,36 +1076,30 @@ void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
msg.Append('\"');
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::CodeMoveEvent(Address from, Address to) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) LowLevelCodeMoveEvent(from, to);
if (Serializer::enabled() && address_to_name_map_ != NULL) {
address_to_name_map_->Move(from, to);
}
MoveEventInternal(CODE_MOVE_EVENT, from, to);
-#endif
}
void Logger::CodeDeleteEvent(Address from) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) LowLevelCodeDeleteEvent(from);
if (Serializer::enabled() && address_to_name_map_ != NULL) {
address_to_name_map_->Remove(from);
}
DeleteEventInternal(CODE_DELETE_EVENT, from);
-#endif
}
void Logger::SnapshotPositionEvent(Address addr, int pos) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) LowLevelSnapshotPositionEvent(addr, pos);
if (Serializer::enabled() && address_to_name_map_ != NULL) {
@@ -1196,18 +1121,14 @@ void Logger::SnapshotPositionEvent(Address addr, int pos) {
msg.Append(",%d", pos);
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::SharedFunctionInfoMoveEvent(Address from, Address to) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
MoveEventInternal(SHARED_FUNC_MOVE_EVENT, from, to);
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::MoveEventInternal(LogEventsAndTags event,
Address from,
Address to) {
@@ -1220,10 +1141,8 @@ void Logger::MoveEventInternal(LogEventsAndTags event,
msg.Append('\n');
msg.WriteToLogFile();
}
-#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
if (!log_->IsEnabled() || !FLAG_log_code) return;
LogMessageBuilder msg(this);
@@ -1232,11 +1151,9 @@ void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
msg.Append('\n');
msg.WriteToLogFile();
}
-#endif
void Logger::ResourceEvent(const char* name, const char* tag) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
LogMessageBuilder msg(this);
msg.Append("%s,%s,", name, tag);
@@ -1249,12 +1166,10 @@ void Logger::ResourceEvent(const char* name, const char* tag) {
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::SuspectReadEvent(String* name, Object* obj) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_suspect) return;
LogMessageBuilder msg(this);
String* class_name = obj->IsJSObject()
@@ -1268,12 +1183,10 @@ void Logger::SuspectReadEvent(String* name, Object* obj) {
msg.Append('"');
msg.Append('\n');
msg.WriteToLogFile();
-#endif
}
void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
// Using non-relative system time in order to be able to synchronize with
@@ -1281,42 +1194,34 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
msg.Append("heap-sample-begin,\"%s\",\"%s\",%.0f\n",
space, kind, OS::TimeCurrentMillis());
msg.WriteToLogFile();
-#endif
}
void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-sample-end,\"%s\",\"%s\"\n", space, kind);
msg.WriteToLogFile();
-#endif
}
void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-sample-item,%s,%d,%d\n", type, number, bytes);
msg.WriteToLogFile();
-#endif
}
void Logger::DebugTag(const char* call_site_tag) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
LogMessageBuilder msg(this);
msg.Append("debug-tag,%s\n", call_site_tag);
msg.WriteToLogFile();
-#endif
}
void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
StringBuilder s(parameter.length() + 1);
for (int i = 0; i < parameter.length(); ++i) {
@@ -1330,11 +1235,9 @@ void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
parameter_string);
DeleteArray(parameter_string);
msg.WriteToLogFile();
-#endif
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::TickEvent(TickSample* sample, bool overflow) {
if (!log_->IsEnabled() || !FLAG_prof) return;
LogMessageBuilder msg(this);
@@ -1378,7 +1281,6 @@ void Logger::PauseProfiler() {
ticker_->Stop();
}
FLAG_log_code = false;
- // Must be the same message as Log::kDynamicBufferSeal.
LOG(ISOLATE, UncheckedStringEvent("profiler", "pause"));
}
--logging_nesting_;
@@ -1420,11 +1322,6 @@ bool Logger::IsProfilerSamplerActive() {
}
-int Logger::GetLogLines(int from_pos, char* dest_buf, int max_size) {
- return log_->GetLogLines(from_pos, dest_buf, max_size);
-}
-
-
class EnumerateOptimizedFunctionsVisitor: public OptimizedFunctionVisitor {
public:
EnumerateOptimizedFunctionsVisitor(Handle<SharedFunctionInfo>* sfis,
@@ -1545,7 +1442,6 @@ void Logger::LogCodeObject(Object* object) {
void Logger::LogCodeInfo() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_ll_prof) return;
#if V8_TARGET_ARCH_IA32
const char arch[] = "ia32";
@@ -1557,7 +1453,6 @@ void Logger::LogCodeInfo() {
const char arch[] = "unknown";
#endif
LowLevelLogWriteBytes(arch, sizeof(arch));
-#endif // ENABLE_LOGGING_AND_PROFILING
}
@@ -1710,11 +1605,8 @@ void Logger::LogAccessorCallbacks() {
}
}
-#endif
-
bool Logger::Setup() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Tests and EnsureInitialize() can call this twice in a row. It's harmless.
if (is_initialized_) return true;
is_initialized_ = true;
@@ -1766,40 +1658,27 @@ bool Logger::Setup() {
}
return true;
-
-#else
- return false;
-#endif
}
Sampler* Logger::sampler() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
return ticker_;
-#else
- return NULL;
-#endif
}
void Logger::EnsureTickerStarted() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
ASSERT(ticker_ != NULL);
if (!ticker_->IsActive()) ticker_->Start();
-#endif
}
void Logger::EnsureTickerStopped() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (ticker_ != NULL && ticker_->IsActive()) ticker_->Stop();
-#endif
}
-void Logger::TearDown() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
- if (!is_initialized_) return;
+FILE* Logger::TearDown() {
+ if (!is_initialized_) return NULL;
is_initialized_ = false;
// Stop the profiler before closing the file.
@@ -1815,13 +1694,11 @@ void Logger::TearDown() {
delete ticker_;
ticker_ = NULL;
- log_->Close();
-#endif
+ return log_->Close();
}
void Logger::EnableSlidingStateWindow() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// If the ticker is NULL, Logger::Setup has not been called yet. In
// that case, we set the sliding_state_window flag so that the
// sliding window computation will be started when Logger::Setup is
@@ -1835,7 +1712,6 @@ void Logger::EnableSlidingStateWindow() {
if (sliding_state_window_ == NULL) {
sliding_state_window_ = new SlidingStateWindow(Isolate::Current());
}
-#endif
}
@@ -1855,10 +1731,8 @@ bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
bool* flag = reinterpret_cast<bool*>(flag_ptr);
*flag |= sampler->IsProfiling();
-#endif
}
diff --git a/deps/v8/src/log.h b/deps/v8/src/log.h
index 6ffd18c61b..02250595f8 100644
--- a/deps/v8/src/log.h
+++ b/deps/v8/src/log.h
@@ -78,7 +78,6 @@ class SlidingStateWindow;
class Ticker;
#undef LOG
-#ifdef ENABLE_LOGGING_AND_PROFILING
#define LOG(isolate, Call) \
do { \
v8::internal::Logger* logger = \
@@ -86,9 +85,6 @@ class Ticker;
if (logger->is_logging()) \
logger->Call; \
} while (false)
-#else
-#define LOG(isolate, Call) ((void) 0)
-#endif
#define LOG_EVENTS_AND_TAGS_LIST(V) \
V(CODE_CREATION_EVENT, "code-creation") \
@@ -161,7 +157,9 @@ class Logger {
Sampler* sampler();
// Frees resources acquired in Setup.
- void TearDown();
+ // When a temporary file is used for the log, returns its stream descriptor,
+ // leaving the file open.
+ FILE* TearDown();
// Enable the computation of a sliding window of states.
void EnableSlidingStateWindow();
@@ -272,7 +270,6 @@ class Logger {
// Log an event reported from generated code
void LogRuntime(Vector<const char> format, JSArray* args);
-#ifdef ENABLE_LOGGING_AND_PROFILING
bool is_logging() {
return logging_nesting_ > 0;
}
@@ -284,10 +281,6 @@ class Logger {
void ResumeProfiler();
bool IsProfilerPaused();
- // If logging is performed into a memory buffer, allows to
- // retrieve previously written messages. See v8.h.
- int GetLogLines(int from_pos, char* dest_buf, int max_size);
-
// Logs all compiled functions found in the heap.
void LogCompiledFunctions();
// Logs all accessor callbacks found in the heap.
@@ -424,9 +417,6 @@ class Logger {
Address prev_code_;
friend class CpuProfiler;
-#else
- bool is_logging() { return false; }
-#endif
};
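With the ENABLE_LOGGING_AND_PROFILING guards gone, the LOG macro above is always defined in terms of the is_logging() check and never as ((void) 0). An illustrative call site, assuming an Isolate* named isolate is in scope (the event name and value strings are made up):

    LOG(isolate, StringEvent("my-event", "my-value"));  // always expands to the logger check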
diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc
index 5792f6c403..0bf82863d7 100644
--- a/deps/v8/src/mark-compact.cc
+++ b/deps/v8/src/mark-compact.cc
@@ -1661,31 +1661,34 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
// Clear dead prototype transitions.
int number_of_transitions = map->NumberOfProtoTransitions();
- FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
- int new_number_of_transitions = 0;
- const int header = Map::kProtoTransitionHeaderSize;
- const int proto_offset =
- header + Map::kProtoTransitionPrototypeOffset;
- const int map_offset = header + Map::kProtoTransitionMapOffset;
- const int step = Map::kProtoTransitionElementsPerEntry;
- for (int i = 0; i < number_of_transitions; i++) {
- Object* prototype = prototype_transitions->get(proto_offset + i * step);
- Object* cached_map = prototype_transitions->get(map_offset + i * step);
- if (HeapObject::cast(prototype)->IsMarked() &&
- HeapObject::cast(cached_map)->IsMarked()) {
- if (new_number_of_transitions != i) {
- prototype_transitions->set_unchecked(
- heap_,
- proto_offset + new_number_of_transitions * step,
- prototype,
- UPDATE_WRITE_BARRIER);
- prototype_transitions->set_unchecked(
- heap_,
- map_offset + new_number_of_transitions * step,
- cached_map,
- SKIP_WRITE_BARRIER);
+ if (number_of_transitions > 0) {
+ FixedArray* prototype_transitions =
+ map->unchecked_prototype_transitions();
+ int new_number_of_transitions = 0;
+ const int header = Map::kProtoTransitionHeaderSize;
+ const int proto_offset =
+ header + Map::kProtoTransitionPrototypeOffset;
+ const int map_offset = header + Map::kProtoTransitionMapOffset;
+ const int step = Map::kProtoTransitionElementsPerEntry;
+ for (int i = 0; i < number_of_transitions; i++) {
+ Object* prototype = prototype_transitions->get(proto_offset + i * step);
+ Object* cached_map = prototype_transitions->get(map_offset + i * step);
+ if (HeapObject::cast(prototype)->IsMarked() &&
+ HeapObject::cast(cached_map)->IsMarked()) {
+ if (new_number_of_transitions != i) {
+ prototype_transitions->set_unchecked(
+ heap_,
+ proto_offset + new_number_of_transitions * step,
+ prototype,
+ UPDATE_WRITE_BARRIER);
+ prototype_transitions->set_unchecked(
+ heap_,
+ map_offset + new_number_of_transitions * step,
+ cached_map,
+ SKIP_WRITE_BARRIER);
+ }
+ new_number_of_transitions++;
}
- new_number_of_transitions++;
}
// Fill slots that became free with undefined value.
@@ -3255,11 +3258,9 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
}
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (obj->IsCode()) {
PROFILE(isolate, CodeDeleteEvent(obj->address()));
}
-#endif
}
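The ClearNonLiveTransitions hunk above re-indents the existing compaction loop under a number_of_transitions > 0 guard, so the work is skipped entirely for maps with no prototype transitions. The shape of that in-place compaction, reduced to plain C++ with hypothetical Entry/IsLive()/entries[] stand-ins for the (prototype, cached map) slots and the "both objects marked" check:

    // Hypothetical stand-ins for the FixedArray slots and the mark check.
    static int CompactLiveEntries(Entry* entries, int count) {
      int new_count = 0;
      for (int i = 0; i < count; i++) {
        if (IsLive(entries[i])) {                      // prototype and cached map both marked
          if (new_count != i) entries[new_count] = entries[i];
          new_count++;
        }
      }
      return new_count;  // slots [new_count, count) are then filled with undefined
    }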
diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js
index 841c5180a3..b9281070a7 100644
--- a/deps/v8/src/messages.js
+++ b/deps/v8/src/messages.js
@@ -195,6 +195,7 @@ function FormatMessage(message) {
non_extensible_proto: ["%0", " is not extensible"],
handler_non_object: ["Proxy.", "%0", " called with non-object as handler"],
handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
+ handler_failed: ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"],
proxy_prop_not_configurable: ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"],
proxy_non_object_prop_names: ["Trap ", "%1", " returned non-object ", "%0"],
proxy_repeated_prop_name: ["Trap ", "%1", " returned repeated property name ", "%2"],
diff --git a/deps/v8/src/mips/assembler-mips.h b/deps/v8/src/mips/assembler-mips.h
index 92c958b962..f3730d6f31 100644
--- a/deps/v8/src/mips/assembler-mips.h
+++ b/deps/v8/src/mips/assembler-mips.h
@@ -779,8 +779,13 @@ class Assembler : public AssemblerBase {
void fcmp(FPURegister src1, const double src2, FPUCondition cond);
// Check the code size generated from label to here.
- int InstructionsGeneratedSince(Label* l) {
- return (pc_offset() - l->pos()) / kInstrSize;
+ int SizeOfCodeGeneratedSince(Label* label) {
+ return pc_offset() - label->pos();
+ }
+
+ // Check the number of instructions generated from label to here.
+ int InstructionsGeneratedSince(Label* label) {
+ return SizeOfCodeGeneratedSince(label) / kInstrSize;
}
// Class for scoping postponing the trampoline pool generation.
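The new SizeOfCodeGeneratedSince() helper above factors the byte distance out of InstructionsGeneratedSince(). A sketch of how the pair is typically used inside a MacroAssembler routine, with __ the usual ACCESS_MASM shorthand; the emitted instructions and counts are illustrative, not taken from this patch:

    Label start;
    __ bind(&start);
    __ nop();
    __ nop();
    ASSERT(__ InstructionsGeneratedSince(&start) == 2);
    ASSERT(__ SizeOfCodeGeneratedSince(&start) == 2 * kInstrSize);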
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index 4bb1d8cba7..1555653f0a 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -634,7 +634,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
__ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset));
__ Addu(t9, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(Operand(t9));
+ __ Jump(t9);
// a0: number of arguments
// a1: called object
@@ -1075,8 +1075,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code and pass argc as a0.
__ mov(a0, a3);
if (is_construct) {
- __ Call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
+ __ Call(masm->isolate()->builtins()->JSConstructCall());
} else {
ParameterCount actual(a0);
__ InvokeFunction(a1, actual, CALL_FUNCTION,
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index d7fac867f3..d03443f272 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -305,12 +305,6 @@ class ConvertToDoubleStub : public CodeStub {
}
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "ConvertToDoubleStub"; }
-
-#ifdef DEBUG
- void Print() { PrintF("ConvertToDoubleStub\n"); }
-#endif
};
@@ -396,11 +390,11 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
__ mov(scratch1, a0);
ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2);
__ push(ra);
- __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
+ __ Call(stub1.GetCode());
// Write Smi from a1 to a1 and a0 in double format.
__ mov(scratch1, a1);
ConvertToDoubleStub stub2(a1, a0, scratch1, scratch2);
- __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
+ __ Call(stub2.GetCode());
__ pop(ra);
}
}
@@ -482,7 +476,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
__ mov(scratch1, object);
ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
__ push(ra);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ Call(stub.GetCode());
__ pop(ra);
}
@@ -1107,7 +1101,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
__ mov(t6, rhs);
ConvertToDoubleStub stub1(a1, a0, t6, t5);
__ push(ra);
- __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
+ __ Call(stub1.GetCode());
__ pop(ra);
}
@@ -1142,7 +1136,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
__ mov(t6, lhs);
ConvertToDoubleStub stub2(a3, a2, t6, t5);
__ push(ra);
- __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
+ __ Call(stub2.GetCode());
__ pop(ra);
// Load rhs to a double in a1, a0.
if (rhs.is(a0)) {
@@ -1803,25 +1797,17 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
}
-const char* UnaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "UnaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- UnaryOpIC::GetName(operand_type_));
- return name_;
+ stream->Add("UnaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ UnaryOpIC::GetName(operand_type_));
}
@@ -2160,12 +2146,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
}
-const char* BinaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
switch (mode_) {
@@ -2174,13 +2155,10 @@ const char* BinaryOpStub::GetName() {
case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
default: overwrite_name = "UnknownOverwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- return name_;
+ stream->Add("BinaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ BinaryOpIC::GetName(operands_type_));
}
@@ -3749,24 +3727,22 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// 4 args slots
// args
- #ifdef ENABLE_LOGGING_AND_PROFILING
- // If this is the outermost JS call, set js_entry_sp value.
- Label non_outermost_js;
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
- masm->isolate());
- __ li(t1, Operand(ExternalReference(js_entry_sp)));
- __ lw(t2, MemOperand(t1));
- __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
- __ sw(fp, MemOperand(t1));
- __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
- Label cont;
- __ b(&cont);
- __ nop(); // Branch delay slot nop.
- __ bind(&non_outermost_js);
- __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
- __ bind(&cont);
- __ push(t0);
- #endif
+ // If this is the outermost JS call, set js_entry_sp value.
+ Label non_outermost_js;
+ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
+ masm->isolate());
+ __ li(t1, Operand(ExternalReference(js_entry_sp)));
+ __ lw(t2, MemOperand(t1));
+ __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
+ __ sw(fp, MemOperand(t1));
+ __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ Label cont;
+ __ b(&cont);
+ __ nop(); // Branch delay slot nop.
+ __ bind(&non_outermost_js);
+ __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
+ __ bind(&cont);
+ __ push(t0);
// Call a faked try-block that does the invoke.
__ bal(&invoke); // bal exposes branch delay slot.
@@ -3835,16 +3811,14 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ PopTryHandler();
__ bind(&exit); // v0 holds result
- #ifdef ENABLE_LOGGING_AND_PROFILING
- // Check if the current stack frame is marked as the outermost JS frame.
- Label non_outermost_js_2;
- __ pop(t1);
- __ Branch(&non_outermost_js_2, ne, t1,
- Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
- __ li(t1, Operand(ExternalReference(js_entry_sp)));
- __ sw(zero_reg, MemOperand(t1));
- __ bind(&non_outermost_js_2);
- #endif
+ // Check if the current stack frame is marked as the outermost JS frame.
+ Label non_outermost_js_2;
+ __ pop(t1);
+ __ Branch(&non_outermost_js_2, ne, t1,
+ Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ __ li(t1, Operand(ExternalReference(js_entry_sp)));
+ __ sw(zero_reg, MemOperand(t1));
+ __ bind(&non_outermost_js_2);
// Restore the top frame descriptors from the stack.
__ pop(t1);
@@ -4592,10 +4566,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ movz(t9, t0, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
// Check that the irregexp code has been generated for the actual string
- // encoding. If it has, the field contains a code object otherwise it
- // contains the hole.
- __ GetObjectType(t9, a0, a0);
- __ Branch(&runtime, ne, a0, Operand(CODE_TYPE));
+ // encoding. If it has, the field contains a code object otherwise it contains
+ // a smi (code flushing support).
+ __ JumpIfSmi(t9, &runtime);
// a3: encoding of subject string (1 if ASCII, 0 if two_byte);
// t9: code
@@ -4947,16 +4920,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
-const char* CompareStub::GetName() {
+void CompareStub::PrintName(StringStream* stream) {
ASSERT((lhs_.is(a0) && rhs_.is(a1)) ||
(lhs_.is(a1) && rhs_.is(a0)));
-
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
const char* cc_name;
switch (cc_) {
case lt: cc_name = "LT"; break;
@@ -4967,40 +4933,14 @@ const char* CompareStub::GetName() {
case ne: cc_name = "NE"; break;
default: cc_name = "UnknownCondition"; break;
}
-
- const char* lhs_name = lhs_.is(a0) ? "_a0" : "_a1";
- const char* rhs_name = rhs_.is(a0) ? "_a0" : "_a1";
-
- const char* strict_name = "";
- if (strict_ && (cc_ == eq || cc_ == ne)) {
- strict_name = "_STRICT";
- }
-
- const char* never_nan_nan_name = "";
- if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
- never_nan_nan_name = "_NO_NAN";
- }
-
- const char* include_number_compare_name = "";
- if (!include_number_compare_) {
- include_number_compare_name = "_NO_NUMBER";
- }
-
- const char* include_smi_compare_name = "";
- if (!include_smi_compare_) {
- include_smi_compare_name = "_NO_SMI";
- }
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "CompareStub_%s%s%s%s%s%s",
- cc_name,
- lhs_name,
- rhs_name,
- strict_name,
- never_nan_nan_name,
- include_number_compare_name,
- include_smi_compare_name);
- return name_;
+ bool is_equality = cc_ == eq || cc_ == ne;
+ stream->Add("CompareStub_%s", cc_name);
+ stream->Add(lhs_.is(a0) ? "_a0" : "_a1");
+ stream->Add(rhs_.is(a0) ? "_a0" : "_a1");
+ if (strict_ && is_equality) stream->Add("_STRICT");
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
}
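This file follows the same GetName()-to-PrintName(StringStream*) migration as the other stubs in the patch: instead of each stub allocating a bootstrapper-owned char buffer, the caller supplies the stream and the stub only appends to it. A sketch of how a name is produced under the new scheme; the allocator/stream wiring is illustrative, while StringStream and HeapStringAllocator are existing v8::internal utilities:

    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    stub->PrintName(&stream);
    // For a strict non-equality CompareStub on a0/a1 the stream now holds
    // "CompareStub_NE_a0_a1_STRICT".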
diff --git a/deps/v8/src/mips/code-stubs-mips.h b/deps/v8/src/mips/code-stubs-mips.h
index 6c70bdd70a..aa224bcfa6 100644
--- a/deps/v8/src/mips/code-stubs-mips.h
+++ b/deps/v8/src/mips/code-stubs-mips.h
@@ -66,8 +66,7 @@ class UnaryOpStub: public CodeStub {
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
- operand_type_(operand_type),
- name_(NULL) {
+ operand_type_(operand_type) {
}
private:
@@ -77,19 +76,7 @@ class UnaryOpStub: public CodeStub {
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("UnaryOpStub %d (op %s), (mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- UnaryOpIC::GetName(operand_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -143,8 +130,7 @@ class BinaryOpStub: public CodeStub {
: op_(op),
mode_(mode),
operands_type_(BinaryOpIC::UNINITIALIZED),
- result_type_(BinaryOpIC::UNINITIALIZED),
- name_(NULL) {
+ result_type_(BinaryOpIC::UNINITIALIZED) {
use_fpu_ = CpuFeatures::IsSupported(FPU);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -157,8 +143,7 @@ class BinaryOpStub: public CodeStub {
mode_(ModeBits::decode(key)),
use_fpu_(FPUBits::decode(key)),
operands_type_(operands_type),
- result_type_(result_type),
- name_(NULL) { }
+ result_type_(result_type) { }
private:
enum SmiCodeGenerateHeapNumberResults {
@@ -174,20 +159,7 @@ class BinaryOpStub: public CodeStub {
BinaryOpIC::TypeInfo operands_type_;
BinaryOpIC::TypeInfo result_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("BinaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- BinaryOpIC::GetName(operands_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
// Minor key encoding in 16 bits RRRTTTVOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -374,12 +346,6 @@ class WriteInt32ToHeapNumberStub : public CodeStub {
}
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "WriteInt32ToHeapNumberStub"; }
-
-#ifdef DEBUG
- void Print() { PrintF("WriteInt32ToHeapNumberStub\n"); }
-#endif
};
@@ -406,14 +372,6 @@ class NumberToStringStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "NumberToStringStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("NumberToStringStub\n");
- }
-#endif
};
@@ -431,8 +389,6 @@ class RegExpCEntryStub: public CodeStub {
int MinorKey() { return 0; }
bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "RegExpCEntryStub"; }
};
// Trampoline stub to call into native code. To call safely into native code
@@ -453,8 +409,6 @@ class DirectCEntryStub: public CodeStub {
int MinorKey() { return 0; }
bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "DirectCEntryStub"; }
};
class FloatingPointHelper : public AllStatic {
@@ -636,13 +590,6 @@ class StringDictionaryLookupStub: public CodeStub {
StringDictionary::kHeaderSize +
StringDictionary::kElementsStartIndex * kPointerSize;
-
-#ifdef DEBUG
- void Print() {
- PrintF("StringDictionaryLookupStub\n");
- }
-#endif
-
Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
diff --git a/deps/v8/src/mips/codegen-mips.h b/deps/v8/src/mips/codegen-mips.h
index fecd321fad..a8de9c8610 100644
--- a/deps/v8/src/mips/codegen-mips.h
+++ b/deps/v8/src/mips/codegen-mips.h
@@ -60,9 +60,7 @@ class CodeGenerator: public AstVisitor {
// Print the code after compiling it.
static void PrintCode(Handle<Code> code, CompilationInfo* info);
-#ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type);
-#endif
static void SetFunctionInfo(Handle<JSFunction> fun,
FunctionLiteral* lit,
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index 5b9bbb5789..3f5ea7b914 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -783,7 +783,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ EmitVariableLoad(prop->obj()->AsVariableProxy());
}
__ push(result_register());
@@ -798,7 +798,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- __ CallWithAstId(ic);
+ __ Call(ic);
// Value in v0 is ignored (declarations are statements).
}
}
@@ -873,7 +873,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ Branch(&next_test, ne, v0, Operand(zero_reg));
@@ -1117,7 +1117,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
- EmitVariableLoad(expr->var());
+ EmitVariableLoad(expr);
}
@@ -1173,7 +1173,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ CallWithAstId(ic, mode);
+ __ Call(ic, mode);
}
@@ -1253,7 +1253,7 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
__ li(a0, Operand(key_literal->handle()));
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
__ Branch(done);
}
}
@@ -1262,7 +1262,11 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
}
-void FullCodeGenerator::EmitVariableLoad(Variable* var) {
+void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
+ // Record position before possible IC call.
+ SetSourcePosition(proxy->position());
+ Variable* var = proxy->var();
+
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
Slot* slot = var->AsSlot();
@@ -1275,7 +1279,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(v0);
} else if (slot->type() == Slot::LOOKUP) {
@@ -1421,7 +1425,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, key->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1598,7 +1602,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->target()->AsVariableProxy());
PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
@@ -1665,7 +1669,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ li(a2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name a0 and a2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
@@ -1674,7 +1678,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
@@ -1702,7 +1706,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- __ CallWithAstId(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -1785,7 +1789,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ CallWithAstId(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
@@ -1826,7 +1830,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- __ CallWithAstId(ic);
+ __ Call(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1839,7 +1843,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- __ CallWithAstId(ic);
+ __ Call(ic);
break;
}
}
@@ -1864,7 +1868,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
@@ -1962,7 +1966,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2014,7 +2018,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2067,7 +2071,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
- __ CallWithAstId(ic, mode, expr->id());
+ __ Call(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2101,7 +2105,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2301,7 +2305,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
__ lw(a1, GlobalObjectOperand());
__ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
__ Push(v0, a1); // Function, receiver.
@@ -2780,13 +2784,12 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
// with '%2s' (see Logger::LogRuntime for all the formats).
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
}
-#endif
+
// Finally, we're expected to leave a value on the top of the stack.
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
context()->Plug(v0);
@@ -3664,7 +3667,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
isolate()->stub_cache()->ComputeCallInitialize(arg_count,
NOT_IN_LOOP,
mode);
- __ CallWithAstId(ic, mode, expr->id());
+ __ Call(ic, mode, expr->id());
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -3807,7 +3810,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
__ mov(a0, result_register());
- __ CallWithAstId(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(v0);
}
@@ -3839,7 +3842,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
AccumulatorValueContext context(this);
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
@@ -3918,7 +3921,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- __ CallWithAstId(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -3951,7 +3954,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3969,7 +3972,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3993,7 +3996,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ CallWithAstId(ic);
+ __ Call(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL &&
@@ -4190,7 +4193,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET, expr->id());
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
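The full-codegen changes above are the mechanical CallWithAstId-to-Call rename; for reference, these are the three call forms the rewritten sites rely on (the receivers are declared on the MIPS MacroAssembler, not in this hunk, and ic/mode/expr are whatever is in scope at each call site):

    __ Call(ic);                                      // default reloc mode
    __ Call(ic, mode);                                // explicit reloc mode
    __ Call(ic, RelocInfo::CODE_TARGET, expr->id());  // reloc mode plus AST id for type feedback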
diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc
index cbae8e46e6..da39962691 100644
--- a/deps/v8/src/mips/ic-mips.cc
+++ b/deps/v8/src/mips/ic-mips.cc
@@ -214,115 +214,6 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
}
-static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
- Label* miss,
- Register elements,
- Register key,
- Register result,
- Register reg0,
- Register reg1,
- Register reg2) {
- // Register use:
- //
- // elements - holds the slow-case elements of the receiver on entry.
- // Unchanged unless 'result' is the same register.
- //
- // key - holds the smi key on entry.
- // Unchanged unless 'result' is the same register.
- //
- //
- // result - holds the result on exit if the load succeeded.
- // Allowed to be the same as 'key' or 'result'.
- // Unchanged on bailout so 'key' or 'result' can be used
- // in further computation.
- //
- // Scratch registers:
- //
- // reg0 - holds the untagged key on entry and holds the hash once computed.
- //
- // reg1 - Used to hold the capacity mask of the dictionary.
- //
- // reg2 - Used for the index into the dictionary.
- // at - Temporary (avoid MacroAssembler instructions also using 'at').
- Label done;
-
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- __ nor(reg1, reg0, zero_reg);
- __ sll(at, reg0, 15);
- __ addu(reg0, reg1, at);
-
- // hash = hash ^ (hash >> 12);
- __ srl(at, reg0, 12);
- __ xor_(reg0, reg0, at);
-
- // hash = hash + (hash << 2);
- __ sll(at, reg0, 2);
- __ addu(reg0, reg0, at);
-
- // hash = hash ^ (hash >> 4);
- __ srl(at, reg0, 4);
- __ xor_(reg0, reg0, at);
-
- // hash = hash * 2057;
- __ li(reg1, Operand(2057));
- __ mul(reg0, reg0, reg1);
-
- // hash = hash ^ (hash >> 16);
- __ srl(at, reg0, 16);
- __ xor_(reg0, reg0, at);
-
- // Compute the capacity mask.
- __ lw(reg1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
- __ sra(reg1, reg1, kSmiTagSize);
- __ Subu(reg1, reg1, Operand(1));
-
- // Generate an unrolled loop that performs a few probes before giving up.
- static const int kProbes = 4;
- for (int i = 0; i < kProbes; i++) {
- // Use reg2 for index calculations and keep the hash intact in reg0.
- __ mov(reg2, reg0);
- // Compute the masked index: (hash + i + i * i) & mask.
- if (i > 0) {
- __ Addu(reg2, reg2, Operand(NumberDictionary::GetProbeOffset(i)));
- }
- __ and_(reg2, reg2, reg1);
-
- // Scale the index by multiplying by the element size.
- ASSERT(NumberDictionary::kEntrySize == 3);
- __ sll(at, reg2, 1); // 2x.
- __ addu(reg2, reg2, at); // reg2 = reg2 * 3.
-
- // Check if the key is identical to the name.
- __ sll(at, reg2, kPointerSizeLog2);
- __ addu(reg2, elements, at);
-
- __ lw(at, FieldMemOperand(reg2, NumberDictionary::kElementsStartOffset));
- if (i != kProbes - 1) {
- __ Branch(&done, eq, key, Operand(at));
- } else {
- __ Branch(miss, ne, key, Operand(at));
- }
- }
-
- __ bind(&done);
- // Check that the value is a normal property.
- // reg2: elements + (index * kPointerSize).
- const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
- __ lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
- __ And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
- __ Branch(miss, ne, at, Operand(zero_reg));
-
- // Get the value at the masked, scaled index and return.
- const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
- __ lw(result, FieldMemOperand(reg2, kValueOffset));
-}
-
-
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a2 : name
@@ -751,7 +642,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
__ Branch(&slow_load, ne, a3, Operand(at));
__ sra(a0, a2, kSmiTagSize);
// a0: untagged index
- GenerateNumberDictionaryLoad(masm, &slow_load, t0, a2, a1, a0, a3, t1);
+ __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
__ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
__ jmp(&do_call);
@@ -963,6 +854,9 @@ static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
+ Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
+ __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
+ DONT_DO_SMI_CHECK);
__ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
__ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
__ li(scratch, Operand(kPointerSize >> 1));
@@ -1136,7 +1030,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kHashTableMapRootIndex);
__ Branch(&slow, ne, a3, Operand(at));
__ sra(a2, a0, kSmiTagSize);
- GenerateNumberDictionaryLoad(masm, &slow, t0, a0, v0, a2, a3, t1);
+ __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
__ Ret();
// Slow case, key and receiver still in a0 and a1.
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index 7c085baac0..712ceec957 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -50,87 +50,6 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
}
-// Arguments macros.
-#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
-#define COND_ARGS cond, r1, r2
-
-#define REGISTER_TARGET_BODY(Name) \
-void MacroAssembler::Name(Register target, \
- BranchDelaySlot bd) { \
- Name(Operand(target), bd); \
-} \
-void MacroAssembler::Name(Register target, COND_TYPED_ARGS, \
- BranchDelaySlot bd) { \
- Name(Operand(target), COND_ARGS, bd); \
-}
-
-
-#define INT_PTR_TARGET_BODY(Name) \
-void MacroAssembler::Name(intptr_t target, RelocInfo::Mode rmode, \
- BranchDelaySlot bd) { \
- Name(Operand(target, rmode), bd); \
-} \
-void MacroAssembler::Name(intptr_t target, \
- RelocInfo::Mode rmode, \
- COND_TYPED_ARGS, \
- BranchDelaySlot bd) { \
- Name(Operand(target, rmode), COND_ARGS, bd); \
-}
-
-
-#define BYTE_PTR_TARGET_BODY(Name) \
-void MacroAssembler::Name(byte* target, RelocInfo::Mode rmode, \
- BranchDelaySlot bd) { \
- Name(reinterpret_cast<intptr_t>(target), rmode, bd); \
-} \
-void MacroAssembler::Name(byte* target, \
- RelocInfo::Mode rmode, \
- COND_TYPED_ARGS, \
- BranchDelaySlot bd) { \
- Name(reinterpret_cast<intptr_t>(target), rmode, COND_ARGS, bd); \
-}
-
-
-#define CODE_TARGET_BODY(Name) \
-void MacroAssembler::Name(Handle<Code> target, RelocInfo::Mode rmode, \
- BranchDelaySlot bd) { \
- Name(reinterpret_cast<intptr_t>(target.location()), rmode, bd); \
-} \
-void MacroAssembler::Name(Handle<Code> target, \
- RelocInfo::Mode rmode, \
- COND_TYPED_ARGS, \
- BranchDelaySlot bd) { \
- Name(reinterpret_cast<intptr_t>(target.location()), rmode, COND_ARGS, bd); \
-}
-
-
-REGISTER_TARGET_BODY(Jump)
-REGISTER_TARGET_BODY(Call)
-INT_PTR_TARGET_BODY(Jump)
-INT_PTR_TARGET_BODY(Call)
-BYTE_PTR_TARGET_BODY(Jump)
-BYTE_PTR_TARGET_BODY(Call)
-CODE_TARGET_BODY(Jump)
-CODE_TARGET_BODY(Call)
-
-#undef COND_TYPED_ARGS
-#undef COND_ARGS
-#undef REGISTER_TARGET_BODY
-#undef BYTE_PTR_TARGET_BODY
-#undef CODE_TARGET_BODY
-
-
-void MacroAssembler::Ret(BranchDelaySlot bd) {
- Jump(Operand(ra), bd);
-}
-
-
-void MacroAssembler::Ret(Condition cond, Register r1, const Operand& r2,
- BranchDelaySlot bd) {
- Jump(Operand(ra), cond, r1, r2, bd);
-}
-
-
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index) {
lw(destination, MemOperand(s6, index << kPointerSizeLog2));
@@ -424,6 +343,114 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register result,
+ Register reg0,
+ Register reg1,
+ Register reg2) {
+ // Register use:
+ //
+ // elements - holds the slow-case elements of the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ //
+ // result - holds the result on exit if the load succeeded.
+ // Allowed to be the same as 'key' or 'result'.
+ // Unchanged on bailout so 'key' or 'result' can be used
+ // in further computation.
+ //
+ // Scratch registers:
+ //
+ // reg0 - holds the untagged key on entry and holds the hash once computed.
+ //
+ // reg1 - Used to hold the capacity mask of the dictionary.
+ //
+ // reg2 - Used for the index into the dictionary.
+ // at - Temporary (avoid MacroAssembler instructions also using 'at').
+ Label done;
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ nor(reg1, reg0, zero_reg);
+ sll(at, reg0, 15);
+ addu(reg0, reg1, at);
+
+ // hash = hash ^ (hash >> 12);
+ srl(at, reg0, 12);
+ xor_(reg0, reg0, at);
+
+ // hash = hash + (hash << 2);
+ sll(at, reg0, 2);
+ addu(reg0, reg0, at);
+
+ // hash = hash ^ (hash >> 4);
+ srl(at, reg0, 4);
+ xor_(reg0, reg0, at);
+
+ // hash = hash * 2057;
+ li(reg1, Operand(2057));
+ mul(reg0, reg0, reg1);
+
+ // hash = hash ^ (hash >> 16);
+ srl(at, reg0, 16);
+ xor_(reg0, reg0, at);
+
+ // Compute the capacity mask.
+ lw(reg1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
+ sra(reg1, reg1, kSmiTagSize);
+ Subu(reg1, reg1, Operand(1));
+
+ // Generate an unrolled loop that performs a few probes before giving up.
+ static const int kProbes = 4;
+ for (int i = 0; i < kProbes; i++) {
+ // Use reg2 for index calculations and keep the hash intact in reg0.
+ mov(reg2, reg0);
+ // Compute the masked index: (hash + i + i * i) & mask.
+ if (i > 0) {
+ Addu(reg2, reg2, Operand(NumberDictionary::GetProbeOffset(i)));
+ }
+ and_(reg2, reg2, reg1);
+
+ // Scale the index by multiplying by the element size.
+ ASSERT(NumberDictionary::kEntrySize == 3);
+ sll(at, reg2, 1); // 2x.
+ addu(reg2, reg2, at); // reg2 = reg2 * 3.
+
+ // Check if the key is identical to the name.
+ sll(at, reg2, kPointerSizeLog2);
+ addu(reg2, elements, at);
+
+ lw(at, FieldMemOperand(reg2, NumberDictionary::kElementsStartOffset));
+ if (i != kProbes - 1) {
+ Branch(&done, eq, key, Operand(at));
+ } else {
+ Branch(miss, ne, key, Operand(at));
+ }
+ }
+
+ bind(&done);
+ // Check that the value is a normal property.
+ // reg2: elements + (index * kPointerSize).
+ const int kDetailsOffset =
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
+ And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
+ Branch(miss, ne, at, Operand(zero_reg));
+
+ // Get the value at the masked, scaled index and return.
+ const int kValueOffset =
+ NumberDictionary::kElementsStartOffset + kPointerSize;
+ lw(result, FieldMemOperand(reg2, kValueOffset));
+}
+
+
// ---------------------------------------------------------------------------
// Instruction macros.
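
Annotation: the helper moves here verbatim from ic-mips.cc (see the deletion above) so that stub-cache-mips.cc can reuse it. The commented hash steps must stay in sync with ComputeIntegerHash in utils.h; a plain C++ restatement of exactly those steps, runnable on its own:

    #include <cstdint>
    #include <cstdio>

    static uint32_t ComputeIntegerHashSketch(uint32_t hash) {
      hash = ~hash + (hash << 15);  // the nor/sll/addu sequence above
      hash = hash ^ (hash >> 12);
      hash = hash + (hash << 2);
      hash = hash ^ (hash >> 4);
      hash = hash * 2057;
      hash = hash ^ (hash >> 16);
      return hash;
    }

    int main() {
      // The dictionary index is this hash (plus a probe offset on retries),
      // masked with capacity - 1 and scaled by kEntrySize == 3 words.
      std::printf("hash(42) = %u\n", ComputeIntegerHashSketch(42));
      return 0;
    }
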
@@ -1901,6 +1928,176 @@ void MacroAssembler::BranchAndLinkShort(Label* L, Condition cond, Register rs,
}
+void MacroAssembler::Jump(Register target,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ BlockTrampolinePoolScope block_trampoline_pool(this);
+ if (cond == cc_always) {
+ jr(target);
+ } else {
+ BRANCH_ARGS_CHECK(cond, rs, rt);
+ Branch(2, NegateCondition(cond), rs, rt);
+ jr(target);
+ }
+ // Emit a nop in the branch delay slot if required.
+ if (bd == PROTECT)
+ nop();
+}
+
+
+void MacroAssembler::Jump(intptr_t target,
+ RelocInfo::Mode rmode,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ li(t9, Operand(target, rmode));
+ Jump(t9, cond, rs, rt, bd);
+}
+
+
+void MacroAssembler::Jump(Address target,
+ RelocInfo::Mode rmode,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ ASSERT(!RelocInfo::IsCodeTarget(rmode));
+ Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
+}
+
+
+void MacroAssembler::Jump(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ ASSERT(RelocInfo::IsCodeTarget(rmode));
+ Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
+}
+
+
+int MacroAssembler::CallSize(Register target,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ int size = 0;
+
+ if (cond == cc_always) {
+ size += 1;
+ } else {
+ size += 3;
+ }
+
+ if (bd == PROTECT)
+ size += 1;
+
+ return size * kInstrSize;
+}
+
+
+// Note: To call gcc-compiled C code on mips, you must call thru t9.
+void MacroAssembler::Call(Register target,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ BlockTrampolinePoolScope block_trampoline_pool(this);
+ Label start;
+ bind(&start);
+ if (cond == cc_always) {
+ jalr(target);
+ } else {
+ BRANCH_ARGS_CHECK(cond, rs, rt);
+ Branch(2, NegateCondition(cond), rs, rt);
+ jalr(target);
+ }
+ // Emit a nop in the branch delay slot if required.
+ if (bd == PROTECT)
+ nop();
+
+ ASSERT_EQ(CallSize(target, cond, rs, rt, bd),
+ SizeOfCodeGeneratedSince(&start));
+}
+
+
+int MacroAssembler::CallSize(Address target,
+ RelocInfo::Mode rmode,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ int size = CallSize(t9, cond, rs, rt, bd);
+ return size + 2 * kInstrSize;
+}
+
+
+void MacroAssembler::Call(Address target,
+ RelocInfo::Mode rmode,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ BlockTrampolinePoolScope block_trampoline_pool(this);
+ Label start;
+ bind(&start);
+ int32_t target_int = reinterpret_cast<int32_t>(target);
+ // Must record previous source positions before the
+ // li() generates a new code target.
+ positions_recorder()->WriteRecordedPositions();
+ li(t9, Operand(target_int, rmode), true);
+ Call(t9, cond, rs, rt, bd);
+ ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
+ SizeOfCodeGeneratedSince(&start));
+}
+
+
+int MacroAssembler::CallSize(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ return CallSize(reinterpret_cast<Address>(code.location()),
+ rmode, cond, rs, rt, bd);
+}
+
+
+void MacroAssembler::Call(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id,
+ Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ BlockTrampolinePoolScope block_trampoline_pool(this);
+ Label start;
+ bind(&start);
+ ASSERT(RelocInfo::IsCodeTarget(rmode));
+ if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
+ ASSERT(ast_id_for_reloc_info_ == kNoASTId);
+ ast_id_for_reloc_info_ = ast_id;
+ rmode = RelocInfo::CODE_TARGET_WITH_ID;
+ }
+ Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
+ ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt),
+ SizeOfCodeGeneratedSince(&start));
+}
+
+
+void MacroAssembler::Ret(Condition cond,
+ Register rs,
+ const Operand& rt,
+ BranchDelaySlot bd) {
+ Jump(ra, cond, rs, rt, bd);
+}
+
+
void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) {
BlockTrampolinePoolScope block_trampoline_pool(this);
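
Annotation: the new CallSize helpers replace the removed fixed constants (2 instructions for a register target, 4 for a code target) with explicit accounting. A standalone restatement of that arithmetic, checking that the default unconditional, PROTECT-ed call through an address still totals the old 4 * kInstrSize:

    #include <cassert>

    const int kInstrSize = 4;  // MIPS instruction size in bytes
    enum Condition { cc_always, some_other_cond };
    enum BranchDelaySlot { USE_DELAY_SLOT, PROTECT };

    int CallSizeRegister(Condition cond, BranchDelaySlot bd) {
      // jalr only, or branch (with its delay slot) plus jalr.
      int instrs = (cond == cc_always) ? 1 : 3;
      if (bd == PROTECT) instrs += 1;  // explicit nop in jalr's delay slot
      return instrs * kInstrSize;
    }

    int CallSizeAddress(Condition cond, BranchDelaySlot bd) {
      return CallSizeRegister(cond, bd) + 2 * kInstrSize;  // li(t9, ...) is 2 instrs
    }

    int main() {
      // li (2) + jalr (1) + nop (1) = 4 instructions = 16 bytes, the same size
      // the removed fixed CallSize(Handle<Code>, rmode) reported.
      assert(CallSizeAddress(cc_always, PROTECT) == 4 * kInstrSize);
      return 0;
    }
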
@@ -1959,142 +2156,24 @@ void MacroAssembler::Jalr(Label* L, BranchDelaySlot bdslot) {
}
-void MacroAssembler::Jump(const Operand& target, BranchDelaySlot bdslot) {
- BlockTrampolinePoolScope block_trampoline_pool(this);
- if (target.is_reg()) {
- jr(target.rm());
- } else {
- if (!MustUseReg(target.rmode_)) {
- j(target.imm32_);
- } else {
- li(t9, target);
- jr(t9);
- }
- }
- // Emit a nop in the branch delay slot if required.
- if (bdslot == PROTECT)
- nop();
-}
-
-
-void MacroAssembler::Jump(const Operand& target,
- Condition cond, Register rs, const Operand& rt,
- BranchDelaySlot bdslot) {
- BlockTrampolinePoolScope block_trampoline_pool(this);
- BRANCH_ARGS_CHECK(cond, rs, rt);
- if (target.is_reg()) {
- if (cond == cc_always) {
- jr(target.rm());
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- jr(target.rm());
- }
- } else { // Not register target.
- if (!MustUseReg(target.rmode_)) {
- if (cond == cc_always) {
- j(target.imm32_);
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- j(target.imm32_); // Will generate only one instruction.
- }
- } else { // MustUseReg(target).
- li(t9, target);
- if (cond == cc_always) {
- jr(t9);
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- jr(t9); // Will generate only one instruction.
- }
- }
- }
- // Emit a nop in the branch delay slot if required.
- if (bdslot == PROTECT)
- nop();
-}
-
-
-int MacroAssembler::CallSize(Handle<Code> code, RelocInfo::Mode rmode) {
- return 4 * kInstrSize;
-}
-
-
-int MacroAssembler::CallSize(Register reg) {
- return 2 * kInstrSize;
-}
-
-
-// Note: To call gcc-compiled C code on mips, you must call thru t9.
-void MacroAssembler::Call(const Operand& target, BranchDelaySlot bdslot) {
- BlockTrampolinePoolScope block_trampoline_pool(this);
- if (target.is_reg()) {
- jalr(target.rm());
- } else { // !target.is_reg().
- if (!MustUseReg(target.rmode_)) {
- jal(target.imm32_);
- } else { // MustUseReg(target).
- // Must record previous source positions before the
- // li() generates a new code target.
- positions_recorder()->WriteRecordedPositions();
- li(t9, target);
- jalr(t9);
- }
- }
- // Emit a nop in the branch delay slot if required.
- if (bdslot == PROTECT)
- nop();
-}
-
-
-// Note: To call gcc-compiled C code on mips, you must call thru t9.
-void MacroAssembler::Call(const Operand& target,
- Condition cond, Register rs, const Operand& rt,
- BranchDelaySlot bdslot) {
- BlockTrampolinePoolScope block_trampoline_pool(this);
- BRANCH_ARGS_CHECK(cond, rs, rt);
- if (target.is_reg()) {
- if (cond == cc_always) {
- jalr(target.rm());
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- jalr(target.rm());
- }
- } else { // !target.is_reg().
- if (!MustUseReg(target.rmode_)) {
- if (cond == cc_always) {
- jal(target.imm32_);
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- jal(target.imm32_); // Will generate only one instruction.
- }
- } else { // MustUseReg(target)
- li(t9, target);
- if (cond == cc_always) {
- jalr(t9);
- } else {
- Branch(2, NegateCondition(cond), rs, rt);
- jalr(t9); // Will generate only one instruction.
- }
- }
+void MacroAssembler::DropAndRet(int drop,
+ Condition cond,
+ Register r1,
+ const Operand& r2) {
+ // This is a workaround to make sure only one branch instruction is
+ // generated. It relies on Drop and Ret not creating branches if
+ // cond == cc_always.
+ Label skip;
+ if (cond != cc_always) {
+ Branch(&skip, NegateCondition(cond), r1, r2);
}
- // Emit a nop in the branch delay slot if required.
- if (bdslot == PROTECT)
- nop();
-}
+ Drop(drop);
+ Ret();
-void MacroAssembler::CallWithAstId(Handle<Code> code,
- RelocInfo::Mode rmode,
- unsigned ast_id,
- Condition cond,
- Register r1,
- const Operand& r2) {
- ASSERT(RelocInfo::IsCodeTarget(rmode));
- if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
- ASSERT(ast_id_for_reloc_info_ == kNoASTId);
- ast_id_for_reloc_info_ = ast_id;
- rmode = RelocInfo::CODE_TARGET_WITH_ID;
+ if (cond != cc_always) {
+ bind(&skip);
}
- Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond, r1, r2);
}
@@ -2109,12 +2188,10 @@ void MacroAssembler::Drop(int count,
Label skip;
if (cond != al) {
- Branch(&skip, NegateCondition(cond), reg, op);
+ Branch(&skip, NegateCondition(cond), reg, op);
}
- if (count > 0) {
- addiu(sp, sp, count * kPointerSize);
- }
+ addiu(sp, sp, count * kPointerSize);
if (cond != al) {
bind(&skip);
@@ -2122,26 +2199,6 @@ void MacroAssembler::Drop(int count,
}
-void MacroAssembler::DropAndRet(int drop,
- Condition cond,
- Register r1,
- const Operand& r2) {
- // This is a workaround to make sure only one branch instruction is
- // generated. It relies on Drop and Ret not creating branches if
- // cond == cc_always.
- Label skip;
- if (cond != cc_always) {
- Branch(&skip, NegateCondition(cond), r1, r2);
- }
-
- Drop(drop);
- Ret();
-
- if (cond != cc_always) {
- bind(&skip);
- }
-}
-
void MacroAssembler::Swap(Register reg1,
Register reg2,
@@ -2804,7 +2861,7 @@ void MacroAssembler::CheckFastElements(Register map,
Register scratch,
Label* fail) {
STATIC_ASSERT(JSObject::FAST_ELEMENTS == 0);
- lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
+ lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Branch(fail, hi, scratch, Operand(Map::kMaximumBitField2FastElementValue));
}
@@ -2979,9 +3036,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Handle<Code> adaptor =
isolate()->builtins()->ArgumentsAdaptorTrampoline();
if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
+ call_wrapper.BeforeCall(CallSize(adaptor));
SetCallKind(t1, call_kind);
- Call(adaptor, RelocInfo::CODE_TARGET);
+ Call(adaptor);
call_wrapper.AfterCall();
jmp(done);
} else {
@@ -3178,7 +3235,7 @@ void MacroAssembler::GetObjectType(Register object,
void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
Register r1, const Operand& r2) {
ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
- Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2);
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2);
}
@@ -3189,7 +3246,8 @@ MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond,
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2);
+ Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET,
+ kNoASTId, cond, r1, r2);
return result;
}
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index 985ef0c830..4994516ea7 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -99,44 +99,11 @@ class MacroAssembler: public Assembler {
// macro assembler.
MacroAssembler(Isolate* isolate, void* buffer, int size);
-// Arguments macros.
+ // Arguments macros.
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
#define COND_ARGS cond, r1, r2
-// Prototypes.
-
-// Prototypes for functions with no target (eg Ret()).
-#define DECLARE_NOTARGET_PROTOTYPE(Name) \
- void Name(BranchDelaySlot bd = PROTECT); \
- void Name(COND_TYPED_ARGS, BranchDelaySlot bd = PROTECT); \
- inline void Name(BranchDelaySlot bd, COND_TYPED_ARGS) { \
- Name(COND_ARGS, bd); \
- }
-
-// Prototypes for functions with a target.
-
-// Cases when relocation may be needed.
-#define DECLARE_RELOC_PROTOTYPE(Name, target_type) \
- void Name(target_type target, \
- RelocInfo::Mode rmode, \
- BranchDelaySlot bd = PROTECT); \
- inline void Name(BranchDelaySlot bd, \
- target_type target, \
- RelocInfo::Mode rmode) { \
- Name(target, rmode, bd); \
- } \
- void Name(target_type target, \
- RelocInfo::Mode rmode, \
- COND_TYPED_ARGS, \
- BranchDelaySlot bd = PROTECT); \
- inline void Name(BranchDelaySlot bd, \
- target_type target, \
- RelocInfo::Mode rmode, \
- COND_TYPED_ARGS) { \
- Name(target, rmode, COND_ARGS, bd); \
- }
-
-// Cases when relocation is not needed.
+ // Cases when relocation is not needed.
#define DECLARE_NORELOC_PROTOTYPE(Name, target_type) \
void Name(target_type target, BranchDelaySlot bd = PROTECT); \
inline void Name(BranchDelaySlot bd, target_type target) { \
@@ -151,44 +118,44 @@ class MacroAssembler: public Assembler {
Name(target, COND_ARGS, bd); \
}
-// Target prototypes.
-
-#define DECLARE_JUMP_CALL_PROTOTYPES(Name) \
- DECLARE_NORELOC_PROTOTYPE(Name, Register) \
- DECLARE_NORELOC_PROTOTYPE(Name, const Operand&) \
- DECLARE_RELOC_PROTOTYPE(Name, byte*) \
- DECLARE_RELOC_PROTOTYPE(Name, Handle<Code>)
-
#define DECLARE_BRANCH_PROTOTYPES(Name) \
DECLARE_NORELOC_PROTOTYPE(Name, Label*) \
DECLARE_NORELOC_PROTOTYPE(Name, int16_t)
+ DECLARE_BRANCH_PROTOTYPES(Branch)
+ DECLARE_BRANCH_PROTOTYPES(BranchAndLink)
-DECLARE_JUMP_CALL_PROTOTYPES(Jump)
-DECLARE_JUMP_CALL_PROTOTYPES(Call)
-
-DECLARE_BRANCH_PROTOTYPES(Branch)
-DECLARE_BRANCH_PROTOTYPES(BranchAndLink)
-
-DECLARE_NOTARGET_PROTOTYPE(Ret)
-
+#undef DECLARE_BRANCH_PROTOTYPES
#undef COND_TYPED_ARGS
#undef COND_ARGS
-#undef DECLARE_NOTARGET_PROTOTYPE
-#undef DECLARE_NORELOC_PROTOTYPE
-#undef DECLARE_RELOC_PROTOTYPE
-#undef DECLARE_JUMP_CALL_PROTOTYPES
-#undef DECLARE_BRANCH_PROTOTYPES
- void CallWithAstId(Handle<Code> code,
- RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId,
- Condition cond = al,
- Register r1 = zero_reg,
- const Operand& r2 = Operand(zero_reg));
- int CallSize(Register reg);
- int CallSize(Handle<Code> code, RelocInfo::Mode rmode);
+ // Jump, Call, and Ret pseudo instructions implementing inter-working.
+#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
+ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
+
+ void Jump(Register target, COND_ARGS);
+ void Jump(intptr_t target, RelocInfo::Mode rmode, COND_ARGS);
+ void Jump(Address target, RelocInfo::Mode rmode, COND_ARGS);
+ void Jump(Handle<Code> code, RelocInfo::Mode rmode, COND_ARGS);
+ int CallSize(Register target, COND_ARGS);
+ void Call(Register target, COND_ARGS);
+ int CallSize(Address target, RelocInfo::Mode rmode, COND_ARGS);
+ void Call(Address target, RelocInfo::Mode rmode, COND_ARGS);
+ int CallSize(Handle<Code> code,
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
+ unsigned ast_id = kNoASTId,
+ COND_ARGS);
+ void Call(Handle<Code> code,
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
+ unsigned ast_id = kNoASTId,
+ COND_ARGS);
+ void Ret(COND_ARGS);
+ inline void Ret(BranchDelaySlot bd) {
+ Ret(al, zero_reg, Operand(zero_reg), bd);
+ }
+
+#undef COND_ARGS
// Emit code to discard a non-negative number of pointer-sized elements
// from the stack, clobbering only the sp register.
@@ -299,6 +266,16 @@ DECLARE_NOTARGET_PROTOTYPE(Ret)
Register scratch,
Label* miss);
+
+ void LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register result,
+ Register reg0,
+ Register reg1,
+ Register reg2);
+
+
inline void MarkCode(NopMarkerTypes type) {
nop(type);
}
@@ -1125,17 +1102,6 @@ DECLARE_NOTARGET_PROTOTYPE(Ret)
void Jr(Label* L, BranchDelaySlot bdslot);
void Jalr(Label* L, BranchDelaySlot bdslot);
- void Jump(intptr_t target, RelocInfo::Mode rmode,
- BranchDelaySlot bd = PROTECT);
- void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
- Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg),
- BranchDelaySlot bd = PROTECT);
- void Call(intptr_t target, RelocInfo::Mode rmode,
- BranchDelaySlot bd = PROTECT);
- void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
- Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg),
- BranchDelaySlot bd = PROTECT);
-
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
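
Annotation: the old DECLARE_JUMP_CALL_PROTOTYPES machinery is replaced by explicit overloads that share a defaulted parameter tail through the temporary COND_ARGS macro, which is #undef'd right after the declarations. A minimal standalone illustration of that pattern (the real macro also defaults the comparison register and operand):

    #include <cstdio>

    enum Condition { al, ne };
    enum BranchDelaySlot { USE_DELAY_SLOT, PROTECT };

    #define COND_ARGS Condition cond = al, BranchDelaySlot bd = PROTECT

    void Jump(int target, COND_ARGS) { std::printf("jump %d %d %d\n", target, cond, bd); }
    void Call(int target, COND_ARGS) { std::printf("call %d %d %d\n", target, cond, bd); }

    #undef COND_ARGS

    int main() {
      Call(1);      // defaults: cond = al, bd = PROTECT
      Jump(2, ne);  // condition supplied, delay slot still defaulted
      return 0;
    }
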
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
index cfc8f651c7..9935ef9b5b 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -179,7 +179,7 @@ void RegExpMacroAssemblerMIPS::Backtrack() {
// Pop Code* offset from backtrack stack, add Code* and jump to location.
Pop(a0);
__ Addu(a0, a0, code_pointer());
- __ Jump(Operand(a0));
+ __ Jump(a0);
}
@@ -1238,7 +1238,7 @@ void RegExpCEntryStub::Generate(MacroAssembler* masm_) {
__ Call(t9);
__ lw(ra, MemOperand(sp, 0));
__ Addu(sp, sp, Operand(stack_alignment));
- __ Jump(Operand(ra));
+ __ Jump(ra);
}
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.h b/deps/v8/src/mips/regexp-macro-assembler-mips.h
index ad7ada5473..7fe0c8865e 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.h
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.h
@@ -29,6 +29,12 @@
#ifndef V8_MIPS_REGEXP_MACRO_ASSEMBLER_MIPS_H_
#define V8_MIPS_REGEXP_MACRO_ASSEMBLER_MIPS_H_
+#include "mips/assembler-mips.h"
+#include "mips/assembler-mips-inl.h"
+#include "macro-assembler.h"
+#include "code.h"
+#include "mips/macro-assembler-mips.h"
+
namespace v8 {
namespace internal {
@@ -249,4 +255,3 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
}} // namespace v8::internal
#endif // V8_MIPS_REGEXP_MACRO_ASSEMBLER_MIPS_H_
-
diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc
index 3e5a0091ca..919bdc40c2 100644
--- a/deps/v8/src/mips/stub-cache-mips.cc
+++ b/deps/v8/src/mips/stub-cache-mips.cc
@@ -3099,7 +3099,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
// -- a1 : receiver
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedLoadElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(a1,
a2,
@@ -3190,7 +3191,10 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
// -- a3 : scratch
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedStoreElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ MaybeObject* maybe_stub =
+ KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(a2,
a3,
@@ -3390,6 +3394,54 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
#define __ ACCESS_MASM(masm)
+void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+ MacroAssembler* masm) {
+ // ---------- S t a t e --------------
+ // -- ra : return address
+ // -- a0 : key
+ // -- a1 : receiver
+ // -----------------------------------
+ Label slow, miss_force_generic;
+
+ Register key = a0;
+ Register receiver = a1;
+
+ __ JumpIfNotSmi(key, &miss_force_generic);
+ __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ sra(a2, a0, kSmiTagSize);
+ __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
+ __ Ret();
+
+ // Slow case, key and receiver still in a0 and a1.
+ __ bind(&slow);
+ __ IncrementCounter(
+ masm->isolate()->counters()->keyed_load_external_array_slow(),
+ 1, a2, a3);
+ // Entry registers are intact.
+ // ---------- S t a t e --------------
+ // -- ra : return address
+ // -- a0 : key
+ // -- a1 : receiver
+ // -----------------------------------
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ Jump(slow_ic, RelocInfo::CODE_TARGET);
+
+ // Miss case, call the runtime.
+ __ bind(&miss_force_generic);
+
+ // ---------- S t a t e --------------
+ // -- ra : return address
+ // -- a0 : key
+ // -- a1 : receiver
+ // -----------------------------------
+
+ Handle<Code> miss_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+}
+
+
static bool IsElementTypeSigned(JSObject::ElementsKind elements_kind) {
switch (elements_kind) {
case JSObject::EXTERNAL_BYTE_ELEMENTS:
@@ -4201,7 +4253,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
// have been verified by the caller to not be a smi.
// Check that the key is a smi.
- __ JumpIfNotSmi(a0, &miss_force_generic);
+ __ JumpIfNotSmi(key_reg, &miss_force_generic);
// Get the elements array and make sure it is a fast element array, not 'cow'.
__ lw(elements_reg,
diff --git a/deps/v8/src/mksnapshot.cc b/deps/v8/src/mksnapshot.cc
index 1ed610341e..c5ce12f0ec 100644
--- a/deps/v8/src/mksnapshot.cc
+++ b/deps/v8/src/mksnapshot.cc
@@ -296,10 +296,9 @@ class BZip2Decompressor : public StartupDataDecompressor {
int main(int argc, char** argv) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// By default, log code create information in the snapshot.
i::FLAG_log_code = true;
-#endif
+
// Print the usage if an error occurs when parsing the command line
// flags or if the help flag is set.
int result = i::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
diff --git a/deps/v8/src/objects-visiting.h b/deps/v8/src/objects-visiting.h
index a2b1c4fc24..cc64763b5f 100644
--- a/deps/v8/src/objects-visiting.h
+++ b/deps/v8/src/objects-visiting.h
@@ -30,6 +30,22 @@
#include "allocation.h"
+#if V8_TARGET_ARCH_IA32
+#include "ia32/assembler-ia32.h"
+#include "ia32/assembler-ia32-inl.h"
+#elif V8_TARGET_ARCH_X64
+#include "x64/assembler-x64.h"
+#include "x64/assembler-x64-inl.h"
+#elif V8_TARGET_ARCH_ARM
+#include "arm/assembler-arm.h"
+#include "arm/assembler-arm-inl.h"
+#elif V8_TARGET_ARCH_MIPS
+#include "mips/assembler-mips.h"
+#include "mips/assembler-mips-inl.h"
+#else
+#error Unsupported target architecture.
+#endif
+
// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting HeapObject body with a normal ObjectVisitor requires performing
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 6242198ec3..ca780dbe0e 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -1883,13 +1883,9 @@ void JSObject::LookupCallbackSetterInPrototypes(String* name,
pt = pt->GetPrototype()) {
JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
if (result->IsProperty()) {
- if (result->IsReadOnly()) {
- result->NotFound();
- return;
- }
- if (result->type() == CALLBACKS) {
- return;
- }
+ if (result->type() == CALLBACKS && !result->IsReadOnly()) return;
+ // Found non-callback or read-only callback, stop looking.
+ break;
}
}
result->NotFound();
@@ -2273,10 +2269,10 @@ MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
- String* name,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ String* name,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode) {
Heap* heap = GetHeap();
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
@@ -3068,7 +3064,9 @@ MaybeObject* JSObject::DeleteDictionaryElement(uint32_t index,
Isolate* isolate = GetIsolate();
Heap* heap = isolate->heap();
FixedArray* backing_store = FixedArray::cast(elements());
- if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
+ bool is_arguments =
+ (GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
+ if (is_arguments) {
backing_store = FixedArray::cast(backing_store->get(1));
}
NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
@@ -3081,7 +3079,11 @@ MaybeObject* JSObject::DeleteDictionaryElement(uint32_t index,
if (!maybe_elements->To(&new_elements)) {
return maybe_elements;
}
- set_elements(new_elements);
+ if (is_arguments) {
+ FixedArray::cast(elements())->set(1, new_elements);
+ } else {
+ set_elements(new_elements);
+ }
}
if (mode == STRICT_DELETION && result == heap->false_value()) {
// In strict mode, attempting to delete a non-configurable property
@@ -3375,23 +3377,22 @@ MaybeObject* JSObject::PreventExtensions() {
}
// If there are fast elements we normalize.
- if (HasFastElements()) {
- MaybeObject* result = NormalizeElements();
- if (result->IsFailure()) return result;
+ NumberDictionary* dictionary = NULL;
+ { MaybeObject* maybe = NormalizeElements();
+ if (!maybe->To<NumberDictionary>(&dictionary)) return maybe;
}
- // TODO(kmillikin): Handle arguments object with dictionary elements.
- ASSERT(HasDictionaryElements());
+ ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
// Make sure that we never go back to fast case.
- element_dictionary()->set_requires_slow_elements();
+ dictionary->set_requires_slow_elements();
// Do a map transition, other objects with this map may still
// be extensible.
- Object* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+ Map* new_map;
+ { MaybeObject* maybe = map()->CopyDropTransitions();
+ if (!maybe->To<Map>(&new_map)) return maybe;
}
- Map::cast(new_map)->set_is_extensible(false);
- set_map(Map::cast(new_map));
+ new_map->set_is_extensible(false);
+ set_map(new_map);
ASSERT(!map()->is_extensible());
return new_map;
}
@@ -4117,6 +4118,8 @@ void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
}
}
if (!map_done) continue;
+ } else {
+ map_or_index_field = NULL;
}
// That was the regular transitions, now for the prototype transitions.
FixedArray* prototype_transitions =
@@ -9428,7 +9431,7 @@ void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
}
ASSERT(storage->length() >= index);
} else {
- property_dictionary()->CopyKeysTo(storage);
+ property_dictionary()->CopyKeysTo(storage, StringDictionary::UNSORTED);
}
}
@@ -9505,33 +9508,49 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
break;
case DICTIONARY_ELEMENTS: {
if (storage != NULL) {
- element_dictionary()->CopyKeysTo(storage, filter);
+ element_dictionary()->CopyKeysTo(storage,
+ filter,
+ NumberDictionary::SORTED);
}
counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
break;
}
case NON_STRICT_ARGUMENTS_ELEMENTS: {
FixedArray* parameter_map = FixedArray::cast(elements());
- int length = parameter_map->length();
- for (int i = 2; i < length; ++i) {
- if (!parameter_map->get(i)->IsTheHole()) {
- if (storage != NULL) storage->set(i - 2, Smi::FromInt(i - 2));
- ++counter;
- }
- }
+ int mapped_length = parameter_map->length() - 2;
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
if (arguments->IsDictionary()) {
+ // Copy the keys from arguments first, because Dictionary::CopyKeysTo
+ // will insert in storage starting at index 0.
NumberDictionary* dictionary = NumberDictionary::cast(arguments);
- if (storage != NULL) dictionary->CopyKeysTo(storage, filter);
+ if (storage != NULL) {
+ dictionary->CopyKeysTo(storage, filter, NumberDictionary::UNSORTED);
+ }
counter += dictionary->NumberOfElementsFilterAttributes(filter);
+ for (int i = 0; i < mapped_length; ++i) {
+ if (!parameter_map->get(i + 2)->IsTheHole()) {
+ if (storage != NULL) storage->set(counter, Smi::FromInt(i));
+ ++counter;
+ }
+ }
+ if (storage != NULL) storage->SortPairs(storage, counter);
+
} else {
- int length = arguments->length();
- for (int i = 0; i < length; ++i) {
- if (!arguments->get(i)->IsTheHole()) {
- if (storage != NULL) storage->set(i, Smi::FromInt(i));
+ int backing_length = arguments->length();
+ int i = 0;
+ for (; i < mapped_length; ++i) {
+ if (!parameter_map->get(i + 2)->IsTheHole()) {
+ if (storage != NULL) storage->set(counter, Smi::FromInt(i));
+ ++counter;
+ } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
+ if (storage != NULL) storage->set(counter, Smi::FromInt(i));
++counter;
}
}
+ for (; i < backing_length; ++i) {
+ if (storage != NULL) storage->set(counter, Smi::FromInt(i));
+ ++counter;
+ }
}
break;
}
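
Annotation: the rewritten NON_STRICT_ARGUMENTS_ELEMENTS case walks the mapped slots (parameter map entries 2..n) and the backing store held at slot 1 together. A small standalone model of the fast-elements branch, not V8 code, showing which indices end up as keys:

    #include <cstdio>
    #include <vector>

    const int kHole = -1;  // stand-in for the hole value

    std::vector<int> LocalElementKeys(const std::vector<int>& mapped,
                                      const std::vector<int>& backing) {
      std::vector<int> keys;
      size_t i = 0;
      for (; i < mapped.size(); ++i) {
        if (mapped[i] != kHole) {
          keys.push_back(static_cast<int>(i));                 // mapped to a context slot
        } else if (i < backing.size() && backing[i] != kHole) {
          keys.push_back(static_cast<int>(i));                 // present only in the backing store
        }
      }
      for (; i < backing.size(); ++i) {
        keys.push_back(static_cast<int>(i));                   // tail beyond the mapped part
      }
      return keys;
    }

    int main() {
      // Two mapped parameters (the second already unmapped), backing store of four.
      std::vector<int> mapped  = {10, kHole};
      std::vector<int> backing = {kHole, 20, 30, 40};
      for (int k : LocalElementKeys(mapped, backing)) std::printf("%d ", k);
      std::printf("\n");  // prints: 0 1 2 3
      return 0;
    }
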
@@ -10132,7 +10151,9 @@ template Object* Dictionary<StringDictionaryShape, String*>::SlowReverseLookup(
Object*);
template void Dictionary<NumberDictionaryShape, uint32_t>::CopyKeysTo(
- FixedArray*, PropertyAttributes);
+ FixedArray*,
+ PropertyAttributes,
+ Dictionary<NumberDictionaryShape, uint32_t>::SortMode);
template Object* Dictionary<StringDictionaryShape, String*>::DeleteProperty(
int, JSObject::DeleteMode);
@@ -10147,7 +10168,8 @@ template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::Shrink(
uint32_t);
template void Dictionary<StringDictionaryShape, String*>::CopyKeysTo(
- FixedArray*);
+ FixedArray*,
+ Dictionary<StringDictionaryShape, String*>::SortMode);
template int
Dictionary<StringDictionaryShape, String*>::NumberOfElementsFilterAttributes(
@@ -11199,8 +11221,10 @@ int Dictionary<Shape, Key>::NumberOfEnumElements() {
template<typename Shape, typename Key>
-void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage,
- PropertyAttributes filter) {
+void Dictionary<Shape, Key>::CopyKeysTo(
+ FixedArray* storage,
+ PropertyAttributes filter,
+ typename Dictionary<Shape, Key>::SortMode sort_mode) {
ASSERT(storage->length() >= NumberOfEnumElements());
int capacity = HashTable<Shape, Key>::Capacity();
int index = 0;
@@ -11213,7 +11237,9 @@ void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage,
if ((attr & filter) == 0) storage->set(index++, k);
}
}
- storage->SortPairs(storage, index);
+ if (sort_mode == Dictionary<Shape, Key>::SORTED) {
+ storage->SortPairs(storage, index);
+ }
ASSERT(storage->length() >= index);
}
@@ -11239,7 +11265,9 @@ void StringDictionary::CopyEnumKeysTo(FixedArray* storage,
template<typename Shape, typename Key>
-void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage) {
+void Dictionary<Shape, Key>::CopyKeysTo(
+ FixedArray* storage,
+ typename Dictionary<Shape, Key>::SortMode sort_mode) {
ASSERT(storage->length() >= NumberOfElementsFilterAttributes(
static_cast<PropertyAttributes>(NONE)));
int capacity = HashTable<Shape, Key>::Capacity();
@@ -11252,6 +11280,9 @@ void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage) {
storage->set(index++, k);
}
}
+ if (sort_mode == Dictionary<Shape, Key>::SORTED) {
+ storage->SortPairs(storage, index);
+ }
ASSERT(storage->length() >= index);
}
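
Annotation: both CopyKeysTo overloads now take an explicit SortMode, so callers such as the arguments path above can collect keys unsorted and sort once at the end. A minimal sketch of that contract, with stand-in containers instead of V8's FixedArray:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    enum SortMode { UNSORTED, SORTED };

    void CopyKeysTo(const std::vector<int>& dict, std::vector<int>* storage,
                    SortMode sort_mode) {
      for (int key : dict) storage->push_back(key);
      if (sort_mode == SORTED) std::sort(storage->begin(), storage->end());
    }

    int main() {
      std::vector<int> storage;
      CopyKeysTo({7, 3, 5}, &storage, UNSORTED);    // dictionary keys, order unspecified
      storage.push_back(1);                         // mapped indices appended afterwards
      std::sort(storage.begin(), storage.end());    // one SortPairs-style pass at the end
      for (int k : storage) std::printf("%d ", k);  // prints: 1 3 5 7
      std::printf("\n");
      return 0;
    }
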
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index c34efdd4c7..9765fe2a0a 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -2770,10 +2770,13 @@ class Dictionary: public HashTable<Shape, Key> {
// Returns the number of enumerable elements in the dictionary.
int NumberOfEnumElements();
+ enum SortMode { UNSORTED, SORTED };
// Copies keys to preallocated fixed array.
- void CopyKeysTo(FixedArray* storage, PropertyAttributes filter);
+ void CopyKeysTo(FixedArray* storage,
+ PropertyAttributes filter,
+ SortMode sort_mode);
// Fill in details for properties into storage.
- void CopyKeysTo(FixedArray* storage);
+ void CopyKeysTo(FixedArray* storage, SortMode sort_mode);
// Accessors for next enumeration index.
void SetNextEnumerationIndex(int index) {
diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc
index 184f0a2a27..3085ef86be 100644
--- a/deps/v8/src/parser.cc
+++ b/deps/v8/src/parser.cc
@@ -823,14 +823,24 @@ class ParserFinder {
// form expr.a = ...; expr.b = ...; etc.
class InitializationBlockFinder : public ParserFinder {
public:
- InitializationBlockFinder()
- : first_in_block_(NULL), last_in_block_(NULL), block_size_(0) {}
+  // We find and mark the initialization blocks in top-level,
+  // non-looping code only. This is because the optimization prevents
+  // reuse of the map transitions, so it should be used only for code
+  // that runs only once.
+ InitializationBlockFinder(Scope* top_scope, Target* target)
+ : enabled_(top_scope->DeclarationScope()->is_global_scope() &&
+ !IsLoopTarget(target)),
+ first_in_block_(NULL),
+ last_in_block_(NULL),
+ block_size_(0) {}
~InitializationBlockFinder() {
+ if (!enabled_) return;
if (InBlock()) EndBlock();
}
void Update(Statement* stat) {
+ if (!enabled_) return;
Assignment* assignment = AsAssignment(stat);
if (InBlock()) {
if (BlockContinues(assignment)) {
@@ -851,6 +861,14 @@ class InitializationBlockFinder : public ParserFinder {
// the overhead exceeds the savings below this limit.
static const int kMinInitializationBlock = 3;
+ static bool IsLoopTarget(Target* target) {
+ while (target != NULL) {
+ if (target->node()->AsIterationStatement() != NULL) return true;
+ target = target->previous();
+ }
+ return false;
+ }
+
// Returns true if the expressions appear to denote the same object.
// In the context of initialization blocks, we only consider expressions
// of the form 'expr.x' or expr["x"].
@@ -913,6 +931,7 @@ class InitializationBlockFinder : public ParserFinder {
bool InBlock() { return first_in_block_ != NULL; }
+ const bool enabled_;
Assignment* first_in_block_;
Assignment* last_in_block_;
int block_size_;
@@ -1078,7 +1097,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
TargetScope scope(&this->target_stack_);
ASSERT(processor != NULL);
- InitializationBlockFinder block_finder;
+ InitializationBlockFinder block_finder(top_scope_, target_stack_);
ThisNamedPropertyAssigmentFinder this_property_assignment_finder(isolate());
bool directive_prologue = true; // Parsing directive prologue.
@@ -1133,12 +1152,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
}
}
- // We find and mark the initialization blocks on top level code only.
- // This is because the optimization prevents reuse of the map transitions,
- // so it should be used only for code that will only be run once.
- if (top_scope_->is_global_scope()) {
- block_finder.Update(stat);
- }
+ block_finder.Update(stat);
// Find and mark all assignments to named properties in this (this.x =)
if (top_scope_->is_function_scope()) {
this_property_assignment_finder.Update(top_scope_, stat);
@@ -1478,9 +1492,13 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
Block* result = new(zone()) Block(labels, 16, false);
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
+ InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
Statement* stat = ParseStatement(NULL, CHECK_OK);
- if (stat && !stat->IsEmpty()) result->AddStatement(stat);
+ if (stat && !stat->IsEmpty()) {
+ result->AddStatement(stat);
+ block_finder.Update(stat);
+ }
}
Expect(Token::RBRACE, CHECK_OK);
return result;
diff --git a/deps/v8/src/platform-cygwin.cc b/deps/v8/src/platform-cygwin.cc
index 0242f7b2bd..5f283c3571 100644
--- a/deps/v8/src/platform-cygwin.cc
+++ b/deps/v8/src/platform-cygwin.cc
@@ -166,23 +166,6 @@ void OS::Free(void* address, const size_t size) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- mprotect(address, size, PROT_READ);
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- mprotect(address, size, prot);
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);
@@ -249,7 +232,6 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// This function assumes that the layout of the file is as follows:
// hex_start_addr-hex_end_addr rwxp <unused data> [binary_file_name]
// If we encounter an unexpected situation we abort scanning further entries.
@@ -306,7 +288,6 @@ void OS::LogSharedLibraryAddresses() {
}
free(lib_name);
fclose(fp);
-#endif
}
@@ -591,8 +572,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
// ----------------------------------------------------------------------------
// Cygwin profiler support.
//
@@ -769,7 +748,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
-
diff --git a/deps/v8/src/platform-freebsd.cc b/deps/v8/src/platform-freebsd.cc
index 755475a8a0..9d9f1b795a 100644
--- a/deps/v8/src/platform-freebsd.cc
+++ b/deps/v8/src/platform-freebsd.cc
@@ -181,20 +181,6 @@ void OS::Free(void* buf, const size_t length) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- UNIMPLEMENTED();
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- UNIMPLEMENTED();
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);
@@ -266,15 +252,12 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static unsigned StringToLong(char* buffer) {
return static_cast<unsigned>(strtol(buffer, NULL, 16)); // NOLINT
}
-#endif
void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
static const int MAP_LENGTH = 1024;
int fd = open("/proc/self/maps", O_RDONLY);
if (fd < 0) return;
@@ -311,7 +294,6 @@ void OS::LogSharedLibraryAddresses() {
LOG(i::Isolate::Current(), SharedLibraryEvent(start_of_path, start, end));
}
close(fd);
-#endif
}
@@ -588,8 +570,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
static pthread_t GetThreadID() {
pthread_t thread_id = pthread_self();
return thread_id;
@@ -817,6 +797,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc
index d2866cae45..ab22a79cd4 100644
--- a/deps/v8/src/platform-linux.cc
+++ b/deps/v8/src/platform-linux.cc
@@ -390,23 +390,6 @@ void OS::Free(void* address, const size_t size) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- mprotect(address, size, PROT_READ);
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- mprotect(address, size, prot);
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);
@@ -483,7 +466,6 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// This function assumes that the layout of the file is as follows:
// hex_start_addr-hex_end_addr rwxp <unused data> [binary_file_name]
// If we encounter an unexpected situation we abort scanning further entries.
@@ -540,7 +522,6 @@ void OS::LogSharedLibraryAddresses() {
}
free(lib_name);
fclose(fp);
-#endif
}
@@ -548,7 +529,6 @@ static const char kGCFakeMmap[] = "/tmp/__v8_gc__";
void OS::SignalCodeMovingGC() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Support for ll_prof.py.
//
// The Linux profiler built into the kernel logs all mmap's with
@@ -564,7 +544,6 @@ void OS::SignalCodeMovingGC() {
ASSERT(addr != MAP_FAILED);
munmap(addr, size);
fclose(f);
-#endif
}
@@ -859,8 +838,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#if !defined(__GLIBC__) && (defined(__arm__) || defined(__thumb__))
// Android runs a fairly new Linux kernel, so signal info is there,
// but the C library doesn't have the structs defined.
@@ -1148,6 +1125,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-macos.cc b/deps/v8/src/platform-macos.cc
index 104729af5d..be6e1572dc 100644
--- a/deps/v8/src/platform-macos.cc
+++ b/deps/v8/src/platform-macos.cc
@@ -169,20 +169,6 @@ void OS::Free(void* address, const size_t size) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- UNIMPLEMENTED();
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- UNIMPLEMENTED();
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
usleep(1000 * milliseconds);
}
@@ -248,7 +234,6 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
unsigned int images_count = _dyld_image_count();
for (unsigned int i = 0; i < images_count; ++i) {
const mach_header* header = _dyld_get_image_header(i);
@@ -270,7 +255,6 @@ void OS::LogSharedLibraryAddresses() {
LOG(Isolate::Current(),
SharedLibraryEvent(_dyld_get_image_name(i), start, start + size));
}
-#endif // ENABLE_LOGGING_AND_PROFILING
}
@@ -644,8 +628,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
class Sampler::PlatformData : public Malloced {
public:
PlatformData() : profiled_thread_(mach_thread_self()) {}
@@ -821,6 +803,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-nullos.cc b/deps/v8/src/platform-nullos.cc
index d309806ec7..08480ca800 100644
--- a/deps/v8/src/platform-nullos.cc
+++ b/deps/v8/src/platform-nullos.cc
@@ -217,20 +217,6 @@ void OS::Free(void* buf, const size_t length) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- UNIMPLEMENTED();
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- UNIMPLEMENTED();
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
UNIMPLEMENTED();
}
@@ -437,7 +423,6 @@ Semaphore* OS::CreateSemaphore(int count) {
return new NullSemaphore(count);
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
class ProfileSampler::PlatformData : public Malloced {
public:
@@ -472,6 +457,5 @@ void ProfileSampler::Stop() {
UNIMPLEMENTED();
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-openbsd.cc b/deps/v8/src/platform-openbsd.cc
index ceabb51f10..973329b9b1 100644
--- a/deps/v8/src/platform-openbsd.cc
+++ b/deps/v8/src/platform-openbsd.cc
@@ -179,20 +179,6 @@ void OS::Free(void* buf, const size_t length) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- UNIMPLEMENTED();
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- UNIMPLEMENTED();
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);
@@ -264,15 +250,12 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static unsigned StringToLong(char* buffer) {
return static_cast<unsigned>(strtol(buffer, NULL, 16)); // NOLINT
}
-#endif
void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
static const int MAP_LENGTH = 1024;
int fd = open("/proc/self/maps", O_RDONLY);
if (fd < 0) return;
@@ -309,7 +292,6 @@ void OS::LogSharedLibraryAddresses() {
LOG(i::Isolate::Current(), SharedLibraryEvent(start_of_path, start, end));
}
close(fd);
-#endif
}
@@ -590,8 +572,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
static pthread_t GetThreadID() {
pthread_t thread_id = pthread_self();
return thread_id;
@@ -818,6 +798,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-posix.cc b/deps/v8/src/platform-posix.cc
index 83f6c8112e..5be305af92 100644
--- a/deps/v8/src/platform-posix.cc
+++ b/deps/v8/src/platform-posix.cc
@@ -37,6 +37,7 @@
#include <sys/resource.h>
#include <sys/time.h>
#include <sys/types.h>
+#include <sys/stat.h>
#include <arpa/inet.h>
#include <netinet/in.h>
@@ -130,7 +131,14 @@ int OS::GetLastError() {
//
FILE* OS::FOpen(const char* path, const char* mode) {
- return fopen(path, mode);
+ FILE* file = fopen(path, mode);
+ if (file == NULL) return NULL;
+ struct stat file_stat;
+ if (fstat(fileno(file), &file_stat) != 0) return NULL;
+ bool is_regular_file = ((file_stat.st_mode & S_IFREG) != 0);
+ if (is_regular_file) return file;
+ fclose(file);
+ return NULL;
}
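
Annotation: OS::FOpen now refuses anything that is not a regular file. A standalone sketch of the same fopen + fstat guard (this version also closes the handle when fstat itself fails):

    #include <cstdio>
    #include <sys/stat.h>

    FILE* FOpenRegularOnly(const char* path, const char* mode) {
      FILE* file = std::fopen(path, mode);
      if (file == NULL) return NULL;
      struct stat file_stat;
      if (fstat(fileno(file), &file_stat) != 0 ||
          (file_stat.st_mode & S_IFREG) == 0) {
        std::fclose(file);
        return NULL;
      }
      return file;
    }

    int main() {
      // On many systems fopen("/tmp", "r") succeeds, but /tmp is not a
      // regular file, so the guard rejects it.
      std::printf("dir accepted: %s\n",
                  FOpenRegularOnly("/tmp", "r") != NULL ? "yes" : "no");
      return 0;
    }
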
@@ -139,6 +147,11 @@ bool OS::Remove(const char* path) {
}
+FILE* OS::OpenTemporaryFile() {
+ return tmpfile();
+}
+
+
const char* const OS::LogFileOpenMode = "w";
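
Annotation: on POSIX the new OS::OpenTemporaryFile is just tmpfile(), which already provides the delete-on-close behavior the header documents; the Win32 port later in this diff has to emulate it. A trivial standalone check of that contract:

    #include <cstdio>

    int main() {
      FILE* f = std::tmpfile();        // anonymous temporary, removed when closed
      if (f == NULL) return 1;
      std::fputs("scratch data\n", f);
      std::fclose(f);                  // the file disappears here
      return 0;
    }
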
diff --git a/deps/v8/src/platform-solaris.cc b/deps/v8/src/platform-solaris.cc
index ca15b07f11..1e79f102f5 100644
--- a/deps/v8/src/platform-solaris.cc
+++ b/deps/v8/src/platform-solaris.cc
@@ -192,23 +192,6 @@ void OS::Free(void* address, const size_t size) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- mprotect(address, size, PROT_READ);
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- mprotect(address, size, prot);
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
useconds_t ms = static_cast<useconds_t>(milliseconds);
usleep(1000 * ms);
@@ -589,8 +572,6 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
static pthread_t GetThreadID() {
return pthread_self();
}
@@ -817,6 +798,4 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-tls.h b/deps/v8/src/platform-tls.h
index 564917540b..32516636be 100644
--- a/deps/v8/src/platform-tls.h
+++ b/deps/v8/src/platform-tls.h
@@ -30,7 +30,7 @@
#ifndef V8_PLATFORM_TLS_H_
#define V8_PLATFORM_TLS_H_
-#ifdef V8_FAST_TLS
+#ifndef V8_NO_FAST_TLS
// When fast TLS is requested we include the appropriate
// implementation header.
diff --git a/deps/v8/src/platform-win32.cc b/deps/v8/src/platform-win32.cc
index c226e2f3f8..35b1a8eced 100644
--- a/deps/v8/src/platform-win32.cc
+++ b/deps/v8/src/platform-win32.cc
@@ -44,11 +44,6 @@
namespace v8 {
namespace internal {
-intptr_t OS::MaxVirtualMemory() {
- return 0;
-}
-
-
// Test for finite value - usually defined in math.h
int isfinite(double x) {
return _finite(x);
@@ -143,16 +138,39 @@ int fopen_s(FILE** pFile, const char* filename, const char* mode) {
}
+#define _TRUNCATE 0
+#define STRUNCATE 80
+
int _vsnprintf_s(char* buffer, size_t sizeOfBuffer, size_t count,
const char* format, va_list argptr) {
+ ASSERT(count == _TRUNCATE);
return _vsnprintf(buffer, sizeOfBuffer, format, argptr);
}
-#define _TRUNCATE 0
-int strncpy_s(char* strDest, size_t numberOfElements,
- const char* strSource, size_t count) {
- strncpy(strDest, strSource, count);
+int strncpy_s(char* dest, size_t dest_size, const char* source, size_t count) {
+ CHECK(source != NULL);
+ CHECK(dest != NULL);
+ CHECK_GT(dest_size, 0);
+
+ if (count == _TRUNCATE) {
+ while (dest_size > 0 && *source != 0) {
+ *(dest++) = *(source++);
+ --dest_size;
+ }
+ if (dest_size == 0) {
+ *(dest - 1) = 0;
+ return STRUNCATE;
+ }
+ } else {
+ while (dest_size > 0 && count > 0 && *source != 0) {
+ *(dest++) = *(source++);
+ --dest_size;
+ --count;
+ }
+ }
+ CHECK_GT(dest_size, 0);
+ *dest = 0;
return 0;
}
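
Annotation: the strncpy_s emulation above gains real _TRUNCATE semantics: copy what fits, always NUL-terminate, and report STRUNCATE when the source had to be cut short. A standalone sketch that restates the logic so it compiles outside the Win32 port and exercises both outcomes:

    #include <cassert>
    #include <cstddef>
    #include <cstring>

    const size_t kTruncate = 0;   // stands in for _TRUNCATE
    const int kStrTruncate = 80;  // stands in for STRUNCATE

    int strncpy_s_sketch(char* dest, size_t dest_size, const char* source,
                         size_t count) {
      if (count == kTruncate) {
        while (dest_size > 0 && *source != 0) { *(dest++) = *(source++); --dest_size; }
        if (dest_size == 0) { *(dest - 1) = 0; return kStrTruncate; }
      } else {
        while (dest_size > 0 && count > 0 && *source != 0) {
          *(dest++) = *(source++); --dest_size; --count;
        }
      }
      assert(dest_size > 0);  // the real code CHECKs this before terminating
      *dest = 0;
      return 0;
    }

    int main() {
      char buf[4];
      assert(strncpy_s_sketch(buf, sizeof(buf), "ab", kTruncate) == 0);
      assert(std::strcmp(buf, "ab") == 0);
      assert(strncpy_s_sketch(buf, sizeof(buf), "abcdef", kTruncate) == kStrTruncate);
      assert(std::strcmp(buf, "abc") == 0);  // truncated but NUL-terminated
      return 0;
    }
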
@@ -174,6 +192,11 @@ int random() {
namespace v8 {
namespace internal {
+intptr_t OS::MaxVirtualMemory() {
+ return 0;
+}
+
+
double ceiling(double x) {
return ceil(x);
}
@@ -717,6 +740,24 @@ bool OS::Remove(const char* path) {
}
+FILE* OS::OpenTemporaryFile() {
+  // tmpfile_s tries to use the root directory, so don't use it.
+ char tempPathBuffer[MAX_PATH];
+ DWORD path_result = 0;
+ path_result = GetTempPathA(MAX_PATH, tempPathBuffer);
+ if (path_result > MAX_PATH || path_result == 0) return NULL;
+ UINT name_result = 0;
+ char tempNameBuffer[MAX_PATH];
+ name_result = GetTempFileNameA(tempPathBuffer, "", 0, tempNameBuffer);
+ if (name_result == 0) return NULL;
+ FILE* result = FOpen(tempNameBuffer, "w+"); // Same mode as tmpfile uses.
+ if (result != NULL) {
+ Remove(tempNameBuffer); // Delete on close.
+ }
+ return result;
+}
+
+
// Open log file in binary mode to avoid \n -> \r\n conversion.
const char* const OS::LogFileOpenMode = "wb";
@@ -916,25 +957,6 @@ void OS::Free(void* address, const size_t size) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- // TODO(1240712): VirtualProtect has a return value which is ignored here.
- DWORD old_protect;
- VirtualProtect(address, size, PAGE_READONLY, &old_protect);
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- // TODO(1240712): VirtualProtect has a return value which is ignored here.
- DWORD new_protect = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
- DWORD old_protect;
- VirtualProtect(address, size, new_protect, &old_protect);
-}
-
-#endif
-
-
void OS::Sleep(int milliseconds) {
::Sleep(milliseconds);
}
@@ -1835,8 +1857,6 @@ Socket* OS::CreateSocket() {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
// ----------------------------------------------------------------------------
// Win32 profiler support.
@@ -2011,6 +2031,5 @@ void Sampler::Stop() {
SetActive(false);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
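
For reference, a minimal usage sketch of the strncpy_s shim added above. Only the shim itself ships with this patch; everything else below is illustrative and assumes the shim (or the MSVC CRT equivalent) and its _TRUNCATE/STRUNCATE macros are in scope.

// Illustrative only: exercises the bounded-copy and _TRUNCATE paths of the shim.
#include <cstdio>

int main() {
  char small[8];
  // Bounded copy: up to 'count' characters are copied and the result is NUL-terminated.
  int rc1 = strncpy_s(small, sizeof(small), "abc", 3);                  // rc1 == 0, small == "abc"
  // _TRUNCATE (0): copy as much as fits, terminate, and report truncation.
  int rc2 = strncpy_s(small, sizeof(small), "a very long string", _TRUNCATE);
  std::printf("%s rc1=%d rc2=%d\n", small, rc1, rc2);                   // rc2 == STRUNCATE (80)
  return 0;
}

Note that with _TRUNCATE the shim never aborts; with an explicit count it CHECKs that at least one byte of the destination remains for the terminator.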
diff --git a/deps/v8/src/platform.h b/deps/v8/src/platform.h
index 06d3ca4676..c7fe984c45 100644
--- a/deps/v8/src/platform.h
+++ b/deps/v8/src/platform.h
@@ -177,6 +177,9 @@ class OS {
static FILE* FOpen(const char* path, const char* mode);
static bool Remove(const char* path);
+ // Opens a temporary file; the file is automatically removed on close.
+ static FILE* OpenTemporaryFile();
+
// Log file open mode is platform-dependent due to line-ending issues.
static const char* const LogFileOpenMode;
@@ -206,12 +209,6 @@ class OS {
// Get the Alignment guaranteed by Allocate().
static size_t AllocateAlignment();
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect a block of memory by marking it read-only/writable.
- static void Protect(void* address, size_t size);
- static void Unprotect(void* address, size_t size, bool is_executable);
-#endif
-
// Returns an indication of whether a pointer is in a space that
// has been allocated by Allocate(). This method may conservatively
// always return false, but giving more accurate information may
@@ -603,7 +600,6 @@ class TickSample {
bool has_external_callback : 1;
};
-#ifdef ENABLE_LOGGING_AND_PROFILING
class Sampler {
public:
// Initialize sampler.
@@ -662,8 +658,6 @@ class Sampler {
};
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
#endif // V8_PLATFORM_H_
diff --git a/deps/v8/src/profile-generator-inl.h b/deps/v8/src/profile-generator-inl.h
index 747e5c7271..8f4bc6c1f2 100644
--- a/deps/v8/src/profile-generator-inl.h
+++ b/deps/v8/src/profile-generator-inl.h
@@ -28,8 +28,6 @@
#ifndef V8_PROFILE_GENERATOR_INL_H_
#define V8_PROFILE_GENERATOR_INL_H_
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "profile-generator.h"
namespace v8 {
@@ -123,6 +121,4 @@ uint64_t HeapEntry::id() {
} } // namespace v8::internal
-#endif // ENABLE_LOGGING_AND_PROFILING
-
#endif // V8_PROFILE_GENERATOR_INL_H_
diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc
index 34d7aa6347..07426f2939 100644
--- a/deps/v8/src/profile-generator.cc
+++ b/deps/v8/src/profile-generator.cc
@@ -25,8 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "v8.h"
#include "profile-generator-inl.h"
@@ -3259,5 +3257,3 @@ String* GetConstructorNameForHeapProfile(JSObject* object) {
}
} } // namespace v8::internal
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h
index 3d0584b630..d1c2b3804a 100644
--- a/deps/v8/src/profile-generator.h
+++ b/deps/v8/src/profile-generator.h
@@ -28,8 +28,6 @@
#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "allocation.h"
#include "hashmap.h"
#include "../include/v8-profiler.h"
@@ -1126,6 +1124,4 @@ String* GetConstructorNameForHeapProfile(JSObject* object);
} } // namespace v8::internal
-#endif // ENABLE_LOGGING_AND_PROFILING
-
#endif // V8_PROFILE_GENERATOR_H_
diff --git a/deps/v8/src/property.h b/deps/v8/src/property.h
index 87f9ea3d54..ddecc92198 100644
--- a/deps/v8/src/property.h
+++ b/deps/v8/src/property.h
@@ -206,6 +206,7 @@ class LookupResult BASE_EMBEDDED {
lookup_type_ = HANDLER_TYPE;
holder_ = NULL;
details_ = PropertyDetails(NONE, HANDLER);
+ cacheable_ = false;
}
void InterceptorResult(JSObject* holder) {
diff --git a/deps/v8/src/proxy.js b/deps/v8/src/proxy.js
index cb9c020e35..27524bd918 100644
--- a/deps/v8/src/proxy.js
+++ b/deps/v8/src/proxy.js
@@ -135,3 +135,15 @@ function DerivedSetTrap(receiver, name, val) {
function DerivedHasTrap(name) {
return !!this.getPropertyDescriptor(name)
}
+
+function DerivedKeysTrap() {
+ var names = this.getOwnPropertyNames()
+ var enumerableNames = []
+ for (var i = 0, count = 0; i < names.length; ++i) {
+ var name = names[i]
+ if (this.getOwnPropertyDescriptor(TO_STRING_INLINE(name)).enumerable) {
+ enumerableNames[count++] = names[i]
+ }
+ }
+ return enumerableNames
+}
diff --git a/deps/v8/src/runtime-profiler.cc b/deps/v8/src/runtime-profiler.cc
index 7a0dd91f70..917f6d0d66 100644
--- a/deps/v8/src/runtime-profiler.cc
+++ b/deps/v8/src/runtime-profiler.cc
@@ -61,9 +61,7 @@ static const int kSizeLimit = 1500;
Atomic32 RuntimeProfiler::state_ = 0;
// TODO(isolates): Create the semaphore lazily and clean it up when no
// longer required.
-#ifdef ENABLE_LOGGING_AND_PROFILING
Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
-#endif
#ifdef DEBUG
bool RuntimeProfiler::has_been_globally_setup_ = false;
@@ -245,9 +243,7 @@ void RuntimeProfiler::OptimizeNow() {
void RuntimeProfiler::NotifyTick() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
isolate_->stack_guard()->RequestRuntimeProfilerTick();
-#endif
}
@@ -295,7 +291,6 @@ void RuntimeProfiler::UpdateSamplesAfterScavenge() {
void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// The profiler thread must still be waiting.
ASSERT(NoBarrier_Load(&state_) >= 0);
// In IsolateEnteredJS we have already incremented the counter and
@@ -303,7 +298,6 @@ void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
// to get the right count of active isolates.
NoBarrier_AtomicIncrement(&state_, 1);
semaphore_->Signal();
-#endif
}
@@ -313,18 +307,15 @@ bool RuntimeProfiler::IsSomeIsolateInJS() {
bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
ASSERT(old_state >= -1);
if (old_state != 0) return false;
semaphore_->Wait();
-#endif
return true;
}
void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Do a fake increment. If the profiler is waiting on the semaphore,
// the returned state is 0, which can be left as an initial state in
// case profiling is restarted later. If the profiler is not
@@ -343,7 +334,6 @@ void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
if (new_state != 0) {
NoBarrier_AtomicIncrement(&state_, -1);
}
-#endif
}
@@ -365,11 +355,9 @@ void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (!RuntimeProfiler::IsSomeIsolateInJS()) {
return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
}
-#endif
return false;
}
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index 5e96ef8e1c..56507aeb42 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -628,11 +628,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
Object* obj = args[0];
- obj = obj->GetPrototype();
- while (obj->IsJSObject() &&
- JSObject::cast(obj)->map()->is_hidden_prototype()) {
+ do {
obj = obj->GetPrototype();
- }
+ } while (obj->IsJSObject() &&
+ JSObject::cast(obj)->map()->is_hidden_prototype());
return obj;
}
@@ -10060,8 +10059,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
int position =
it.frame()->LookupCode()->SourcePosition(it.frame()->pc());
- // Check for constructor frame.
- bool constructor = it.frame()->IsConstructor();
+ // Check for constructor frame. Inlined frames cannot be construct calls.
+ bool inlined_frame =
+ it.frame()->is_optimized() && deoptimized_frame_index != 0;
+ bool constructor = !inlined_frame && it.frame()->IsConstructor();
// Get scope info and read from it for local variable information.
Handle<JSFunction> function(JSFunction::cast(it.frame()->function()));
@@ -10151,8 +10152,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Find the number of arguments to fill. At least fill the number of
// parameters for the function and fill more if more parameters are provided.
int argument_count = info.number_of_parameters();
- if (argument_count < it.frame()->ComputeParametersCount()) {
- argument_count = it.frame()->ComputeParametersCount();
+ if (it.frame()->is_optimized()) {
+ ASSERT_EQ(argument_count, deoptimized_frame->parameters_count());
+ } else {
+ if (argument_count < it.frame()->ComputeParametersCount()) {
+ argument_count = it.frame()->ComputeParametersCount();
+ }
}
// Calculate the size of the result.
@@ -10165,7 +10170,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
details->set(kFrameDetailsFrameIdIndex, *frame_id);
// Add the function (same as in function frame).
- details->set(kFrameDetailsFunctionIndex, it.frame()->function());
+ if (it.frame()->is_optimized()) {
+ // Get the function from the deoptimized frame.
+ details->set(kFrameDetailsFunctionIndex, deoptimized_frame->GetFunction());
+ } else {
+ // Get the function from the stack.
+ details->set(kFrameDetailsFunctionIndex, it.frame()->function());
+ }
// Add the arguments count.
details->set(kFrameDetailsArgumentCountIndex, Smi::FromInt(argument_count));
@@ -10215,16 +10226,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
details->set(details_index++, heap->undefined_value());
}
- // Parameter value. If we are inspecting an optimized frame, use
- // undefined as the value.
- //
- // TODO(3141533): We should be able to get the actual parameter
- // value for optimized frames.
- if (!it.frame()->is_optimized() &&
- (i < it.frame()->ComputeParametersCount())) {
- details->set(details_index++, it.frame()->GetParameter(i));
+ // Parameter value.
+ if (it.frame()->is_optimized()) {
+ // Get the value from the deoptimized frame.
+ details->set(details_index++, deoptimized_frame->GetParameter(i));
} else {
- details->set(details_index++, heap->undefined_value());
+ if (i < it.frame()->ComputeParametersCount()) {
+ // Get the value from the stack.
+ details->set(details_index++, it.frame()->GetParameter(i));
+ } else {
+ details->set(details_index++, heap->undefined_value());
+ }
}
}
@@ -12133,7 +12145,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SummarizeLOL) {
#endif // ENABLE_DEBUGGER_SUPPORT
-#ifdef ENABLE_LOGGING_AND_PROFILING
RUNTIME_FUNCTION(MaybeObject*, Runtime_ProfilerResume) {
NoHandleAllocation ha;
v8::V8::ResumeProfiler();
@@ -12147,7 +12158,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ProfilerPause) {
return isolate->heap()->undefined_value();
}
-#endif // ENABLE_LOGGING_AND_PROFILING
// Finds the script object from the script data. NOTE: This operation uses
// heap traversal to find the function generated for the source position
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index 0900fd3606..e59c82cd64 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -349,7 +349,10 @@ namespace internal {
F(HasExternalIntElements, 1, 1) \
F(HasExternalUnsignedIntElements, 1, 1) \
F(HasExternalFloatElements, 1, 1) \
- F(HasExternalDoubleElements, 1, 1)
+ F(HasExternalDoubleElements, 1, 1) \
+ /* profiler */ \
+ F(ProfilerResume, 0, 1) \
+ F(ProfilerPause, 0, 1)
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -427,14 +430,6 @@ namespace internal {
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
-#define RUNTIME_FUNCTION_LIST_PROFILER_SUPPORT(F) \
- F(ProfilerResume, 0, 1) \
- F(ProfilerPause, 0, 1)
-#else
-#define RUNTIME_FUNCTION_LIST_PROFILER_SUPPORT(F)
-#endif
-
#ifdef DEBUG
#define RUNTIME_FUNCTION_LIST_DEBUG(F) \
/* Testing */ \
@@ -452,8 +447,7 @@ namespace internal {
RUNTIME_FUNCTION_LIST_ALWAYS_1(F) \
RUNTIME_FUNCTION_LIST_ALWAYS_2(F) \
RUNTIME_FUNCTION_LIST_DEBUG(F) \
- RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \
- RUNTIME_FUNCTION_LIST_PROFILER_SUPPORT(F)
+ RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
// ----------------------------------------------------------------------------
// INLINE_FUNCTION_LIST defines all inlined functions accessed
diff --git a/deps/v8/src/scopeinfo.cc b/deps/v8/src/scopeinfo.cc
index ccc2cc8200..3e18368f74 100644
--- a/deps/v8/src/scopeinfo.cc
+++ b/deps/v8/src/scopeinfo.cc
@@ -32,6 +32,8 @@
#include "scopeinfo.h"
#include "scopes.h"
+#include "allocation-inl.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/scopes.cc b/deps/v8/src/scopes.cc
index 5546875c4b..c6e2a4650e 100644
--- a/deps/v8/src/scopes.cc
+++ b/deps/v8/src/scopes.cc
@@ -34,6 +34,8 @@
#include "prettyprinter.h"
#include "scopeinfo.h"
+#include "allocation-inl.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index d960afde40..8cde580fbb 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -284,7 +284,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
const char* AddressNames[] = {
#define C(name) "Isolate::" #name,
ISOLATE_ADDRESS_LIST(C)
- ISOLATE_ADDRESS_LIST_PROF(C)
NULL
#undef C
};
diff --git a/deps/v8/src/spaces-inl.h b/deps/v8/src/spaces-inl.h
index 070f970577..ca1177f33c 100644
--- a/deps/v8/src/spaces-inl.h
+++ b/deps/v8/src/spaces-inl.h
@@ -378,35 +378,6 @@ bool MemoryAllocator::InInitialChunk(Address address) {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void MemoryAllocator::Protect(Address start, size_t size) {
- OS::Protect(start, size);
-}
-
-
-void MemoryAllocator::Unprotect(Address start,
- size_t size,
- Executability executable) {
- OS::Unprotect(start, size, executable);
-}
-
-
-void MemoryAllocator::ProtectChunkFromPage(Page* page) {
- int id = GetChunkId(page);
- OS::Protect(chunks_[id].address(), chunks_[id].size());
-}
-
-
-void MemoryAllocator::UnprotectChunkFromPage(Page* page) {
- int id = GetChunkId(page);
- OS::Unprotect(chunks_[id].address(), chunks_[id].size(),
- chunks_[id].owner()->executable() == EXECUTABLE);
-}
-
-#endif
-
-
// --------------------------------------------------------------------------
// PagedSpace
diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc
index 23c87cd0c5..d41ce5589c 100644
--- a/deps/v8/src/spaces.cc
+++ b/deps/v8/src/spaces.cc
@@ -868,30 +868,6 @@ void PagedSpace::TearDown() {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void PagedSpace::Protect() {
- Page* page = first_page_;
- while (page->is_valid()) {
- Isolate::Current()->memory_allocator()->ProtectChunkFromPage(page);
- page = Isolate::Current()->memory_allocator()->
- FindLastPageInSameChunk(page)->next_page();
- }
-}
-
-
-void PagedSpace::Unprotect() {
- Page* page = first_page_;
- while (page->is_valid()) {
- Isolate::Current()->memory_allocator()->UnprotectChunkFromPage(page);
- page = Isolate::Current()->memory_allocator()->
- FindLastPageInSameChunk(page)->next_page();
- }
-}
-
-#endif
-
-
void PagedSpace::MarkAllPagesClean() {
PageIterator it(this, PageIterator::ALL_PAGES);
while (it.has_next()) {
@@ -1196,7 +1172,6 @@ bool NewSpace::Setup(Address start, int size) {
ASSERT(IsPowerOf2(maximum_semispace_capacity));
// Allocate and setup the histogram arrays if necessary.
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
allocated_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
promoted_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
@@ -1204,7 +1179,6 @@ bool NewSpace::Setup(Address start, int size) {
promoted_histogram_[name].set_name(#name);
INSTANCE_TYPE_LIST(SET_NAME)
#undef SET_NAME
-#endif
ASSERT(size == 2 * heap()->ReservedSemiSpaceSize());
ASSERT(IsAddressAligned(start, size, 0));
@@ -1236,7 +1210,6 @@ bool NewSpace::Setup(Address start, int size) {
void NewSpace::TearDown() {
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
if (allocated_histogram_) {
DeleteArray(allocated_histogram_);
allocated_histogram_ = NULL;
@@ -1245,7 +1218,6 @@ void NewSpace::TearDown() {
DeleteArray(promoted_histogram_);
promoted_histogram_ = NULL;
}
-#endif
start_ = NULL;
allocation_info_.top = NULL;
@@ -1258,24 +1230,6 @@ void NewSpace::TearDown() {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void NewSpace::Protect() {
- heap()->isolate()->memory_allocator()->Protect(ToSpaceLow(), Capacity());
- heap()->isolate()->memory_allocator()->Protect(FromSpaceLow(), Capacity());
-}
-
-
-void NewSpace::Unprotect() {
- heap()->isolate()->memory_allocator()->Unprotect(ToSpaceLow(), Capacity(),
- to_space_.executable());
- heap()->isolate()->memory_allocator()->Unprotect(FromSpaceLow(), Capacity(),
- from_space_.executable());
-}
-
-#endif
-
-
void NewSpace::Flip() {
SemiSpace tmp = from_space_;
from_space_ = to_space_;
@@ -1638,7 +1592,6 @@ static void ReportHistogram(bool print_spill) {
// Support for statistics gathering for --heap-stats and --log-gc.
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
void NewSpace::ClearHistograms() {
for (int i = 0; i <= LAST_TYPE; i++) {
allocated_histogram_[i].clear();
@@ -1648,9 +1601,7 @@ void NewSpace::ClearHistograms() {
// Because the copying collector does not touch garbage objects, we iterate
// the new space before a collection to get a histogram of allocated objects.
-// This only happens (1) when compiled with DEBUG and the --heap-stats flag is
-// set, or when compiled with ENABLE_LOGGING_AND_PROFILING and the --log-gc
-// flag is set.
+// This only happens when the --log-gc flag is set.
void NewSpace::CollectStatistics() {
ClearHistograms();
SemiSpaceIterator it(this);
@@ -1659,7 +1610,6 @@ void NewSpace::CollectStatistics() {
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
static void DoReportStatistics(Isolate* isolate,
HistogramInfo* info, const char* description) {
LOG(isolate, HeapSampleBeginEvent("NewSpace", description));
@@ -1686,7 +1636,6 @@ static void DoReportStatistics(Isolate* isolate,
}
LOG(isolate, HeapSampleEndEvent("NewSpace", description));
}
-#endif // ENABLE_LOGGING_AND_PROFILING
void NewSpace::ReportStatistics() {
@@ -1709,13 +1658,11 @@ void NewSpace::ReportStatistics() {
}
#endif // DEBUG
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_gc) {
Isolate* isolate = ISOLATE;
DoReportStatistics(isolate, allocated_histogram_, "allocated");
DoReportStatistics(isolate, promoted_histogram_, "promoted");
}
-#endif // ENABLE_LOGGING_AND_PROFILING
}
@@ -1733,7 +1680,6 @@ void NewSpace::RecordPromotion(HeapObject* obj) {
promoted_histogram_[type].increment_number(1);
promoted_histogram_[type].increment_bytes(obj->Size());
}
-#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// -----------------------------------------------------------------------------
@@ -2809,31 +2755,6 @@ void LargeObjectSpace::TearDown() {
}
-#ifdef ENABLE_HEAP_PROTECTION
-
-void LargeObjectSpace::Protect() {
- LargeObjectChunk* chunk = first_chunk_;
- while (chunk != NULL) {
- heap()->isolate()->memory_allocator()->Protect(chunk->address(),
- chunk->size());
- chunk = chunk->next();
- }
-}
-
-
-void LargeObjectSpace::Unprotect() {
- LargeObjectChunk* chunk = first_chunk_;
- while (chunk != NULL) {
- bool is_code = chunk->GetObject()->IsCode();
- heap()->isolate()->memory_allocator()->Unprotect(chunk->address(),
- chunk->size(), is_code ? EXECUTABLE : NOT_EXECUTABLE);
- chunk = chunk->next();
- }
-}
-
-#endif
-
-
MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,
int object_size,
Executability executable) {
diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h
index 4024387cd8..c554a37771 100644
--- a/deps/v8/src/spaces.h
+++ b/deps/v8/src/spaces.h
@@ -380,12 +380,6 @@ class Space : public Malloced {
// (e.g. see LargeObjectSpace).
virtual intptr_t SizeOfObjects() { return Size(); }
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the space by marking it read-only/writable.
- virtual void Protect() = 0;
- virtual void Unprotect() = 0;
-#endif
-
#ifdef DEBUG
virtual void Print() = 0;
#endif
@@ -641,17 +635,6 @@ class MemoryAllocator {
Page** last_page,
Page** last_page_in_use);
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect a block of memory by marking it read-only/writable.
- inline void Protect(Address start, size_t size);
- inline void Unprotect(Address start, size_t size,
- Executability executable);
-
- // Protect/unprotect a chunk given a page in the chunk.
- inline void ProtectChunkFromPage(Page* page);
- inline void UnprotectChunkFromPage(Page* page);
-#endif
-
#ifdef DEBUG
// Reports statistic info of the space.
void ReportStatistics();
@@ -1157,12 +1140,6 @@ class PagedSpace : public Space {
// Ensures that the capacity is at least 'capacity'. Returns false on failure.
bool EnsureCapacity(int capacity);
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the space by marking it read-only/writable.
- void Protect();
- void Unprotect();
-#endif
-
#ifdef DEBUG
// Print meta info and objects in this space.
virtual void Print();
@@ -1270,7 +1247,6 @@ class PagedSpace : public Space {
};
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
class NumberAndSizeInfo BASE_EMBEDDED {
public:
NumberAndSizeInfo() : number_(0), bytes_(0) {}
@@ -1293,9 +1269,7 @@ class NumberAndSizeInfo BASE_EMBEDDED {
// HistogramInfo class for recording a single "bar" of a histogram. This
-// class is used for collecting statistics to print to stdout (when compiled
-// with DEBUG) or to the log file (when compiled with
-// ENABLE_LOGGING_AND_PROFILING).
+// class is used for collecting statistics to print to the log file.
class HistogramInfo: public NumberAndSizeInfo {
public:
HistogramInfo() : NumberAndSizeInfo() {}
@@ -1306,7 +1280,6 @@ class HistogramInfo: public NumberAndSizeInfo {
private:
const char* name_;
};
-#endif
// -----------------------------------------------------------------------------
@@ -1392,12 +1365,6 @@ class SemiSpace : public Space {
bool Commit();
bool Uncommit();
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the space by marking it read-only/writable.
- virtual void Protect() {}
- virtual void Unprotect() {}
-#endif
-
#ifdef DEBUG
virtual void Print();
virtual void Verify();
@@ -1628,12 +1595,6 @@ class NewSpace : public Space {
template <typename StringType>
inline void ShrinkStringAtAllocationBoundary(String* string, int len);
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the space by marking it read-only/writable.
- virtual void Protect();
- virtual void Unprotect();
-#endif
-
#ifdef DEBUG
// Verify the active semispace.
virtual void Verify();
@@ -1641,7 +1602,6 @@ class NewSpace : public Space {
virtual void Print() { to_space_.Print(); }
#endif
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// Iterates the active semispace to collect statistics.
void CollectStatistics();
// Reports previously collected statistics of the active semispace.
@@ -1654,7 +1614,6 @@ class NewSpace : public Space {
// to space during a scavenge GC.
void RecordAllocation(HeapObject* obj);
void RecordPromotion(HeapObject* obj);
-#endif
// Return whether the operation succeeded.
bool CommitFromSpaceIfNeeded() {
@@ -1683,10 +1642,8 @@ class NewSpace : public Space {
AllocationInfo allocation_info_;
AllocationInfo mc_forwarding_info_;
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
HistogramInfo* allocated_histogram_;
HistogramInfo* promoted_histogram_;
-#endif
// Implementation of AllocateRaw and MCAllocateRaw.
MUST_USE_RESULT inline MaybeObject* AllocateRawInternal(
@@ -2296,12 +2253,6 @@ class LargeObjectSpace : public Space {
// may use some memory, leaving less for large objects.
virtual bool ReserveSpace(int bytes);
-#ifdef ENABLE_HEAP_PROTECTION
- // Protect/unprotect the space by marking it read-only/writable.
- void Protect();
- void Unprotect();
-#endif
-
#ifdef DEBUG
virtual void Verify();
virtual void Print();
diff --git a/deps/v8/src/string-stream.cc b/deps/v8/src/string-stream.cc
index aea142042b..9002593bdd 100644
--- a/deps/v8/src/string-stream.cc
+++ b/deps/v8/src/string-stream.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,6 +30,8 @@
#include "factory.h"
#include "string-stream.h"
+#include "allocation-inl.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc
index eb813814d4..79cd7a0d22 100644
--- a/deps/v8/src/stub-cache.cc
+++ b/deps/v8/src/stub-cache.cc
@@ -1686,23 +1686,6 @@ MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type,
}
-MaybeObject* KeyedLoadStubCompiler::ComputeSharedKeyedLoadElementStub(
- Map* receiver_map) {
- MaybeObject* maybe_stub = NULL;
- if (receiver_map->has_fast_elements()) {
- maybe_stub = KeyedLoadFastElementStub().TryGetCode();
- } else if (receiver_map->has_external_array_elements()) {
- JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
- maybe_stub = KeyedLoadExternalArrayStub(elements_kind).TryGetCode();
- } else if (receiver_map->has_dictionary_elements()) {
- maybe_stub = isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Slow);
- } else {
- UNREACHABLE();
- }
- return maybe_stub;
-}
-
-
MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
Code::Flags flags = Code::ComputeMonomorphicFlags(
Code::STORE_IC, type, strict_mode_);
@@ -1739,21 +1722,9 @@ MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type,
}
-MaybeObject* KeyedStoreStubCompiler::ComputeSharedKeyedStoreElementStub(
- Map* receiver_map) {
- MaybeObject* maybe_stub = NULL;
- if (receiver_map->has_fast_elements()) {
- bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
- maybe_stub = KeyedStoreFastElementStub(is_js_array).TryGetCode();
- } else if (receiver_map->has_external_array_elements()) {
- JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
- maybe_stub = KeyedStoreExternalArrayStub(elements_kind).TryGetCode();
- } else if (receiver_map->has_dictionary_elements()) {
- maybe_stub = isolate()->builtins()->builtin(Builtins::kKeyedStoreIC_Slow);
- } else {
- UNREACHABLE();
- }
- return maybe_stub;
+void KeyedStoreStubCompiler::GenerateStoreDictionaryElement(
+ MacroAssembler* masm) {
+ KeyedStoreIC::GenerateSlow(masm);
}
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index fa2676061d..93c50fa988 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -662,12 +662,12 @@ class KeyedLoadStubCompiler: public StubCompiler {
static void GenerateLoadFastElement(MacroAssembler* masm);
+ static void GenerateLoadDictionaryElement(MacroAssembler* masm);
+
private:
MaybeObject* GetCode(PropertyType type,
String* name,
InlineCacheState state = MONOMORPHIC);
-
- MaybeObject* ComputeSharedKeyedLoadElementStub(Map* receiver_map);
};
@@ -720,13 +720,13 @@ class KeyedStoreStubCompiler: public StubCompiler {
static void GenerateStoreExternalArray(MacroAssembler* masm,
JSObject::ElementsKind elements_kind);
+ static void GenerateStoreDictionaryElement(MacroAssembler* masm);
+
private:
MaybeObject* GetCode(PropertyType type,
String* name,
InlineCacheState state = MONOMORPHIC);
- MaybeObject* ComputeSharedKeyedStoreElementStub(Map* receiver_map);
-
StrictModeFlag strict_mode_;
};
diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h
index 75aabe8c62..0a8c935dfa 100644
--- a/deps/v8/src/type-info.h
+++ b/deps/v8/src/type-info.h
@@ -30,7 +30,6 @@
#include "allocation.h"
#include "globals.h"
-#include "zone.h"
#include "zone-inl.h"
namespace v8 {
diff --git a/deps/v8/src/v8globals.h b/deps/v8/src/v8globals.h
index a23ca194aa..d86f299f4e 100644
--- a/deps/v8/src/v8globals.h
+++ b/deps/v8/src/v8globals.h
@@ -395,12 +395,11 @@ struct AccessorDescriptor {
};
-// Logging and profiling.
-// A StateTag represents a possible state of the VM. When compiled with
-// ENABLE_VMSTATE_TRACKING, the logger maintains a stack of these.
-// Creating a VMState object enters a state by pushing on the stack, and
-// destroying a VMState object leaves a state by popping the current state
-// from the stack.
+// Logging and profiling. A StateTag represents a possible state of
+// the VM. The logger maintains a stack of these. Creating a VMState
+// object enters a state by pushing on the stack, and destroying a
+// VMState object leaves a state by popping the current state from the
+// stack.
#define STATE_TAG_LIST(V) \
V(JS) \
diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js
index 0afe231c8c..53a03173b3 100644
--- a/deps/v8/src/v8natives.js
+++ b/deps/v8/src/v8natives.js
@@ -308,6 +308,13 @@ function ObjectLookupSetter(name) {
function ObjectKeys(obj) {
if (!IS_SPEC_OBJECT(obj))
throw MakeTypeError("obj_ctor_property_non_object", ["keys"]);
+ if (%IsJSProxy(obj)) {
+ var handler = %GetHandler(obj);
+ var keys = handler.keys;
+ if (IS_UNDEFINED(keys)) keys = DerivedKeysTrap;
+ var names = %_CallFunction(handler, keys);
+ return ToStringArray(names);
+ }
return %LocalKeys(obj);
}
@@ -315,14 +322,14 @@ function ObjectKeys(obj) {
// ES5 8.10.1.
function IsAccessorDescriptor(desc) {
if (IS_UNDEFINED(desc)) return false;
- return desc.hasGetter_ || desc.hasSetter_;
+ return desc.hasGetter() || desc.hasSetter();
}
// ES5 8.10.2.
function IsDataDescriptor(desc) {
if (IS_UNDEFINED(desc)) return false;
- return desc.hasValue_ || desc.hasWritable_;
+ return desc.hasValue() || desc.hasWritable();
}
@@ -354,6 +361,19 @@ function FromPropertyDescriptor(desc) {
return obj;
}
+// Harmony Proxies
+function FromGenericPropertyDescriptor(desc) {
+ if (IS_UNDEFINED(desc)) return desc;
+ var obj = new $Object();
+ if (desc.hasValue()) obj.value = desc.getValue();
+ if (desc.hasWritable()) obj.writable = desc.isWritable();
+ if (desc.hasGetter()) obj.get = desc.getGet();
+ if (desc.hasSetter()) obj.set = desc.getSet();
+ if (desc.hasEnumerable()) obj.enumerable = desc.isEnumerable();
+ if (desc.hasConfigurable()) obj.configurable = desc.isConfigurable();
+ return obj;
+}
+
// ES5 8.10.5.
function ToPropertyDescriptor(obj) {
if (!IS_SPEC_OBJECT(obj)) {
@@ -404,15 +424,15 @@ function ToPropertyDescriptor(obj) {
function ToCompletePropertyDescriptor(obj) {
var desc = ToPropertyDescriptor(obj)
if (IsGenericDescriptor(desc) || IsDataDescriptor(desc)) {
- if (!("value" in desc)) desc.value = void 0;
- if (!("writable" in desc)) desc.writable = false;
+ if (!desc.hasValue()) desc.setValue(void 0);
+ if (!desc.hasWritable()) desc.setWritable(false);
} else {
// Is accessor descriptor.
- if (!("get" in desc)) desc.get = void 0;
- if (!("set" in desc)) desc.set = void 0;
+ if (!desc.hasGetter()) desc.setGet(void 0);
+ if (!desc.hasSetter()) desc.setSet(void 0);
}
- if (!("enumerable" in desc)) desc.enumerable = false;
- if (!("configurable" in desc)) desc.configurable = false;
+ if (!desc.hasEnumerable()) desc.setEnumerable(false);
+ if (!desc.hasConfigurable()) desc.setConfigurable(false);
return desc;
}
@@ -572,10 +592,10 @@ function GetProperty(obj, p) {
throw MakeTypeError("handler_trap_missing",
[handler, "getPropertyDescriptor"]);
}
- var descriptor = getProperty.call(handler, p);
+ var descriptor = %_CallFunction(handler, p, getProperty);
if (IS_UNDEFINED(descriptor)) return descriptor;
var desc = ToCompletePropertyDescriptor(descriptor);
- if (!desc.configurable) {
+ if (!desc.isConfigurable()) {
throw MakeTypeError("proxy_prop_not_configurable",
[handler, "getPropertyDescriptor", p, descriptor]);
}
@@ -595,7 +615,7 @@ function HasProperty(obj, p) {
var handler = %GetHandler(obj);
var has = handler.has;
if (IS_UNDEFINED(has)) has = DerivedHasTrap;
- return ToBoolean(has.call(handler, obj, p));
+ return ToBoolean(%_CallFunction(handler, obj, p, has));
}
var desc = GetProperty(obj, p);
return IS_UNDEFINED(desc) ? false : true;
@@ -604,6 +624,23 @@ function HasProperty(obj, p) {
// ES5 section 8.12.1.
function GetOwnProperty(obj, p) {
+ if (%IsJSProxy(obj)) {
+ var handler = %GetHandler(obj);
+ var getOwnProperty = handler.getOwnPropertyDescriptor;
+ if (IS_UNDEFINED(getOwnProperty)) {
+ throw MakeTypeError("handler_trap_missing",
+ [handler, "getOwnPropertyDescriptor"]);
+ }
+ var descriptor = %_CallFunction(handler, p, getOwnProperty);
+ if (IS_UNDEFINED(descriptor)) return descriptor;
+ var desc = ToCompletePropertyDescriptor(descriptor);
+ if (!desc.isConfigurable()) {
+ throw MakeTypeError("proxy_prop_not_configurable",
+ [handler, "getOwnPropertyDescriptor", p, descriptor]);
+ }
+ return desc;
+ }
+
// GetOwnProperty returns an array indexed by the constants
// defined in macros.py.
// If p is not a property on obj undefined is returned.
@@ -616,8 +653,32 @@ function GetOwnProperty(obj, p) {
}
+// Harmony proxies.
+function DefineProxyProperty(obj, p, attributes, should_throw) {
+ var handler = %GetHandler(obj);
+ var defineProperty = handler.defineProperty;
+ if (IS_UNDEFINED(defineProperty)) {
+ throw MakeTypeError("handler_trap_missing", [handler, "defineProperty"]);
+ }
+ var result = %_CallFunction(handler, p, attributes, defineProperty);
+ if (!ToBoolean(result)) {
+ if (should_throw) {
+ throw MakeTypeError("handler_failed", [handler, "defineProperty"]);
+ } else {
+ return false;
+ }
+ }
+ return true;
+}
+
+
// ES5 8.12.9.
function DefineOwnProperty(obj, p, desc, should_throw) {
+ if (%IsJSProxy(obj)) {
+ var attributes = FromGenericPropertyDescriptor(desc);
+ return DefineProxyProperty(obj, p, attributes, should_throw);
+ }
+
var current_or_access = %GetOwnProperty(ToObject(obj), ToString(p));
// A false value here means that access checks failed.
if (current_or_access === false) return void 0;
@@ -792,7 +853,8 @@ function ObjectGetPrototypeOf(obj) {
// ES5 section 15.2.3.3
function ObjectGetOwnPropertyDescriptor(obj, p) {
if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object", ["getOwnPropertyDescriptor"]);
+ throw MakeTypeError("obj_ctor_property_non_object",
+ ["getOwnPropertyDescriptor"]);
var desc = GetOwnProperty(obj, p);
return FromPropertyDescriptor(desc);
}
@@ -831,7 +893,7 @@ function ObjectGetOwnPropertyNames(obj) {
throw MakeTypeError("handler_trap_missing",
[handler, "getOwnPropertyNames"]);
}
- var names = getOwnPropertyNames.call(handler);
+ var names = %_CallFunction(handler, getOwnPropertyNames);
return ToStringArray(names, "getOwnPropertyNames");
}
@@ -900,8 +962,37 @@ function ObjectDefineProperty(obj, p, attributes) {
throw MakeTypeError("obj_ctor_property_non_object", ["defineProperty"]);
}
var name = ToString(p);
- var desc = ToPropertyDescriptor(attributes);
- DefineOwnProperty(obj, name, desc, true);
+ if (%IsJSProxy(obj)) {
+ // Clone the attributes object for protection.
+ // TODO(rossberg): not spec'ed yet, so not sure if this should involve
+ // non-own properties as it does (or non-enumerable ones, as it doesn't?).
+ var attributesClone = {}
+ for (var a in attributes) {
+ attributesClone[a] = attributes[a];
+ }
+ DefineProxyProperty(obj, name, attributesClone, true);
+ // The following would implement the spec as in the current proposal,
+ // but after recent comments on es-discuss it is most likely obsolete.
+ /*
+ var defineObj = FromGenericPropertyDescriptor(desc);
+ var names = ObjectGetOwnPropertyNames(attributes);
+ var standardNames =
+ {value: 0, writable: 0, get: 0, set: 0, enumerable: 0, configurable: 0};
+ for (var i = 0; i < names.length; i++) {
+ var N = names[i];
+ if (!(%HasLocalProperty(standardNames, N))) {
+ var attr = GetOwnProperty(attributes, N);
+ DefineOwnProperty(descObj, N, attr, true);
+ }
+ }
+ // This is really confusing the types, but it is what the proxies spec
+ // currently requires:
+ desc = descObj;
+ */
+ } else {
+ var desc = ToPropertyDescriptor(attributes);
+ DefineOwnProperty(obj, name, desc, true);
+ }
return obj;
}
diff --git a/deps/v8/src/v8utils.cc b/deps/v8/src/v8utils.cc
index 89f9d953ee..bf0e05d05b 100644
--- a/deps/v8/src/v8utils.cc
+++ b/deps/v8/src/v8utils.cc
@@ -110,11 +110,11 @@ char* ReadLine(const char* prompt) {
}
-char* ReadCharsFromFile(const char* filename,
+char* ReadCharsFromFile(FILE* file,
int* size,
int extra_space,
- bool verbose) {
- FILE* file = OS::FOpen(filename, "rb");
+ bool verbose,
+ const char* filename) {
if (file == NULL || fseek(file, 0, SEEK_END) != 0) {
if (verbose) {
OS::PrintError("Cannot read from file %s.\n", filename);
@@ -127,16 +127,26 @@ char* ReadCharsFromFile(const char* filename,
rewind(file);
char* result = NewArray<char>(*size + extra_space);
- for (int i = 0; i < *size;) {
+ for (int i = 0; i < *size && feof(file) == 0;) {
int read = static_cast<int>(fread(&result[i], 1, *size - i, file));
- if (read <= 0) {
+ if (read != (*size - i) && ferror(file) != 0) {
fclose(file);
DeleteArray(result);
return NULL;
}
i += read;
}
- fclose(file);
+ return result;
+}
+
+
+char* ReadCharsFromFile(const char* filename,
+ int* size,
+ int extra_space,
+ bool verbose) {
+ FILE* file = OS::FOpen(filename, "rb");
+ char* result = ReadCharsFromFile(file, size, extra_space, verbose, filename);
+ if (file != NULL) fclose(file);
return result;
}
@@ -147,18 +157,34 @@ byte* ReadBytes(const char* filename, int* size, bool verbose) {
}
+static Vector<const char> SetVectorContents(char* chars,
+ int size,
+ bool* exists) {
+ if (!chars) {
+ *exists = false;
+ return Vector<const char>::empty();
+ }
+ chars[size] = '\0';
+ *exists = true;
+ return Vector<const char>(chars, size);
+}
+
+
Vector<const char> ReadFile(const char* filename,
bool* exists,
bool verbose) {
int size;
char* result = ReadCharsFromFile(filename, &size, 1, verbose);
- if (!result) {
- *exists = false;
- return Vector<const char>::empty();
- }
- result[size] = '\0';
- *exists = true;
- return Vector<const char>(result, size);
+ return SetVectorContents(result, size, exists);
+}
+
+
+Vector<const char> ReadFile(FILE* file,
+ bool* exists,
+ bool verbose) {
+ int size;
+ char* result = ReadCharsFromFile(file, &size, 1, verbose, "");
+ return SetVectorContents(result, size, exists);
}
diff --git a/deps/v8/src/v8utils.h b/deps/v8/src/v8utils.h
index 498e23dc7b..aada521e4c 100644
--- a/deps/v8/src/v8utils.h
+++ b/deps/v8/src/v8utils.h
@@ -188,6 +188,9 @@ class AsciiStringAdapter: public v8::String::ExternalAsciiStringResource {
Vector<const char> ReadFile(const char* filename,
bool* exists,
bool verbose = true);
+Vector<const char> ReadFile(FILE* file,
+ bool* exists,
+ bool verbose = true);
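
Paired with OS::OpenTemporaryFile() declared in platform.h above, this FILE*-based overload lets callers round-trip data through an unnamed temporary file. A minimal sketch, assuming the v8::internal headers are available; the Dispose() cleanup call is an assumption about Vector's usual contract, not something introduced by this patch.

// Illustrative sketch only: write to a temporary file, read it back via the
// new ReadFile(FILE*, ...) overload, then let fclose() remove the file.
#include <cstdio>
#include "v8.h"
#include "v8utils.h"

void RoundTripThroughTempFile() {
  using namespace v8::internal;
  FILE* tmp = OS::OpenTemporaryFile();
  if (tmp == NULL) return;
  fprintf(tmp, "hello temporary world");
  bool exists = false;
  Vector<const char> data = ReadFile(tmp, &exists);  // Seeks/rewinds internally.
  if (exists) {
    // data.start() now points at a NUL-terminated copy of the file contents.
    data.Dispose();  // Assumed cleanup: the buffer comes from NewArray.
  }
  fclose(tmp);  // The temporary file is removed automatically on close.
}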
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index f744d4e1d6..ee5411d888 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 4
-#define BUILD_NUMBER 10
-#define PATCH_LEVEL 0
+#define BUILD_NUMBER 12
+#define PATCH_LEVEL 1
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/deps/v8/src/vm-state-inl.h b/deps/v8/src/vm-state-inl.h
index 1f363de626..c647e56c97 100644
--- a/deps/v8/src/vm-state-inl.h
+++ b/deps/v8/src/vm-state-inl.h
@@ -39,7 +39,6 @@ namespace internal {
// logger and partially threaded through the call stack. States are pushed by
// VMState construction and popped by destruction.
//
-#ifdef ENABLE_VMSTATE_TRACKING
inline const char* StateToString(StateTag state) {
switch (state) {
case JS:
@@ -61,32 +60,16 @@ inline const char* StateToString(StateTag state) {
VMState::VMState(Isolate* isolate, StateTag tag)
: isolate_(isolate), previous_tag_(isolate->current_vm_state()) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_state_changes) {
LOG(isolate, UncheckedStringEvent("Entering", StateToString(tag)));
LOG(isolate, UncheckedStringEvent("From", StateToString(previous_tag_)));
}
-#endif
isolate_->SetCurrentVMState(tag);
-
-#ifdef ENABLE_HEAP_PROTECTION
- if (FLAG_protect_heap) {
- if (tag == EXTERNAL) {
- // We are leaving V8.
- ASSERT(previous_tag_ != EXTERNAL);
- isolate_->heap()->Protect();
- } else if (previous_tag_ = EXTERNAL) {
- // We are entering V8.
- isolate_->heap()->Unprotect();
- }
- }
-#endif
}
VMState::~VMState() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_state_changes) {
LOG(isolate_,
UncheckedStringEvent("Leaving",
@@ -94,32 +77,10 @@ VMState::~VMState() {
LOG(isolate_,
UncheckedStringEvent("To", StateToString(previous_tag_)));
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
-#ifdef ENABLE_HEAP_PROTECTION
- StateTag tag = isolate_->current_vm_state();
-#endif
isolate_->SetCurrentVMState(previous_tag_);
-
-#ifdef ENABLE_HEAP_PROTECTION
- if (FLAG_protect_heap) {
- if (tag == EXTERNAL) {
- // We are reentering V8.
- ASSERT(previous_tag_ != EXTERNAL);
- isolate_->heap()->Unprotect();
- } else if (previous_tag_ == EXTERNAL) {
- // We are leaving V8.
- isolate_->heap()->Protect();
- }
- }
-#endif // ENABLE_HEAP_PROTECTION
}
-#endif // ENABLE_VMSTATE_TRACKING
-
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
ExternalCallbackScope::ExternalCallbackScope(Isolate* isolate, Address callback)
: isolate_(isolate), previous_callback_(isolate->external_callback()) {
@@ -130,8 +91,6 @@ ExternalCallbackScope::~ExternalCallbackScope() {
isolate_->set_external_callback(previous_callback_);
}
-#endif // ENABLE_LOGGING_AND_PROFILING
-
} } // namespace v8::internal
diff --git a/deps/v8/src/vm-state.h b/deps/v8/src/vm-state.h
index 2062340f9b..831e2d396b 100644
--- a/deps/v8/src/vm-state.h
+++ b/deps/v8/src/vm-state.h
@@ -35,7 +35,6 @@ namespace v8 {
namespace internal {
class VMState BASE_EMBEDDED {
-#ifdef ENABLE_VMSTATE_TRACKING
public:
inline VMState(Isolate* isolate, StateTag tag);
inline ~VMState();
@@ -43,26 +42,16 @@ class VMState BASE_EMBEDDED {
private:
Isolate* isolate_;
StateTag previous_tag_;
-
-#else
- public:
- VMState(Isolate* isolate, StateTag state) {}
-#endif
};
class ExternalCallbackScope BASE_EMBEDDED {
-#ifdef ENABLE_LOGGING_AND_PROFILING
public:
inline ExternalCallbackScope(Isolate* isolate, Address callback);
inline ~ExternalCallbackScope();
private:
Isolate* isolate_;
Address previous_callback_;
-#else
- public:
- ExternalCallbackScope(Isolate* isolate, Address callback) {}
-#endif
};
} } // namespace v8::internal
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index 2f40251841..3cf7840d54 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -642,25 +642,17 @@ void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
}
-const char* UnaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "UnaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- UnaryOpIC::GetName(operand_type_));
- return name_;
+ stream->Add("UnaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ UnaryOpIC::GetName(operand_type_));
}
@@ -721,12 +713,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
}
-const char* BinaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
switch (mode_) {
@@ -735,13 +722,10 @@ const char* BinaryOpStub::GetName() {
case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
default: overwrite_name = "UnknownOverwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- return name_;
+ stream->Add("BinaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ BinaryOpIC::GetName(operands_type_));
}
@@ -3450,9 +3434,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
Label invoke, exit;
-#ifdef ENABLE_LOGGING_AND_PROFILING
Label not_outermost_js, not_outermost_js_2;
-#endif
{ // NOLINT. Scope block confuses linter.
MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
// Setup frame.
@@ -3497,7 +3479,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ push(c_entry_fp_operand);
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ Load(rax, js_entry_sp);
@@ -3511,7 +3492,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ bind(&not_outermost_js);
__ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
-#endif
// Call a faked try-block that does the invoke.
__ call(&invoke);
@@ -3555,7 +3535,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ PopTryHandler();
__ bind(&exit);
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Check if the current stack frame is marked as the outermost JS frame.
__ pop(rbx);
__ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
@@ -3563,7 +3542,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ movq(kScratchRegister, js_entry_sp);
__ movq(Operand(kScratchRegister, 0), Immediate(0));
__ bind(&not_outermost_js_2);
-#endif
// Restore the top frame descriptor from the stack.
{ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
@@ -3772,15 +3750,8 @@ int CompareStub::MinorKey() {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
-const char* CompareStub::GetName() {
+void CompareStub::PrintName(StringStream* stream) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
-
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
const char* cc_name;
switch (cc_) {
case less: cc_name = "LT"; break;
@@ -3791,35 +3762,12 @@ const char* CompareStub::GetName() {
case not_equal: cc_name = "NE"; break;
default: cc_name = "UnknownCondition"; break;
}
-
- const char* strict_name = "";
- if (strict_ && (cc_ == equal || cc_ == not_equal)) {
- strict_name = "_STRICT";
- }
-
- const char* never_nan_nan_name = "";
- if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
- never_nan_nan_name = "_NO_NAN";
- }
-
- const char* include_number_compare_name = "";
- if (!include_number_compare_) {
- include_number_compare_name = "_NO_NUMBER";
- }
-
- const char* include_smi_compare_name = "";
- if (!include_smi_compare_) {
- include_smi_compare_name = "_NO_SMI";
- }
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "CompareStub_%s%s%s%s",
- cc_name,
- strict_name,
- never_nan_nan_name,
- include_number_compare_name,
- include_smi_compare_name);
- return name_;
+ bool is_equality = cc_ == equal || cc_ == not_equal;
+ stream->Add("CompareStub_%s", cc_name);
+ if (strict_ && is_equality) stream->Add("_STRICT");
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
}
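
For illustration, the Add() calls above compose flat stub names that show up in profiler logs. A standalone approximation using std::string rather than V8's StringStream; the condition-name strings follow the switch above.

// Illustrative only: mirrors the name-building logic of CompareStub::PrintName.
#include <string>

std::string CompareStubName(const char* cc_name,
                            bool is_equality,
                            bool strict,
                            bool never_nan_nan,
                            bool include_number_compare,
                            bool include_smi_compare) {
  std::string name = std::string("CompareStub_") + cc_name;
  if (strict && is_equality) name += "_STRICT";
  if (never_nan_nan && is_equality) name += "_NO_NAN";
  if (!include_number_compare) name += "_NO_NUMBER";
  if (!include_smi_compare) name += "_NO_SMI";
  return name;  // e.g. "CompareStub_NE_STRICT_NO_NAN"
}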
diff --git a/deps/v8/src/x64/code-stubs-x64.h b/deps/v8/src/x64/code-stubs-x64.h
index 6a07b3b84d..4058118eef 100644
--- a/deps/v8/src/x64/code-stubs-x64.h
+++ b/deps/v8/src/x64/code-stubs-x64.h
@@ -66,8 +66,7 @@ class UnaryOpStub: public CodeStub {
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
- operand_type_(operand_type),
- name_(NULL) {
+ operand_type_(operand_type) {
}
private:
@@ -77,19 +76,7 @@ class UnaryOpStub: public CodeStub {
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("UnaryOpStub %d (op %s), (mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- UnaryOpIC::GetName(operand_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -149,8 +136,7 @@ class BinaryOpStub: public CodeStub {
: op_(op),
mode_(mode),
operands_type_(BinaryOpIC::UNINITIALIZED),
- result_type_(BinaryOpIC::UNINITIALIZED),
- name_(NULL) {
+ result_type_(BinaryOpIC::UNINITIALIZED) {
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -161,8 +147,7 @@ class BinaryOpStub: public CodeStub {
: op_(OpBits::decode(key)),
mode_(ModeBits::decode(key)),
operands_type_(operands_type),
- result_type_(result_type),
- name_(NULL) { }
+ result_type_(result_type) { }
private:
enum SmiCodeGenerateHeapNumberResults {
@@ -177,20 +162,7 @@ class BinaryOpStub: public CodeStub {
BinaryOpIC::TypeInfo operands_type_;
BinaryOpIC::TypeInfo result_type_;
- char* name_;
-
- const char* GetName();
-
-#ifdef DEBUG
- void Print() {
- PrintF("BinaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- BinaryOpIC::GetName(operands_type_));
- }
-#endif
+ virtual void PrintName(StringStream* stream);
// Minor key encoding in 15 bits RRRTTTOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -410,14 +382,6 @@ class NumberToStringStub: public CodeStub {
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "NumberToStringStub"; }
-
-#ifdef DEBUG
- void Print() {
- PrintF("NumberToStringStub\n");
- }
-#endif
};
@@ -461,13 +425,6 @@ class StringDictionaryLookupStub: public CodeStub {
StringDictionary::kHeaderSize +
StringDictionary::kElementsStartIndex * kPointerSize;
-
-#ifdef DEBUG
- void Print() {
- PrintF("StringDictionaryLookupStub\n");
- }
-#endif
-
Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
diff --git a/deps/v8/src/x64/codegen-x64.h b/deps/v8/src/x64/codegen-x64.h
index 94c7850289..a0648cec64 100644
--- a/deps/v8/src/x64/codegen-x64.h
+++ b/deps/v8/src/x64/codegen-x64.h
@@ -58,9 +58,7 @@ class CodeGenerator: public AstVisitor {
// Print the code after compiling it.
static void PrintCode(Handle<Code> code, CompilationInfo* info);
-#ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type);
-#endif
static bool RecordPositions(MacroAssembler* masm,
int pos,
diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc
index 6629927675..a54bff59bd 100644
--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -741,7 +741,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ EmitVariableLoad(prop->obj()->AsVariableProxy());
}
__ push(rax);
VisitForAccumulatorValue(function);
@@ -1071,7 +1071,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
- EmitVariableLoad(expr->var());
+ EmitVariableLoad(expr);
}
@@ -1222,7 +1222,11 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
}
-void FullCodeGenerator::EmitVariableLoad(Variable* var) {
+void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
+ // Record position before possible IC call.
+ SetSourcePosition(proxy->position());
+ Variable* var = proxy->var();
+
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
Slot* slot = var->AsSlot();
@@ -1548,7 +1552,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->target()->AsVariableProxy());
PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
@@ -2664,13 +2668,11 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
// with '%2s' (see Logger::LogRuntime for all the formats).
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
-#ifdef ENABLE_LOGGING_AND_PROFILING
if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
}
-#endif
// Finally, we're expected to leave a value on the top of the stack.
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
@@ -3746,7 +3748,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
AccumulatorValueContext context(this);
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc
index 342f672e64..339d2c19ce 100644
--- a/deps/v8/src/x64/ic-x64.cc
+++ b/deps/v8/src/x64/ic-x64.cc
@@ -225,110 +225,6 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
}
-static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
- Label* miss,
- Register elements,
- Register key,
- Register r0,
- Register r1,
- Register r2,
- Register result) {
- // Register use:
- //
- // elements - holds the slow-case elements of the receiver on entry.
- // Unchanged unless 'result' is the same register.
- //
- // key - holds the smi key on entry.
- // Unchanged unless 'result' is the same register.
- //
- // Scratch registers:
- //
- // r0 - holds the untagged key on entry and holds the hash once computed.
- //
- // r1 - used to hold the capacity mask of the dictionary
- //
- // r2 - used for the index into the dictionary.
- //
- // result - holds the result on exit if the load succeeded.
- // Allowed to be the same as 'key' or 'result'.
- // Unchanged on bailout so 'key' or 'result' can be used
- // in further computation.
-
- Label done;
-
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- __ movl(r1, r0);
- __ notl(r0);
- __ shll(r1, Immediate(15));
- __ addl(r0, r1);
- // hash = hash ^ (hash >> 12);
- __ movl(r1, r0);
- __ shrl(r1, Immediate(12));
- __ xorl(r0, r1);
- // hash = hash + (hash << 2);
- __ leal(r0, Operand(r0, r0, times_4, 0));
- // hash = hash ^ (hash >> 4);
- __ movl(r1, r0);
- __ shrl(r1, Immediate(4));
- __ xorl(r0, r1);
- // hash = hash * 2057;
- __ imull(r0, r0, Immediate(2057));
- // hash = hash ^ (hash >> 16);
- __ movl(r1, r0);
- __ shrl(r1, Immediate(16));
- __ xorl(r0, r1);
-
- // Compute capacity mask.
- __ SmiToInteger32(r1,
- FieldOperand(elements, NumberDictionary::kCapacityOffset));
- __ decl(r1);
-
- // Generate an unrolled loop that performs a few probes before giving up.
- const int kProbes = 4;
- for (int i = 0; i < kProbes; i++) {
- // Use r2 for index calculations and keep the hash intact in r0.
- __ movq(r2, r0);
- // Compute the masked index: (hash + i + i * i) & mask.
- if (i > 0) {
- __ addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
- }
- __ and_(r2, r1);
-
- // Scale the index by multiplying by the entry size.
- ASSERT(NumberDictionary::kEntrySize == 3);
- __ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
-
- // Check if the key matches.
- __ cmpq(key, FieldOperand(elements,
- r2,
- times_pointer_size,
- NumberDictionary::kElementsStartOffset));
- if (i != (kProbes - 1)) {
- __ j(equal, &done);
- } else {
- __ j(not_equal, miss);
- }
- }
-
- __ bind(&done);
- // Check that the value is a normal property.
- const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
- ASSERT_EQ(NORMAL, 0);
- __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
- Smi::FromInt(PropertyDetails::TypeField::mask()));
- __ j(not_zero, miss);
-
- // Get the value at the masked, scaled index.
- const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
- __ movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
-}
-
-
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : receiver
@@ -535,7 +431,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
Heap::kHashTableMapRootIndex);
__ j(not_equal, &slow);
- GenerateNumberDictionaryLoad(masm, &slow, rcx, rax, rbx, r9, rdi, rax);
+ __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
__ ret(0);
__ bind(&slow);
@@ -1099,7 +995,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
__ j(not_equal, &slow_load);
__ SmiToInteger32(rbx, rcx);
// ebx: untagged index
- GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
+ __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
__ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
__ jmp(&do_call);
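
The block removed above open-codes the integer hash that NumberDictionary uses for smi keys; its comments note that it must stay in sync with ComputeIntegerHash in utils.h. As a rough standalone C++ sketch of the sequence the generated code implements (an illustration only, not the utils.h source; the function name is made up for the sketch):

#include <stdint.h>

// Each step mirrors one commented block in the hand-written assembly above.
static uint32_t ComputeIntegerHashSketch(uint32_t hash) {
  hash = ~hash + (hash << 15);
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash;
}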
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index 3ebdc7cee7..98667ce87e 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -1341,6 +1341,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ __ nop(); // Signals no inlined code.
}
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index dbed6e0fda..1df0228434 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -3204,6 +3204,109 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register result) {
+ // Register use:
+ //
+ // elements - holds the slow-case elements of the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // Scratch registers:
+ //
+ // r0 - holds the untagged key on entry and holds the hash once computed.
+ //
+ // r1 - used to hold the capacity mask of the dictionary
+ //
+ // r2 - used for the index into the dictionary.
+ //
+ // result - holds the result on exit if the load succeeded.
+ // Allowed to be the same as 'key' or 'result'.
+ // Unchanged on bailout so 'key' or 'result' can be used
+ // in further computation.
+
+ Label done;
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ movl(r1, r0);
+ notl(r0);
+ shll(r1, Immediate(15));
+ addl(r0, r1);
+ // hash = hash ^ (hash >> 12);
+ movl(r1, r0);
+ shrl(r1, Immediate(12));
+ xorl(r0, r1);
+ // hash = hash + (hash << 2);
+ leal(r0, Operand(r0, r0, times_4, 0));
+ // hash = hash ^ (hash >> 4);
+ movl(r1, r0);
+ shrl(r1, Immediate(4));
+ xorl(r0, r1);
+ // hash = hash * 2057;
+ imull(r0, r0, Immediate(2057));
+ // hash = hash ^ (hash >> 16);
+ movl(r1, r0);
+ shrl(r1, Immediate(16));
+ xorl(r0, r1);
+
+ // Compute capacity mask.
+ SmiToInteger32(r1,
+ FieldOperand(elements, NumberDictionary::kCapacityOffset));
+ decl(r1);
+
+ // Generate an unrolled loop that performs a few probes before giving up.
+ const int kProbes = 4;
+ for (int i = 0; i < kProbes; i++) {
+ // Use r2 for index calculations and keep the hash intact in r0.
+ movq(r2, r0);
+ // Compute the masked index: (hash + i + i * i) & mask.
+ if (i > 0) {
+ addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
+ }
+ and_(r2, r1);
+
+ // Scale the index by multiplying by the entry size.
+ ASSERT(NumberDictionary::kEntrySize == 3);
+ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
+
+ // Check if the key matches.
+ cmpq(key, FieldOperand(elements,
+ r2,
+ times_pointer_size,
+ NumberDictionary::kElementsStartOffset));
+ if (i != (kProbes - 1)) {
+ j(equal, &done);
+ } else {
+ j(not_equal, miss);
+ }
+ }
+
+ bind(&done);
+ // Check that the value is a normal property.
+ const int kDetailsOffset =
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ ASSERT_EQ(NORMAL, 0);
+ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
+ Smi::FromInt(PropertyDetails::TypeField::mask()));
+ j(not_zero, miss);
+
+ // Get the value at the masked, scaled index.
+ const int kValueOffset =
+ NumberDictionary::kElementsStartOffset + kPointerSize;
+ movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
+}
+
+
void MacroAssembler::LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags) {
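
The new MacroAssembler::LoadFromNumberDictionary unrolls the usual open-addressing lookup: compute the masked probe index, compare the key at each probe, verify that the entry's details mark a normal property, then load the value. Below is a minimal C++ sketch of that lookup with hypothetical layout names standing in for V8's real NumberDictionary fields; note the generated code gives up after four probes and falls back to the runtime, which the sketch does not model:

#include <stdint.h>

// Hypothetical flat entry layout: kEntrySize == 3 slots per entry
// (key, value, details), matching the times_2 scaling in the assembly.
struct DictEntrySketch {
  uint32_t key;
  intptr_t value;
  uint32_t details;  // low bits hold the property type; 0 == NORMAL
};

// Probe the dictionary for 'key'; returns true and stores the value on a hit.
static bool LoadFromNumberDictionarySketch(const DictEntrySketch* entries,
                                           uint32_t capacity,  // power of two
                                           uint32_t key,
                                           uint32_t hash,
                                           intptr_t* result) {
  const uint32_t mask = capacity - 1;
  for (uint32_t i = 0; i < capacity; ++i) {
    // Masked index: (hash + i + i * i) & mask, as in the assembly comment.
    const uint32_t index = (hash + i + i * i) & mask;
    const DictEntrySketch& entry = entries[index];
    if (entry.key != key) continue;
    // Only normal properties are handled; anything else is treated as a miss.
    if ((entry.details & 0x3) != 0) return false;  // assumed type-field mask
    *result = entry.value;
    return true;
  }
  return false;
}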
diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h
index f09fafc202..47ce01bd0c 100644
--- a/deps/v8/src/x64/macro-assembler-x64.h
+++ b/deps/v8/src/x64/macro-assembler-x64.h
@@ -846,6 +846,15 @@ class MacroAssembler: public Assembler {
Label* miss);
+ void LoadFromNumberDictionary(Label* miss,
+ Register elements,
+ Register key,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register result);
+
+
// ---------------------------------------------------------------------------
// Allocation support
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.h b/deps/v8/src/x64/regexp-macro-assembler-x64.h
index 02b510fa07..7102225e64 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.h
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.h
@@ -28,6 +28,12 @@
#ifndef V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
#define V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
+#include "x64/assembler-x64.h"
+#include "x64/assembler-x64-inl.h"
+#include "macro-assembler.h"
+#include "code.h"
+#include "x64/macro-assembler-x64.h"
+
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index da27fdf050..71ce856169 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -2538,7 +2538,10 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
// -- rsp[0] : return address
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedStoreElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ MaybeObject* maybe_stub =
+ KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(rdx,
Handle<Map>(receiver_map),
@@ -2994,7 +2997,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
// -- rsp[0] : return address
// -----------------------------------
Code* stub;
- MaybeObject* maybe_stub = ComputeSharedKeyedLoadElementStub(receiver_map);
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(rdx,
Handle<Map>(receiver_map),
@@ -3177,6 +3181,51 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
#define __ ACCESS_MASM(masm)
+void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+ MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label slow, miss_force_generic;
+
+ // This stub is meant to be tail-jumped to; the receiver must already
+ // have been verified by the caller not to be a smi.
+
+ __ JumpIfNotSmi(rax, &miss_force_generic);
+ __ SmiToInteger32(rbx, rax);
+ __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+
+ // Check whether the elements is a number dictionary.
+ // rdx: receiver
+ // rax: key
+ // rbx: key as untagged int32
+ // rcx: elements
+ __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
+ __ ret(0);
+
+ __ bind(&slow);
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ jmp(slow_ic, RelocInfo::CODE_TARGET);
+
+ __ bind(&miss_force_generic);
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Handle<Code> miss_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ jmp(miss_ic, RelocInfo::CODE_TARGET);
+}
+
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
JSObject::ElementsKind elements_kind) {
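
For readers following the stub's control flow rather than its register assignments, GenerateLoadDictionaryElement reduces to a short decision chain. The sketch below restates it in plain C++ with placeholder types; LoadOutcome and the Lookup call are illustrative only, not V8 API:

#include <stdint.h>

enum LoadOutcome {
  kFound,             // value loaded, corresponds to the __ ret(0) path
  kCallSlowHandler,   // tail-call to KeyedLoadIC_Slow
  kMissForceGeneric   // tail-call to KeyedLoadIC_MissForceGeneric
};

// Placeholder dictionary interface; 'elements.Lookup' is assumed for the sketch.
template <typename Dictionary, typename Value>
LoadOutcome LoadDictionaryElementSketch(bool key_is_smi,
                                        int32_t untagged_key,
                                        const Dictionary& elements,
                                        Value* result) {
  if (!key_is_smi) return kMissForceGeneric;      // JumpIfNotSmi
  if (!elements.Lookup(untagged_key, result)) {   // LoadFromNumberDictionary
    return kCallSlowHandler;                      // bind(&slow)
  }
  return kFound;
}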
diff --git a/deps/v8/src/zone.h b/deps/v8/src/zone.h
index a5e437f7fa..af9c916d70 100644
--- a/deps/v8/src/zone.h
+++ b/deps/v8/src/zone.h
@@ -164,7 +164,7 @@ class AssertNoZoneAllocation {
class ZoneListAllocationPolicy {
public:
// Allocate 'size' bytes of memory in the zone.
- INLINE(static void* New(int size));
+ static void* New(int size);
// De-allocation attempts are silently ignored.
static void Delete(void* p) { }
diff --git a/deps/v8/test/cctest/SConscript b/deps/v8/test/cctest/SConscript
index 0197178cf2..0ef5667abd 100644
--- a/deps/v8/test/cctest/SConscript
+++ b/deps/v8/test/cctest/SConscript
@@ -29,7 +29,7 @@ import sys
from os.path import join, dirname, abspath
root_dir = dirname(File('SConstruct').rfile().abspath)
sys.path.append(join(root_dir, 'tools'))
-Import('context object_files')
+Import('context object_files tools')
SOURCES = {
@@ -65,7 +65,6 @@ SOURCES = {
'test-liveedit.cc',
'test-lock.cc',
'test-lockers.cc',
- 'test-log-utils.cc',
'test-log.cc',
'test-mark-compact.cc',
'test-parsing.cc',
@@ -107,7 +106,7 @@ SOURCES = {
def Build():
cctest_files = context.GetRelevantSources(SOURCES)
- env = Environment()
+ env = Environment(tools=tools)
env.Replace(**context.flags['cctest'])
context.ApplyEnvOverrides(env)
# There seems to be a glitch in the way scons decides where to put
diff --git a/deps/v8/test/cctest/cctest.gyp b/deps/v8/test/cctest/cctest.gyp
index 1d54e8cf55..0a74ce3ca8 100644
--- a/deps/v8/test/cctest/cctest.gyp
+++ b/deps/v8/test/cctest/cctest.gyp
@@ -71,7 +71,6 @@
'test-lock.cc',
'test-lockers.cc',
'test-log.cc',
- 'test-log-utils.cc',
'test-mark-compact.cc',
'test-parsing.cc',
'test-profile-generator.cc',
diff --git a/deps/v8/test/cctest/log-eq-of-logging-and-traversal.js b/deps/v8/test/cctest/log-eq-of-logging-and-traversal.js
new file mode 100644
index 0000000000..e661efe023
--- /dev/null
+++ b/deps/v8/test/cctest/log-eq-of-logging-and-traversal.js
@@ -0,0 +1,191 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This is a supplementary file for test-log/EquivalenceOfLoggingAndTraversal.
+
+function parseState(s) {
+ switch (s) {
+ case "": return Profile.CodeState.COMPILED;
+ case "~": return Profile.CodeState.OPTIMIZABLE;
+ case "*": return Profile.CodeState.OPTIMIZED;
+ }
+ throw new Error("unknown code state: " + s);
+}
+
+function LogProcessor() {
+ LogReader.call(this, {
+ 'code-creation': {
+ parsers: [null, parseInt, parseInt, null, 'var-args'],
+ processor: this.processCodeCreation },
+ 'code-move': { parsers: [parseInt, parseInt],
+ processor: this.processCodeMove },
+ 'code-delete': { parsers: [parseInt],
+ processor: this.processCodeDelete },
+ 'sfi-move': { parsers: [parseInt, parseInt],
+ processor: this.processFunctionMove },
+ 'shared-library': null,
+ 'profiler': null,
+ 'tick': null });
+ this.profile = new Profile();
+
+}
+LogProcessor.prototype.__proto__ = LogReader.prototype;
+
+LogProcessor.prototype.processCodeCreation = function(
+ type, start, size, name, maybe_func) {
+ if (type != "LazyCompile" && type != "Script" && type != "Function") return;
+ // Discard types to avoid discrepancies in "LazyCompile" vs. "Function".
+ type = "";
+ if (maybe_func.length) {
+ var funcAddr = parseInt(maybe_func[0]);
+ var state = parseState(maybe_func[1]);
+ this.profile.addFuncCode(type, name, start, size, funcAddr, state);
+ } else {
+ this.profile.addCode(type, name, start, size);
+ }
+};
+
+LogProcessor.prototype.processCodeMove = function(from, to) {
+ this.profile.moveCode(from, to);
+};
+
+LogProcessor.prototype.processCodeDelete = function(start) {
+ this.profile.deleteCode(start);
+};
+
+LogProcessor.prototype.processFunctionMove = function(from, to) {
+ this.profile.moveFunc(from, to);
+};
+
+function RunTest() {
+ // _log must be provided externally.
+ var log_lines = _log.split("\n");
+ var line, pos = 0, log_lines_length = log_lines.length;
+ if (log_lines_length < 2)
+ return "log_lines_length < 2";
+ var logging_processor = new LogProcessor();
+ for ( ; pos < log_lines_length; ++pos) {
+ line = log_lines[pos];
+ if (line === "test-logging-done,\"\"") {
+ ++pos;
+ break;
+ }
+ logging_processor.processLogLine(line);
+ }
+ logging_processor.profile.cleanUpFuncEntries();
+ var logging_entries =
+ logging_processor.profile.codeMap_.getAllDynamicEntriesWithAddresses();
+ if (logging_entries.length === 0)
+ return "logging_entries.length === 0";
+ var traversal_processor = new LogProcessor();
+ for ( ; pos < log_lines_length; ++pos) {
+ line = log_lines[pos];
+ if (line === "test-traversal-done,\"\"") break;
+ traversal_processor.processLogLine(line);
+ }
+ var traversal_entries =
+ traversal_processor.profile.codeMap_.getAllDynamicEntriesWithAddresses();
+ if (traversal_entries.length === 0)
+ return "traversal_entries.length === 0";
+
+ function addressComparator(entryA, entryB) {
+ return entryA[0] < entryB[0] ? -1 : (entryA[0] > entryB[0] ? 1 : 0);
+ }
+
+ logging_entries.sort(addressComparator);
+ traversal_entries.sort(addressComparator);
+
+ function entityNamesEqual(entityA, entityB) {
+ if ("getRawName" in entityB &&
+ entityNamesEqual.builtins.indexOf(entityB.getRawName()) !== -1) {
+ return true;
+ }
+ if (entityNamesEqual.builtins.indexOf(entityB.getName()) !== -1) return true;
+ return entityA.getName() === entityB.getName();
+ }
+ entityNamesEqual.builtins =
+ ["Boolean", "Function", "Number", "Object",
+ "Script", "String", "RegExp", "Date", "Error"];
+
+ function entitiesEqual(entityA, entityB) {
+ if (entityA === null && entityB !== null) return true;
+ if (entityA !== null && entityB === null) return false;
+ return entityA.size === entityB.size && entityNamesEqual(entityA, entityB);
+ }
+
+ var i = 0, j = 0, k = logging_entries.length, l = traversal_entries.length;
+ var comparison = [];
+ var equal = true;
+ // Do a merge-like comparison of entries. At the same address we expect to
+ // find the same entries. We skip builtins during log parsing, but the
+ // compiled-functions traversal may erroneously recognize them as functions,
+ // so we expect more functions in traversal than in logging.
+ while (i < k && j < l) {
+ var entryA = logging_entries[i], entryB = traversal_entries[j];
+ var cmp = addressComparator(entryA, entryB);
+ var entityA = entryA[1], entityB = entryB[1];
+ var address = entryA[0];
+ if (cmp < 0) {
+ ++i;
+ entityB = null;
+ } else if (cmp > 0) {
+ ++j;
+ entityA = null;
+ address = entryB[0];
+ } else {
+ ++i;
+ ++j;
+ }
+ var entities_equal = entitiesEqual(entityA, entityB);
+ if (!entities_equal) equal = false;
+ comparison.push([entities_equal, address, entityA, entityB]);
+ }
+ if (i < k) equal = false;
+ while (i < k) {
+ var entryA = logging_entries[i++];
+ comparison.push([false, entryA[0], entryA[1], null]);
+ }
+ return [equal, comparison];
+}
+
+var result = RunTest();
+if (typeof result !== "string") {
+ var out = [];
+ if (!result[0]) {
+ var comparison = result[1];
+ for (var i = 0, l = comparison.length; i < l; ++i) {
+ var c = comparison[i];
+ out.push((c[0] ? " " : "* ") +
+ c[1].toString(16) + " " +
+ (c[2] ? c[2] : "---") + " " +
+ (c[3] ? c[3] : "---"));
+ }
+ }
+ result[0] ? true : out.join("\n");
+} else {
+ result;
+}
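
The core of RunTest is the merge walk over the two address-sorted entry lists: logging-only entries count as failures, traversal-only entries are tolerated (the traversal is expected to pick up extra functions), and entries at the same address must match. The same logic in a compact C++ sketch, using standard containers instead of the test's Profile objects:

#include <stdint.h>
#include <string>
#include <utility>
#include <vector>

typedef std::pair<uintptr_t, std::string> EntrySketch;  // (address, name)

// Merge-compare two address-sorted lists the same way RunTest does above.
static bool EntriesEquivalent(const std::vector<EntrySketch>& logging,
                              const std::vector<EntrySketch>& traversal) {
  size_t i = 0, j = 0;
  bool equal = true;
  while (i < logging.size() && j < traversal.size()) {
    if (logging[i].first < traversal[j].first) {
      equal = false;  // traversal missed a logged entry
      ++i;
    } else if (logging[i].first > traversal[j].first) {
      ++j;            // traversal-only entry: tolerated
    } else {
      if (logging[i].second != traversal[j].second) equal = false;
      ++i;
      ++j;
    }
  }
  return equal && i == logging.size();  // leftover logging entries are failures
}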
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index 1531f905d1..8d8770f132 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -12633,9 +12633,10 @@ v8::Handle<Value> AnalyzeStackInNativeCode(const v8::Arguments& args) {
stackTrace->GetFrame(0));
checkStackFrame(origin, "foo", 6, 3, false, false,
stackTrace->GetFrame(1));
- checkStackFrame(NULL, "", 1, 1, false, false,
+ // This is the source string inside the eval which has the call to foo.
+ checkStackFrame(NULL, "", 1, 5, false, false,
stackTrace->GetFrame(2));
- // The last frame is an anonymous function that has the initial call.
+ // The last frame is an anonymous function which has the initial eval call.
checkStackFrame(origin, "", 8, 7, false, false,
stackTrace->GetFrame(3));
@@ -12654,9 +12655,10 @@ v8::Handle<Value> AnalyzeStackInNativeCode(const v8::Arguments& args) {
bool is_eval = false;
#endif // ENABLE_DEBUGGER_SUPPORT
- checkStackFrame(NULL, "", 1, 1, is_eval, false,
+ // This is the source string inside the eval which has the call to baz.
+ checkStackFrame(NULL, "", 1, 5, is_eval, false,
stackTrace->GetFrame(2));
- // The last frame is an anonymous function that has the initial call to foo.
+ // The last frame is an anonymous function which has the initial eval call.
checkStackFrame(origin, "", 10, 1, false, false,
stackTrace->GetFrame(3));
diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc
index 7d898cedb3..9ff2a171a3 100644
--- a/deps/v8/test/cctest/test-cpu-profiler.cc
+++ b/deps/v8/test/cctest/test-cpu-profiler.cc
@@ -2,8 +2,6 @@
//
// Tests of profiles generator and utilities.
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "v8.h"
#include "cpu-profiler-inl.h"
#include "cctest.h"
@@ -401,5 +399,3 @@ TEST(DeleteCpuProfileDifferentTokens) {
CHECK_EQ(0, CpuProfiler::GetProfilesCount());
CHECK_EQ(NULL, v8::CpuProfiler::FindProfile(uid3));
}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc
index a2426cc0fe..8675a0146c 100644
--- a/deps/v8/test/cctest/test-heap-profiler.cc
+++ b/deps/v8/test/cctest/test-heap-profiler.cc
@@ -2,8 +2,6 @@
//
// Tests for heap profiler
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "v8.h"
#include "cctest.h"
@@ -893,5 +891,3 @@ TEST(NodesIteration) {
}
CHECK_EQ(1, count);
}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-lockers.cc b/deps/v8/test/cctest/test-lockers.cc
index 4579361f34..2b184e913d 100644
--- a/deps/v8/test/cctest/test-lockers.cc
+++ b/deps/v8/test/cctest/test-lockers.cc
@@ -314,7 +314,11 @@ class SeparateIsolatesLocksNonexclusiveThread : public JoinableThread {
// Run parallel threads that lock and access different isolates in parallel
TEST(SeparateIsolatesLocksNonexclusive) {
+#ifdef V8_TARGET_ARCH_ARM
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
v8::Isolate* isolate1 = v8::Isolate::New();
v8::Isolate* isolate2 = v8::Isolate::New();
i::List<JoinableThread*> threads(kNThreads);
@@ -383,7 +387,11 @@ class LockerUnlockerThread : public JoinableThread {
// Use unlocker inside of a Locker, multiple threads.
TEST(LockerUnlocker) {
+#ifdef V8_TARGET_ARCH_ARM
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
i::List<JoinableThread*> threads(kNThreads);
v8::Isolate* isolate = v8::Isolate::New();
for (int i = 0; i < kNThreads; i++) {
@@ -431,7 +439,11 @@ class LockTwiceAndUnlockThread : public JoinableThread {
// Use Unlocker inside two Lockers.
TEST(LockTwiceAndUnlock) {
+#ifdef V8_TARGET_ARCH_ARM
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
i::List<JoinableThread*> threads(kNThreads);
v8::Isolate* isolate = v8::Isolate::New();
for (int i = 0; i < kNThreads; i++) {
diff --git a/deps/v8/test/cctest/test-log-stack-tracer.cc b/deps/v8/test/cctest/test-log-stack-tracer.cc
index b967c7388c..2bcb3fe0b6 100644
--- a/deps/v8/test/cctest/test-log-stack-tracer.cc
+++ b/deps/v8/test/cctest/test-log-stack-tracer.cc
@@ -27,8 +27,6 @@
//
// Tests of profiler-related functions from log.h
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include <stdlib.h>
#include "v8.h"
@@ -413,5 +411,3 @@ TEST(JsEntrySp) {
CompileRun("js_entry_sp_level2();");
CHECK_EQ(0, GetJsEntrySp());
}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-log-utils.cc b/deps/v8/test/cctest/test-log-utils.cc
deleted file mode 100644
index 861be12ce8..0000000000
--- a/deps/v8/test/cctest/test-log-utils.cc
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
-//
-// Tests of logging utilities from log-utils.h
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
-#include "v8.h"
-
-#include "log-utils.h"
-#include "cctest.h"
-
-using v8::internal::CStrVector;
-using v8::internal::EmbeddedVector;
-using v8::internal::LogDynamicBuffer;
-using v8::internal::MutableCStrVector;
-using v8::internal::ScopedVector;
-using v8::internal::Vector;
-using v8::internal::StrLength;
-
-// Fills 'ref_buffer' with test data: a sequence of two-digit
-// hex numbers: '0001020304...'. Then writes 'ref_buffer' contents to 'dynabuf'.
-static void WriteData(LogDynamicBuffer* dynabuf, Vector<char>* ref_buffer) {
- static const char kHex[] = "0123456789ABCDEF";
- CHECK_GT(ref_buffer->length(), 0);
- CHECK_GT(513, ref_buffer->length());
- for (int i = 0, half_len = ref_buffer->length() >> 1; i < half_len; ++i) {
- (*ref_buffer)[i << 1] = kHex[i >> 4];
- (*ref_buffer)[(i << 1) + 1] = kHex[i & 15];
- }
- if (ref_buffer->length() & 1) {
- ref_buffer->last() = kHex[ref_buffer->length() >> 5];
- }
- CHECK_EQ(ref_buffer->length(),
- dynabuf->Write(ref_buffer->start(), ref_buffer->length()));
-}
-
-
-static int ReadData(
- LogDynamicBuffer* dynabuf, int start_pos, i::Vector<char>* buffer) {
- return dynabuf->Read(start_pos, buffer->start(), buffer->length());
-}
-
-
-// Helper function used by CHECK_EQ to compare Vectors. Templatized to
-// accept both "char" and "const char" vector contents.
-template <typename E, typename V>
-static inline void CheckEqualsHelper(const char* file, int line,
- const char* expected_source,
- const Vector<E>& expected,
- const char* value_source,
- const Vector<V>& value) {
- if (expected.length() != value.length()) {
- V8_Fatal(file, line, "CHECK_EQ(%s, %s) failed\n"
- "# Vectors lengths differ: %d expected, %d found\n"
- "# Expected: %.*s\n"
- "# Found: %.*s",
- expected_source, value_source,
- expected.length(), value.length(),
- expected.length(), expected.start(),
- value.length(), value.start());
- }
- if (strncmp(expected.start(), value.start(), expected.length()) != 0) {
- V8_Fatal(file, line, "CHECK_EQ(%s, %s) failed\n"
- "# Vectors contents differ:\n"
- "# Expected: %.*s\n"
- "# Found: %.*s",
- expected_source, value_source,
- expected.length(), expected.start(),
- value.length(), value.start());
- }
-}
-
-
-TEST(DynaBufSingleBlock) {
- LogDynamicBuffer dynabuf(32, 32, "", 0);
- EmbeddedVector<char, 32> ref_buf;
- WriteData(&dynabuf, &ref_buf);
- EmbeddedVector<char, 32> buf;
- CHECK_EQ(32, dynabuf.Read(0, buf.start(), buf.length()));
- CHECK_EQ(32, ReadData(&dynabuf, 0, &buf));
- CHECK_EQ(ref_buf, buf);
-
- // Verify that we can't read and write past the end.
- CHECK_EQ(0, dynabuf.Read(32, buf.start(), buf.length()));
- CHECK_EQ(0, dynabuf.Write(buf.start(), buf.length()));
-}
-
-
-TEST(DynaBufCrossBlocks) {
- LogDynamicBuffer dynabuf(32, 128, "", 0);
- EmbeddedVector<char, 48> ref_buf;
- WriteData(&dynabuf, &ref_buf);
- CHECK_EQ(48, dynabuf.Write(ref_buf.start(), ref_buf.length()));
- // Verify that we can't write data when remaining buffer space isn't enough.
- CHECK_EQ(0, dynabuf.Write(ref_buf.start(), ref_buf.length()));
- EmbeddedVector<char, 48> buf;
- CHECK_EQ(48, ReadData(&dynabuf, 0, &buf));
- CHECK_EQ(ref_buf, buf);
- CHECK_EQ(48, ReadData(&dynabuf, 48, &buf));
- CHECK_EQ(ref_buf, buf);
- CHECK_EQ(0, ReadData(&dynabuf, 48 * 2, &buf));
-}
-
-
-TEST(DynaBufReadTruncation) {
- LogDynamicBuffer dynabuf(32, 128, "", 0);
- EmbeddedVector<char, 128> ref_buf;
- WriteData(&dynabuf, &ref_buf);
- EmbeddedVector<char, 128> buf;
- CHECK_EQ(128, ReadData(&dynabuf, 0, &buf));
- CHECK_EQ(ref_buf, buf);
- // Try to read near the end with a buffer larger than remaining data size.
- EmbeddedVector<char, 48> tail_buf;
- CHECK_EQ(32, ReadData(&dynabuf, 128 - 32, &tail_buf));
- CHECK_EQ(ref_buf.SubVector(128 - 32, 128), tail_buf.SubVector(0, 32));
-}
-
-
-TEST(DynaBufSealing) {
- const char* seal = "Sealed";
- const int seal_size = StrLength(seal);
- LogDynamicBuffer dynabuf(32, 128, seal, seal_size);
- EmbeddedVector<char, 100> ref_buf;
- WriteData(&dynabuf, &ref_buf);
- // Try to write data that will not fit in the buffer.
- CHECK_EQ(0, dynabuf.Write(ref_buf.start(), 128 - 100 - seal_size + 1));
- // Now the buffer is sealed, writing of any amount of data is forbidden.
- CHECK_EQ(0, dynabuf.Write(ref_buf.start(), 1));
- EmbeddedVector<char, 100> buf;
- CHECK_EQ(100, ReadData(&dynabuf, 0, &buf));
- CHECK_EQ(ref_buf, buf);
- // Check the seal.
- EmbeddedVector<char, 50> seal_buf;
- CHECK_EQ(seal_size, ReadData(&dynabuf, 100, &seal_buf));
- CHECK_EQ(CStrVector(seal), seal_buf.SubVector(0, seal_size));
- // Verify that there's no data beyond the seal.
- CHECK_EQ(0, ReadData(&dynabuf, 100 + seal_size, &buf));
-}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-log.cc b/deps/v8/test/cctest/test-log.cc
index 10a90bcf17..5704b07ace 100644
--- a/deps/v8/test/cctest/test-log.cc
+++ b/deps/v8/test/cctest/test-log.cc
@@ -2,8 +2,6 @@
//
// Tests of logging functions from log.h
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#ifdef __linux__
#include <math.h>
#include <pthread.h>
@@ -15,6 +13,7 @@
#include "log.h"
#include "cpu-profiler.h"
#include "v8threads.h"
+#include "v8utils.h"
#include "cctest.h"
#include "vm-state-inl.h"
@@ -25,269 +24,75 @@ using v8::internal::StrLength;
namespace i = v8::internal;
-static void SetUp() {
- // Log to memory buffer.
- i::FLAG_logfile = "*";
- i::FLAG_log = true;
- LOGGER->Setup();
-}
-
-static void TearDown() {
- LOGGER->TearDown();
-}
-
-
-TEST(EmptyLog) {
- SetUp();
- CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
- CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 0));
- CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 100));
- CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 100));
- TearDown();
-}
-
-
-TEST(GetMessages) {
- SetUp();
- LOGGER->StringEvent("aaa", "bbb");
- LOGGER->StringEvent("cccc", "dddd");
- CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
- char log_lines[100];
- memset(log_lines, 0, sizeof(log_lines));
- // See Logger::StringEvent.
- const char* line_1 = "aaa,\"bbb\"\n";
- const int line_1_len = StrLength(line_1);
- // The exact size.
- CHECK_EQ(line_1_len, LOGGER->GetLogLines(0, log_lines, line_1_len));
- CHECK_EQ(line_1, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- // A bit more than the first line length.
- CHECK_EQ(line_1_len, LOGGER->GetLogLines(0, log_lines, line_1_len + 3));
- log_lines[line_1_len] = '\0';
- CHECK_EQ(line_1, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- const char* line_2 = "cccc,\"dddd\"\n";
- const int line_2_len = StrLength(line_2);
- // Now start with line_2 beginning.
- CHECK_EQ(0, LOGGER->GetLogLines(line_1_len, log_lines, 0));
- CHECK_EQ(line_2_len, LOGGER->GetLogLines(line_1_len, log_lines, line_2_len));
- CHECK_EQ(line_2, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- CHECK_EQ(line_2_len,
- LOGGER->GetLogLines(line_1_len, log_lines, line_2_len + 3));
- CHECK_EQ(line_2, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- // Now get entire buffer contents.
- const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
- const int all_lines_len = StrLength(all_lines);
- CHECK_EQ(all_lines_len, LOGGER->GetLogLines(0, log_lines, all_lines_len));
- CHECK_EQ(all_lines, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- CHECK_EQ(all_lines_len, LOGGER->GetLogLines(0, log_lines, all_lines_len + 3));
- CHECK_EQ(all_lines, log_lines);
- memset(log_lines, 0, sizeof(log_lines));
- TearDown();
-}
-
-
-static int GetLogLines(int start_pos, i::Vector<char>* buffer) {
- return LOGGER->GetLogLines(start_pos, buffer->start(), buffer->length());
-}
-
-
-TEST(BeyondWritePosition) {
- SetUp();
- LOGGER->StringEvent("aaa", "bbb");
- LOGGER->StringEvent("cccc", "dddd");
- // See Logger::StringEvent.
- const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
- const int all_lines_len = StrLength(all_lines);
- EmbeddedVector<char, 100> buffer;
- const int beyond_write_pos = all_lines_len;
- CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos, buffer.start(), 1));
- CHECK_EQ(0, GetLogLines(beyond_write_pos, &buffer));
- CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos + 1, buffer.start(), 1));
- CHECK_EQ(0, GetLogLines(beyond_write_pos + 1, &buffer));
- CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos + 100, buffer.start(), 1));
- CHECK_EQ(0, GetLogLines(beyond_write_pos + 100, &buffer));
- CHECK_EQ(0, LOGGER->GetLogLines(10 * 1024 * 1024, buffer.start(), 1));
- CHECK_EQ(0, GetLogLines(10 * 1024 * 1024, &buffer));
- TearDown();
-}
-
-
-TEST(MemoryLoggingTurnedOff) {
- // Log to stdout
- i::FLAG_logfile = "-";
- i::FLAG_log = true;
- LOGGER->Setup();
- CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
- CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 0));
- CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 100));
- CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 100));
- LOGGER->TearDown();
-}
-
-
-static void CompileAndRunScript(const char *src) {
- v8::Script::Compile(v8::String::New(src))->Run();
-}
-
-
-namespace v8 {
-namespace internal {
-
-class LoggerTestHelper : public AllStatic {
- public:
- static bool IsSamplerActive() { return LOGGER->IsProfilerSamplerActive(); }
- static void ResetSamplesTaken() {
- reinterpret_cast<Sampler*>(LOGGER->ticker_)->ResetSamplesTaken();
- }
- static bool has_samples_taken() {
- return reinterpret_cast<Sampler*>(LOGGER->ticker_)->samples_taken() > 0;
- }
-};
-
-} // namespace v8::internal
-} // namespace v8
-
-using v8::internal::LoggerTestHelper;
-
-
namespace {
class ScopedLoggerInitializer {
public:
explicit ScopedLoggerInitializer(bool prof_lazy)
- : saved_prof_lazy_(i::FLAG_prof_lazy),
+ : saved_log_(i::FLAG_log),
+ saved_prof_lazy_(i::FLAG_prof_lazy),
saved_prof_(i::FLAG_prof),
saved_prof_auto_(i::FLAG_prof_auto),
+ temp_file_(NULL),
+ // Need to run this prior to creating the scope.
trick_to_run_init_flags_(init_flags_(prof_lazy)),
- need_to_set_up_logger_(i::V8::IsRunning()),
scope_(),
env_(v8::Context::New()) {
- if (need_to_set_up_logger_) LOGGER->Setup();
env_->Enter();
}
~ScopedLoggerInitializer() {
env_->Exit();
LOGGER->TearDown();
+ if (temp_file_ != NULL) fclose(temp_file_);
i::FLAG_prof_lazy = saved_prof_lazy_;
i::FLAG_prof = saved_prof_;
i::FLAG_prof_auto = saved_prof_auto_;
+ i::FLAG_log = saved_log_;
}
v8::Handle<v8::Context>& env() { return env_; }
+ FILE* StopLoggingGetTempFile() {
+ temp_file_ = LOGGER->TearDown();
+ CHECK_NE(NULL, temp_file_);
+ fflush(temp_file_);
+ rewind(temp_file_);
+ return temp_file_;
+ }
+
private:
static bool init_flags_(bool prof_lazy) {
+ i::FLAG_log = true;
i::FLAG_prof = true;
i::FLAG_prof_lazy = prof_lazy;
i::FLAG_prof_auto = false;
- i::FLAG_logfile = "*";
+ i::FLAG_logfile = i::Log::kLogToTemporaryFile;
return prof_lazy;
}
+ const bool saved_log_;
const bool saved_prof_lazy_;
const bool saved_prof_;
const bool saved_prof_auto_;
+ FILE* temp_file_;
const bool trick_to_run_init_flags_;
- const bool need_to_set_up_logger_;
v8::HandleScope scope_;
v8::Handle<v8::Context> env_;
DISALLOW_COPY_AND_ASSIGN(ScopedLoggerInitializer);
};
-
-class LogBufferMatcher {
- public:
- LogBufferMatcher() {
- // Skip all initially logged stuff.
- log_pos_ = GetLogLines(0, &buffer_);
- }
-
- int log_pos() { return log_pos_; }
-
- int GetNextChunk() {
- int chunk_size = GetLogLines(log_pos_, &buffer_);
- CHECK_GT(buffer_.length(), chunk_size);
- buffer_[chunk_size] = '\0';
- log_pos_ += chunk_size;
- return chunk_size;
- }
-
- const char* Find(const char* substr) {
- return strstr(buffer_.start(), substr);
- }
-
- const char* Find(const i::Vector<char>& substr) {
- return Find(substr.start());
- }
-
- bool IsInSequence(const char* s1, const char* s2) {
- const char* s1_pos = Find(s1);
- const char* s2_pos = Find(s2);
- CHECK_NE(NULL, s1_pos);
- CHECK_NE(NULL, s2_pos);
- return s1_pos < s2_pos;
- }
-
- void PrintBuffer() {
- puts(buffer_.start());
- }
-
- private:
- EmbeddedVector<char, 102400> buffer_;
- int log_pos_;
-};
-
} // namespace
-static void CheckThatProfilerWorks(LogBufferMatcher* matcher) {
- CHECK(i::RuntimeProfiler::IsEnabled() ||
- !LoggerTestHelper::IsSamplerActive());
- LoggerTestHelper::ResetSamplesTaken();
-
- LOGGER->ResumeProfiler();
- CHECK(LoggerTestHelper::IsSamplerActive());
-
- // Verify that the current map of compiled functions has been logged.
- CHECK_GT(matcher->GetNextChunk(), 0);
- const char* code_creation = "\ncode-creation,"; // eq. to /^code-creation,/
- CHECK_NE(NULL, matcher->Find(code_creation));
-
- // Force compiler to generate new code by parametrizing source.
- EmbeddedVector<char, 100> script_src;
- i::OS::SNPrintF(script_src,
- "function f%d(x) { return %d * x; }"
- "for (var i = 0; i < 10000; ++i) { f%d(i); }",
- matcher->log_pos(), matcher->log_pos(), matcher->log_pos());
- // Run code for 200 msecs to get some ticks.
- const double end_time = i::OS::TimeCurrentMillis() + 200;
- while (i::OS::TimeCurrentMillis() < end_time) {
- CompileAndRunScript(script_src.start());
- // Yield CPU to give Profiler thread a chance to process ticks.
- i::OS::Sleep(1);
- }
-
- LOGGER->PauseProfiler();
- CHECK(i::RuntimeProfiler::IsEnabled() ||
- !LoggerTestHelper::IsSamplerActive());
-
- // Wait 50 msecs to allow Profiler thread to process the last
- // tick sample it has got.
- i::OS::Sleep(50);
-
- // Now we must have compiler and tick records.
- CHECK_GT(matcher->GetNextChunk(), 0);
- matcher->PrintBuffer();
- CHECK_NE(NULL, matcher->Find(code_creation));
- const char* tick = "\ntick,";
- const bool ticks_found = matcher->Find(tick) != NULL;
- CHECK_EQ(LoggerTestHelper::has_samples_taken(), ticks_found);
+static const char* StrNStr(const char* s1, const char* s2, int n) {
+ if (s1[n] == '\0') return strstr(s1, s2);
+ i::ScopedVector<char> str(n + 1);
+ i::OS::StrNCpy(str, s1, static_cast<size_t>(n));
+ str[n] = '\0';
+ char* found = strstr(str.start(), s2);
+ return found != NULL ? s1 + (found - str.start()) : NULL;
}
@@ -296,29 +101,61 @@ TEST(ProfLazyMode) {
if (!i::V8::UseCrankshaft()) return;
- // No sampling should happen prior to resuming profiler unless we
- // are runtime profiling.
- CHECK(i::RuntimeProfiler::IsEnabled() ||
- !LoggerTestHelper::IsSamplerActive());
+ LOGGER->StringEvent("test-start", "");
+ CompileRun("var a = (function(x) { return x + 1; })(10);");
+ LOGGER->StringEvent("test-profiler-start", "");
+ v8::V8::ResumeProfiler();
+ CompileRun(
+ "var b = (function(x) { return x + 2; })(10);\n"
+ "var c = (function(x) { return x + 3; })(10);\n"
+ "var d = (function(x) { return x + 4; })(10);\n"
+ "var e = (function(x) { return x + 5; })(10);");
+ v8::V8::PauseProfiler();
+ LOGGER->StringEvent("test-profiler-stop", "");
+ CompileRun("var f = (function(x) { return x + 6; })(10);");
+ // Check that profiling can be resumed again.
+ LOGGER->StringEvent("test-profiler-start-2", "");
+ v8::V8::ResumeProfiler();
+ CompileRun(
+ "var g = (function(x) { return x + 7; })(10);\n"
+ "var h = (function(x) { return x + 8; })(10);\n"
+ "var i = (function(x) { return x + 9; })(10);\n"
+ "var j = (function(x) { return x + 10; })(10);");
+ v8::V8::PauseProfiler();
+ LOGGER->StringEvent("test-profiler-stop-2", "");
+ LOGGER->StringEvent("test-stop", "");
+
+ bool exists = false;
+ i::Vector<const char> log(
+ i::ReadFile(initialize_logger.StopLoggingGetTempFile(), &exists, true));
+ CHECK(exists);
+
+ const char* test_start_position =
+ StrNStr(log.start(), "test-start,", log.length());
+ CHECK_NE(NULL, test_start_position);
+ const char* test_profiler_start_position =
+ StrNStr(log.start(), "test-profiler-start,", log.length());
+ CHECK_NE(NULL, test_profiler_start_position);
+ CHECK_GT(test_profiler_start_position, test_start_position);
+ const char* test_profiler_stop_position =
+ StrNStr(log.start(), "test-profiler-stop,", log.length());
+ CHECK_NE(NULL, test_profiler_stop_position);
+ CHECK_GT(test_profiler_stop_position, test_profiler_start_position);
+ const char* test_profiler_start_2_position =
+ StrNStr(log.start(), "test-profiler-start-2,", log.length());
+ CHECK_NE(NULL, test_profiler_start_2_position);
+ CHECK_GT(test_profiler_start_2_position, test_profiler_stop_position);
- LogBufferMatcher matcher;
// Nothing must be logged until profiling is resumed.
- CHECK_EQ(0, matcher.log_pos());
-
- CompileAndRunScript("var a = (function(x) { return x + 1; })(10);");
-
+ CHECK_EQ(NULL, StrNStr(test_start_position,
+ "code-creation,",
+ static_cast<int>(test_profiler_start_position -
+ test_start_position)));
// Nothing must be logged while profiling is suspended.
- CHECK_EQ(0, matcher.GetNextChunk());
-
- CheckThatProfilerWorks(&matcher);
-
- CompileAndRunScript("var a = (function(x) { return x + 1; })(10);");
-
- // No new data beyond last retrieved position.
- CHECK_EQ(0, matcher.GetNextChunk());
-
- // Check that profiling can be resumed again.
- CheckThatProfilerWorks(&matcher);
+ CHECK_EQ(NULL, StrNStr(test_profiler_stop_position,
+ "code-creation,",
+ static_cast<int>(test_profiler_start_2_position -
+ test_profiler_stop_position)));
}
@@ -383,7 +220,7 @@ class LoopingJsThread : public LoopingThread {
{
v8::Context::Scope context_scope(context);
SignalRunning();
- CompileAndRunScript(
+ CompileRun(
"var j; for (var i=0; i<10000; ++i) { j = Math.sin(i); }");
}
context.Dispose();
@@ -531,34 +368,34 @@ static v8::Handle<v8::Value> ObjMethod1(const v8::Arguments& args) {
TEST(LogCallbacks) {
ScopedLoggerInitializer initialize_logger(false);
- LogBufferMatcher matcher;
v8::Persistent<v8::FunctionTemplate> obj =
v8::Persistent<v8::FunctionTemplate>::New(v8::FunctionTemplate::New());
- obj->SetClassName(v8::String::New("Obj"));
+ obj->SetClassName(v8_str("Obj"));
v8::Handle<v8::ObjectTemplate> proto = obj->PrototypeTemplate();
v8::Local<v8::Signature> signature = v8::Signature::New(obj);
- proto->Set(v8::String::New("method1"),
+ proto->Set(v8_str("method1"),
v8::FunctionTemplate::New(ObjMethod1,
v8::Handle<v8::Value>(),
signature),
static_cast<v8::PropertyAttribute>(v8::DontDelete));
initialize_logger.env()->Global()->Set(v8_str("Obj"), obj->GetFunction());
- CompileAndRunScript("Obj.prototype.method1.toString();");
+ CompileRun("Obj.prototype.method1.toString();");
LOGGER->LogCompiledFunctions();
- CHECK_GT(matcher.GetNextChunk(), 0);
- const char* callback_rec = "code-creation,Callback,";
- char* pos = const_cast<char*>(matcher.Find(callback_rec));
- CHECK_NE(NULL, pos);
- pos += strlen(callback_rec);
- EmbeddedVector<char, 100> ref_data;
+ bool exists = false;
+ i::Vector<const char> log(
+ i::ReadFile(initialize_logger.StopLoggingGetTempFile(), &exists, true));
+ CHECK(exists);
+
+ i::EmbeddedVector<char, 100> ref_data;
i::OS::SNPrintF(ref_data,
- "0x%" V8PRIxPTR ",1,\"method1\"", ObjMethod1);
- *(pos + strlen(ref_data.start())) = '\0';
- CHECK_EQ(ref_data.start(), pos);
+ "code-creation,Callback,0x%" V8PRIxPTR ",1,\"method1\"\0",
+ ObjMethod1);
+
+ CHECK_NE(NULL, StrNStr(log.start(), ref_data.start(), log.length()));
obj.Dispose();
}
@@ -581,34 +418,41 @@ static v8::Handle<v8::Value> Prop2Getter(v8::Local<v8::String> property,
TEST(LogAccessorCallbacks) {
ScopedLoggerInitializer initialize_logger(false);
- LogBufferMatcher matcher;
v8::Persistent<v8::FunctionTemplate> obj =
v8::Persistent<v8::FunctionTemplate>::New(v8::FunctionTemplate::New());
- obj->SetClassName(v8::String::New("Obj"));
+ obj->SetClassName(v8_str("Obj"));
v8::Handle<v8::ObjectTemplate> inst = obj->InstanceTemplate();
- inst->SetAccessor(v8::String::New("prop1"), Prop1Getter, Prop1Setter);
- inst->SetAccessor(v8::String::New("prop2"), Prop2Getter);
+ inst->SetAccessor(v8_str("prop1"), Prop1Getter, Prop1Setter);
+ inst->SetAccessor(v8_str("prop2"), Prop2Getter);
LOGGER->LogAccessorCallbacks();
- CHECK_GT(matcher.GetNextChunk(), 0);
- matcher.PrintBuffer();
+
+ bool exists = false;
+ i::Vector<const char> log(
+ i::ReadFile(initialize_logger.StopLoggingGetTempFile(), &exists, true));
+ CHECK(exists);
EmbeddedVector<char, 100> prop1_getter_record;
i::OS::SNPrintF(prop1_getter_record,
"code-creation,Callback,0x%" V8PRIxPTR ",1,\"get prop1\"",
Prop1Getter);
- CHECK_NE(NULL, matcher.Find(prop1_getter_record));
+ CHECK_NE(NULL,
+ StrNStr(log.start(), prop1_getter_record.start(), log.length()));
+
EmbeddedVector<char, 100> prop1_setter_record;
i::OS::SNPrintF(prop1_setter_record,
"code-creation,Callback,0x%" V8PRIxPTR ",1,\"set prop1\"",
Prop1Setter);
- CHECK_NE(NULL, matcher.Find(prop1_setter_record));
+ CHECK_NE(NULL,
+ StrNStr(log.start(), prop1_setter_record.start(), log.length()));
+
EmbeddedVector<char, 100> prop2_getter_record;
i::OS::SNPrintF(prop2_getter_record,
"code-creation,Callback,0x%" V8PRIxPTR ",1,\"get prop2\"",
Prop2Getter);
- CHECK_NE(NULL, matcher.Find(prop2_getter_record));
+ CHECK_NE(NULL,
+ StrNStr(log.start(), prop2_getter_record.start(), log.length()));
obj.Dispose();
}
@@ -625,377 +469,6 @@ TEST(IsLoggingPreserved) {
}
-static inline bool IsStringEqualTo(const char* r, const char* s) {
- return strncmp(r, s, strlen(r)) == 0;
-}
-
-
-static bool Consume(const char* str, char** buf) {
- if (IsStringEqualTo(str, *buf)) {
- *buf += strlen(str);
- return true;
- }
- return false;
-}
-
-
-namespace {
-
-// A code entity is a pointer to a position of code-creation event in buffer log
-// offset to a point where entity size begins, i.e.: '255,"func"\n'. This makes
-// comparing code entities pretty easy.
-typedef char* CodeEntityInfo;
-
-class Interval {
- public:
- Interval()
- : min_addr_(reinterpret_cast<Address>(-1)),
- max_addr_(reinterpret_cast<Address>(0)), next_(NULL) {}
-
- ~Interval() { delete next_; }
-
- size_t Length() {
- size_t result = max_addr_ - min_addr_ + 1;
- if (next_ != NULL) result += next_->Length();
- return result;
- }
-
- void CloneFrom(Interval* src) {
- while (src != NULL) {
- RegisterAddress(src->min_addr_);
- RegisterAddress(src->max_addr_);
- src = src->next_;
- }
- }
-
- bool Contains(Address addr) {
- if (min_addr_ <= addr && addr <= max_addr_) {
- return true;
- }
- if (next_ != NULL) {
- return next_->Contains(addr);
- } else {
- return false;
- }
- }
-
- size_t GetIndex(Address addr) {
- if (min_addr_ <= addr && addr <= max_addr_) {
- return addr - min_addr_;
- }
- CHECK_NE(NULL, next_);
- return (max_addr_ - min_addr_ + 1) + next_->GetIndex(addr);
- }
-
- Address GetMinAddr() {
- return next_ == NULL ? min_addr_ : i::Min(min_addr_, next_->GetMinAddr());
- }
-
- Address GetMaxAddr() {
- return next_ == NULL ? max_addr_ : i::Max(max_addr_, next_->GetMaxAddr());
- }
-
- void RegisterAddress(Address addr) {
- if (min_addr_ == reinterpret_cast<Address>(-1)
- || (size_t)(addr > min_addr_ ?
- addr - min_addr_ : min_addr_ - addr) < MAX_DELTA) {
- if (addr < min_addr_) min_addr_ = addr;
- if (addr > max_addr_) max_addr_ = addr;
- } else {
- if (next_ == NULL) next_ = new Interval();
- next_->RegisterAddress(addr);
- }
- }
-
- Address raw_min_addr() { return min_addr_; }
-
- Address raw_max_addr() { return max_addr_; }
-
- Interval* get_next() { return next_; }
-
- private:
- static const size_t MAX_DELTA = 0x100000;
- Address min_addr_;
- Address max_addr_;
- Interval* next_;
-};
-
-
-// A structure used to return log parsing results.
-class ParseLogResult {
- public:
- ParseLogResult()
- : entities_map(NULL), entities(NULL),
- max_entities(0) {}
-
- ~ParseLogResult() {
- i::DeleteArray(entities_map);
- i::DeleteArray(entities);
- }
-
- void AllocateEntities() {
- // Make sure that the test doesn't operate on a bogus log.
- CHECK_GT(max_entities, 0);
- CHECK_GT(bounds.GetMinAddr(), 0);
- CHECK_GT(bounds.GetMaxAddr(), bounds.GetMinAddr());
-
- entities = i::NewArray<CodeEntityInfo>(max_entities);
- for (int i = 0; i < max_entities; ++i) {
- entities[i] = NULL;
- }
- const size_t map_length = bounds.Length();
- entities_map = i::NewArray<int>(static_cast<int>(map_length));
- for (size_t i = 0; i < map_length; ++i) {
- entities_map[i] = -1;
- }
- }
-
- bool HasIndexForAddress(Address addr) {
- return bounds.Contains(addr);
- }
-
- size_t GetIndexForAddress(Address addr) {
- CHECK(HasIndexForAddress(addr));
- return bounds.GetIndex(addr);
- }
-
- CodeEntityInfo GetEntity(Address addr) {
- if (HasIndexForAddress(addr)) {
- size_t idx = GetIndexForAddress(addr);
- int item = entities_map[idx];
- return item != -1 ? entities[item] : NULL;
- }
- return NULL;
- }
-
- void ParseAddress(char* start) {
- Address addr =
- reinterpret_cast<Address>(strtoul(start, NULL, 16)); // NOLINT
- bounds.RegisterAddress(addr);
- }
-
- Address ConsumeAddress(char** start) {
- char* end_ptr;
- Address addr =
- reinterpret_cast<Address>(strtoul(*start, &end_ptr, 16)); // NOLINT
- CHECK(HasIndexForAddress(addr));
- *start = end_ptr;
- return addr;
- }
-
- Interval bounds;
- // Memory map of entities start addresses.
- int* entities_map;
- // An array of code entities.
- CodeEntityInfo* entities;
- // Maximal entities count. Actual entities count can be lower,
- // empty entity slots are pointing to NULL.
- int max_entities;
-};
-
-} // namespace
-
-
-typedef void (*ParserBlock)(char* start, char* end, ParseLogResult* result);
-
-static void ParserCycle(
- char* start, char* end, ParseLogResult* result,
- ParserBlock block_creation, ParserBlock block_delete,
- ParserBlock block_move) {
-
- const char* code_creation = "code-creation,";
- const char* code_delete = "code-delete,";
- const char* code_move = "code-move,";
-
- const char* lazy_compile = "LazyCompile,";
- const char* script = "Script,";
- const char* function = "Function,";
-
- while (start < end) {
- if (Consume(code_creation, &start)) {
- if (Consume(lazy_compile, &start)
- || Consume(script, &start)
- || Consume(function, &start)) {
- block_creation(start, end, result);
- }
- } else if (Consume(code_delete, &start)) {
- block_delete(start, end, result);
- } else if (Consume(code_move, &start)) {
- block_move(start, end, result);
- }
- while (start < end && *start != '\n') ++start;
- ++start;
- }
-}
-
-
-static void Pass1CodeCreation(char* start, char* end, ParseLogResult* result) {
- result->ParseAddress(start);
- ++result->max_entities;
-}
-
-
-static void Pass1CodeDelete(char* start, char* end, ParseLogResult* result) {
- result->ParseAddress(start);
-}
-
-
-static void Pass1CodeMove(char* start, char* end, ParseLogResult* result) {
- result->ParseAddress(start);
- // Skip old address.
- while (start < end && *start != ',') ++start;
- CHECK_GT(end, start);
- ++start; // Skip ','.
- result->ParseAddress(start);
-}
-
-
-static void Pass2CodeCreation(char* start, char* end, ParseLogResult* result) {
- Address addr = result->ConsumeAddress(&start);
- CHECK_GT(end, start);
- ++start; // Skip ','.
-
- size_t idx = result->GetIndexForAddress(addr);
- result->entities_map[idx] = -1;
- for (int i = 0; i < result->max_entities; ++i) {
- // Find an empty slot and fill it.
- if (result->entities[i] == NULL) {
- result->entities[i] = start;
- result->entities_map[idx] = i;
- break;
- }
- }
- // Make sure that a slot was found.
- CHECK_GE(result->entities_map[idx], 0);
-}
-
-
-static void Pass2CodeDelete(char* start, char* end, ParseLogResult* result) {
- Address addr = result->ConsumeAddress(&start);
- size_t idx = result->GetIndexForAddress(addr);
- // There can be code deletes that are not related to JS code.
- if (result->entities_map[idx] >= 0) {
- result->entities[result->entities_map[idx]] = NULL;
- result->entities_map[idx] = -1;
- }
-}
-
-
-static void Pass2CodeMove(char* start, char* end, ParseLogResult* result) {
- Address from_addr = result->ConsumeAddress(&start);
- CHECK_GT(end, start);
- ++start; // Skip ','.
- Address to_addr = result->ConsumeAddress(&start);
- CHECK_GT(end, start);
-
- size_t from_idx = result->GetIndexForAddress(from_addr);
- size_t to_idx = result->GetIndexForAddress(to_addr);
- // There can be code moves that are not related to JS code.
- if (from_idx != to_idx && result->entities_map[from_idx] >= 0) {
- CHECK_EQ(-1, result->entities_map[to_idx]);
- result->entities_map[to_idx] = result->entities_map[from_idx];
- result->entities_map[from_idx] = -1;
- };
-}
-
-
-static void ParseLog(char* start, char* end, ParseLogResult* result) {
- // Pass 1: Calculate boundaries of addresses and entities count.
- ParserCycle(start, end, result,
- Pass1CodeCreation, Pass1CodeDelete, Pass1CodeMove);
-
- printf("min_addr: %p, max_addr: %p, entities: %d\n",
- result->bounds.GetMinAddr(), result->bounds.GetMaxAddr(),
- result->max_entities);
-
- result->AllocateEntities();
-
- // Pass 2: Fill in code entries data.
- ParserCycle(start, end, result,
- Pass2CodeCreation, Pass2CodeDelete, Pass2CodeMove);
-}
-
-
-static inline void PrintCodeEntityInfo(CodeEntityInfo entity) {
- const int max_len = 50;
- if (entity != NULL) {
- char* eol = strchr(entity, '\n');
- int len = static_cast<int>(eol - entity);
- len = len <= max_len ? len : max_len;
- printf("%-*.*s ", max_len, len, entity);
- } else {
- printf("%*s", max_len + 1, "");
- }
-}
-
-
-static void PrintCodeEntitiesInfo(
- bool is_equal, Address addr,
- CodeEntityInfo l_entity, CodeEntityInfo r_entity) {
- printf("%c %p ", is_equal ? ' ' : '*', addr);
- PrintCodeEntityInfo(l_entity);
- PrintCodeEntityInfo(r_entity);
- printf("\n");
-}
-
-
-static inline int StrChrLen(const char* s, char c) {
- return static_cast<int>(strchr(s, c) - s);
-}
-
-
-static bool AreFuncSizesEqual(CodeEntityInfo ref_s, CodeEntityInfo new_s) {
- int ref_len = StrChrLen(ref_s, ',');
- int new_len = StrChrLen(new_s, ',');
- return ref_len == new_len && strncmp(ref_s, new_s, ref_len) == 0;
-}
-
-
-static bool AreFuncNamesEqual(CodeEntityInfo ref_s, CodeEntityInfo new_s) {
- // Skip size.
- ref_s = strchr(ref_s, ',') + 1;
- new_s = strchr(new_s, ',') + 1;
- CHECK_EQ('"', ref_s[0]);
- CHECK_EQ('"', new_s[0]);
- int ref_len = StrChrLen(ref_s + 1, '\"');
- int new_len = StrChrLen(new_s + 1, '\"');
- // A special case for ErrorPrototype. Haven't yet figured out why they
- // are different.
- const char* error_prototype = "\"ErrorPrototype";
- if (IsStringEqualTo(error_prototype, ref_s)
- && IsStringEqualTo(error_prototype, new_s)) {
- return true;
- }
- // Built-in objects have problems too.
- const char* built_ins[] = {
- "\"Boolean\"", "\"Function\"", "\"Number\"",
- "\"Object\"", "\"Script\"", "\"String\""
- };
- for (size_t i = 0; i < sizeof(built_ins) / sizeof(*built_ins); ++i) {
- if (IsStringEqualTo(built_ins[i], new_s)) {
- return true;
- }
- }
- return ref_len == new_len && strncmp(ref_s, new_s, ref_len) == 0;
-}
-
-
-static bool AreEntitiesEqual(CodeEntityInfo ref_e, CodeEntityInfo new_e) {
- if (ref_e == NULL && new_e != NULL) return true;
- if (ref_e != NULL && new_e != NULL) {
- return AreFuncSizesEqual(ref_e, new_e) && AreFuncNamesEqual(ref_e, new_e);
- }
- if (ref_e != NULL && new_e == NULL) {
- // args_count entities (argument adapters) are not found by heap traversal,
- // but they are not needed because they doesn't contain any code.
- ref_e = strchr(ref_e, ',') + 1;
- const char* args_count = "\"args_count:";
- return IsStringEqualTo(args_count, ref_e);
- }
- return false;
-}
-
-
// Test that logging of code create / move / delete events
// is equivalent to traversal of a resulting heap.
TEST(EquivalenceOfLoggingAndTraversal) {
@@ -1008,86 +481,68 @@ TEST(EquivalenceOfLoggingAndTraversal) {
// P.S. No, V8 can't be re-initialized after disposal, see include/v8.h.
CHECK(!i::V8::IsRunning());
- i::FLAG_logfile = "*";
- i::FLAG_log = true;
- i::FLAG_log_code = true;
-
- // Make sure objects move.
- bool saved_always_compact = i::FLAG_always_compact;
- if (!i::FLAG_never_compact) {
- i::FLAG_always_compact = true;
- }
-
- v8::HandleScope scope;
- v8::Handle<v8::Value> global_object = v8::Handle<v8::Value>();
- v8::Handle<v8::Context> env = v8::Context::New(
- 0, v8::Handle<v8::ObjectTemplate>(), global_object);
- env->Enter();
+ // Start with profiling to capture all code events from the beginning.
+ ScopedLoggerInitializer initialize_logger(false);
// Compile and run a function that creates other functions.
- CompileAndRunScript(
+ CompileRun(
"(function f(obj) {\n"
" obj.test =\n"
" (function a(j) { return function b() { return j; } })(100);\n"
"})(this);");
- HEAP->CollectAllGarbage(false);
-
- EmbeddedVector<char, 204800> buffer;
- int log_size;
- ParseLogResult ref_result;
-
- // Retrieve the log.
- {
- // Make sure that no GCs occur prior to LogCompiledFunctions call.
- i::AssertNoAllocation no_alloc;
-
- log_size = GetLogLines(0, &buffer);
- CHECK_GT(log_size, 0);
- CHECK_GT(buffer.length(), log_size);
-
- // Fill a map of compiled code objects.
- ParseLog(buffer.start(), buffer.start() + log_size, &ref_result);
- }
+ v8::V8::PauseProfiler();
+ HEAP->CollectAllGarbage(true);
+ LOGGER->StringEvent("test-logging-done", "");
// Iterate heap to find compiled functions, will write to log.
LOGGER->LogCompiledFunctions();
- char* new_log_start = buffer.start() + log_size;
- const int new_log_size = LOGGER->GetLogLines(
- log_size, new_log_start, buffer.length() - log_size);
- CHECK_GT(new_log_size, 0);
- CHECK_GT(buffer.length(), log_size + new_log_size);
-
- // Fill an equivalent map of compiled code objects.
- ParseLogResult new_result;
- ParseLog(new_log_start, new_log_start + new_log_size, &new_result);
-
- // Test their actual equivalence.
- Interval combined;
- combined.CloneFrom(&ref_result.bounds);
- combined.CloneFrom(&new_result.bounds);
- Interval* iter = &combined;
- bool results_equal = true;
-
- while (iter != NULL) {
- for (Address addr = iter->raw_min_addr();
- addr <= iter->raw_max_addr(); ++addr) {
- CodeEntityInfo ref_entity = ref_result.GetEntity(addr);
- CodeEntityInfo new_entity = new_result.GetEntity(addr);
- if (ref_entity != NULL || new_entity != NULL) {
- const bool equal = AreEntitiesEqual(ref_entity, new_entity);
- if (!equal) results_equal = false;
- PrintCodeEntitiesInfo(equal, addr, ref_entity, new_entity);
- }
+ LOGGER->StringEvent("test-traversal-done", "");
+
+ bool exists = false;
+ i::Vector<const char> log(
+ i::ReadFile(initialize_logger.StopLoggingGetTempFile(), &exists, true));
+ CHECK(exists);
+ v8::Handle<v8::String> log_str = v8::String::New(log.start(), log.length());
+ initialize_logger.env()->Global()->Set(v8_str("_log"), log_str);
+
+ const char* scripts[] = {
+ "tools/splaytree.js", "tools/codemap.js", "tools/csvparser.js",
+ "tools/consarray.js", "tools/profile.js", "tools/profile_view.js",
+ "tools/logreader.js", "test/cctest/log-eq-of-logging-and-traversal.js"
+ };
+ int scripts_count = sizeof(scripts) / sizeof(scripts[0]);
+ v8::Handle<v8::Value> last_result;
+ for (int i = 0; i < scripts_count; ++i) {
+ bool exists = true;
+ i::Vector<const char> source(i::ReadFile(scripts[i], &exists, true));
+ CHECK(exists);
+ CHECK_GT(source.length(), 0);
+ v8::Handle<v8::String> source_str =
+ v8::String::New(source.start(), source.length());
+ v8::TryCatch try_catch;
+ v8::Handle<v8::Script> script =
+ v8::Script::Compile(source_str, v8_str(scripts[i]));
+ if (script.IsEmpty()) {
+ v8::String::Utf8Value exception(try_catch.Exception());
+ printf("compile %s: %s\n", scripts[i], *exception);
+ CHECK(false);
+ }
+ last_result = script->Run();
+ if (last_result.IsEmpty()) {
+ v8::String::Utf8Value exception(try_catch.Exception());
+ printf("run %s: %s\n", scripts[i], *exception);
+ CHECK(false);
}
- iter = iter->get_next();
}
- // Make sure that all log data is written prior crash due to CHECK failure.
- fflush(stdout);
- CHECK(results_equal);
-
- env->Exit();
- LOGGER->TearDown();
- i::FLAG_always_compact = saved_always_compact;
+ // The result is either a "true" literal or a problem description.
+ if (!last_result->IsTrue()) {
+ v8::Local<v8::String> s = last_result->ToString();
+ i::ScopedVector<char> data(s->Length() + 1);
+ CHECK_NE(NULL, data.start());
+ s->WriteAscii(data.start());
+ printf("%s\n", data.start());
+ // Make sure that our output is written prior to a crash due to CHECK failure.
+ fflush(stdout);
+ CHECK(false);
+ }
}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-profile-generator.cc b/deps/v8/test/cctest/test-profile-generator.cc
index fbe5834e54..6d30443178 100644
--- a/deps/v8/test/cctest/test-profile-generator.cc
+++ b/deps/v8/test/cctest/test-profile-generator.cc
@@ -2,8 +2,6 @@
//
// Tests of profiles generator and utilities.
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
#include "v8.h"
#include "profile-generator-inl.h"
#include "cctest.h"
@@ -824,5 +822,3 @@ TEST(Issue51919) {
for (int i = 0; i < CpuProfilesCollection::kMaxSimultaneousProfiles; ++i)
i::DeleteArray(titles[i]);
}
-
-#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/deps/v8/test/cctest/test-regexp.cc b/deps/v8/test/cctest/test-regexp.cc
index 6588c68b7d..9f18b600b2 100644
--- a/deps/v8/test/cctest/test-regexp.cc
+++ b/deps/v8/test/cctest/test-regexp.cc
@@ -41,19 +41,25 @@
#ifdef V8_INTERPRETED_REGEXP
#include "interpreter-irregexp.h"
#else // V8_INTERPRETED_REGEXP
+#include "macro-assembler.h"
+#include "code.h"
#ifdef V8_TARGET_ARCH_ARM
+#include "arm/assembler-arm.h"
#include "arm/macro-assembler-arm.h"
#include "arm/regexp-macro-assembler-arm.h"
#endif
#ifdef V8_TARGET_ARCH_MIPS
+#include "mips/assembler-mips.h"
#include "mips/macro-assembler-mips.h"
#include "mips/regexp-macro-assembler-mips.h"
#endif
#ifdef V8_TARGET_ARCH_X64
+#include "x64/assembler-x64.h"
#include "x64/macro-assembler-x64.h"
#include "x64/regexp-macro-assembler-x64.h"
#endif
#ifdef V8_TARGET_ARCH_IA32
+#include "ia32/assembler-ia32.h"
#include "ia32/macro-assembler-ia32.h"
#include "ia32/regexp-macro-assembler-ia32.h"
#endif
diff --git a/deps/v8/test/message/regress/regress-1527.js b/deps/v8/test/message/regress/regress-1527.js
new file mode 100644
index 0000000000..682e386d3d
--- /dev/null
+++ b/deps/v8/test/message/regress/regress-1527.js
@@ -0,0 +1,33 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var o1 = {foo: 'bar'};
+var o2 = {
+ 1: 'blah',
+ 2: o1.foo,
+ 3: foo
+}
diff --git a/deps/v8/test/message/regress/regress-1527.out b/deps/v8/test/message/regress/regress-1527.out
new file mode 100644
index 0000000000..dc17fb3517
--- /dev/null
+++ b/deps/v8/test/message/regress/regress-1527.out
@@ -0,0 +1,32 @@
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*%(basename)s:32: ReferenceError: foo is not defined
+ 3: foo
+ ^
+ReferenceError: foo is not defined
+ at *%(basename)s:32:6
diff --git a/deps/v8/test/mjsunit/debug-backtrace.js b/deps/v8/test/mjsunit/debug-backtrace.js
index d15b2d2619..3647913ac1 100644
--- a/deps/v8/test/mjsunit/debug-backtrace.js
+++ b/deps/v8/test/mjsunit/debug-backtrace.js
@@ -195,7 +195,7 @@ function listener(event, exec_state, event_data, data) {
assertEquals("m", response.lookup(frame.func.ref).inferredName);
assertFalse(frame.constructCall);
assertEquals(35, frame.line);
- assertEquals(2, frame.column);
+ assertEquals(6, frame.column);
assertEquals(0, frame.arguments.length);
json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":3}}'
@@ -269,4 +269,3 @@ g();
// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener");
assertTrue(listenerCalled);
-
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
index 10dfbabf0c..584d1afdab 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -29,8 +29,10 @@
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
-listenerComplete = false;
-exception = false;
+var listenerComplete = false;
+var exception = false;
+
+var testingConstructCall = false;
function listener(event, exec_state, event_data, data) {
@@ -41,16 +43,38 @@ function listener(event, exec_state, event_data, data) {
for (var i = 0; i < exec_state.frameCount(); i++) {
var frame = exec_state.frame(i);
- // All frames except the bottom one has normal variables a and b.
if (i < exec_state.frameCount() - 1) {
+ // All frames except the bottom one have normal variables a and b.
assertEquals('a', frame.localName(0));
assertEquals('b', frame.localName(1));
assertEquals(i * 2 + 1 + (i * 2 + 1) / 100,
frame.localValue(0).value());
assertEquals(i * 2 + 2 + (i * 2 + 2) / 100,
frame.localValue(1).value());
+
+ // All frames except the bottom one have arguments variables x and y.
+ assertEquals('x', frame.argumentName(0));
+ assertEquals('y', frame.argumentName(1));
+ assertEquals((i + 1) * 2 + 1 + ((i + 1) * 2 + 1) / 100,
+ frame.argumentValue(0).value());
+ assertEquals((i + 1) * 2 + 2 + ((i + 1) * 2 + 2) / 100,
+ frame.argumentValue(1).value());
}
+ // Check the frame function.
+ switch (i) {
+ case 0: assertEquals(h, frame.func().value()); break;
+ case 1: assertEquals(g3, frame.func().value()); break;
+ case 2: assertEquals(g2, frame.func().value()); break;
+ case 3: assertEquals(g1, frame.func().value()); break;
+ case 4: assertEquals(f, frame.func().value()); break;
+ case 5: break;
+ default: assertUnreachable();
+ }
+
+ // Check for construct call.
+ assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+
// When function f is optimized (2 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
if (%GetOptimizationStatus(f) == 2) {
@@ -123,7 +147,10 @@ function f(x, y) {
g1(a, b);
};
+// Test calling f normally and as a constructor.
f(11.11, 12.12);
+testingConstructCall = true;
+new f(11.11, 12.12);
// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
index cdff0b718d..1aaf29633c 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -29,8 +29,10 @@
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
-listenerComplete = false;
-exception = false;
+var listenerComplete = false;
+var exception = false;
+
+var testingConstructCall = false;
function listener(event, exec_state, event_data, data) {
@@ -41,14 +43,34 @@ function listener(event, exec_state, event_data, data) {
for (var i = 0; i < exec_state.frameCount(); i++) {
var frame = exec_state.frame(i);
- // All frames except the bottom one has normal variables a and b.
if (i < exec_state.frameCount() - 1) {
+ // All frames except the bottom one have normal variables a and b.
assertEquals('a', frame.localName(0));
assertEquals('b', frame.localName(1));
assertEquals(i * 2 + 1, frame.localValue(0).value());
assertEquals(i * 2 + 2, frame.localValue(1).value());
+
+ // All frames except the bottom one have arguments variables x and y.
+ assertEquals('x', frame.argumentName(0));
+ assertEquals('y', frame.argumentName(1));
+ assertEquals((i + 1) * 2 + 1, frame.argumentValue(0).value());
+ assertEquals((i + 1) * 2 + 2, frame.argumentValue(1).value());
}
+ // Check the frame function.
+ switch (i) {
+ case 0: assertEquals(h, frame.func().value()); break;
+ case 1: assertEquals(g3, frame.func().value()); break;
+ case 2: assertEquals(g2, frame.func().value()); break;
+ case 3: assertEquals(g1, frame.func().value()); break;
+ case 4: assertEquals(f, frame.func().value()); break;
+ case 5: break;
+ default: assertUnreachable();
+ }
+
+ // Check for construct call.
+ assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+
// When function f is optimized (2 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
if (%GetOptimizationStatus(f) == 2) {
@@ -110,7 +132,10 @@ function f(x, y) {
g1(a, b);
};
+// Test calling f normally and as a constructor.
f(11, 12);
+testingConstructCall = true;
+new f(11, 12);
// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
diff --git a/deps/v8/test/mjsunit/harmony/proxies.js b/deps/v8/test/mjsunit/harmony/proxies.js
index 62bee870b7..37f351317d 100644
--- a/deps/v8/test/mjsunit/harmony/proxies.js
+++ b/deps/v8/test/mjsunit/harmony/proxies.js
@@ -35,7 +35,6 @@ function TestGet(handler) {
var o = Proxy.create(handler)
assertEquals(42, o.a)
assertEquals(42, o["b"])
-// assertEquals(Object.getOwnPropertyDescriptor(o, "b").value, 42)
}
TestGet({
@@ -69,6 +68,64 @@ TestGet(Proxy.create({
}))
+function TestGetCall(handler) {
+ var p = Proxy.create(handler)
+ assertEquals(55, p.f())
+ assertEquals(55, p.f("unused", "arguments"))
+ assertEquals(55, p.f.call(p))
+ assertEquals(55, p.withargs(45, 5))
+ assertEquals(55, p.withargs.call(p, 11, 22))
+ assertEquals("6655", "66" + p) // calls p.toString
+}
+
+TestGetCall({
+ get: function(r, k) { return function() { return 55 } }
+})
+TestGetCall({
+ get: function(r, k) { return this.get2(r, k) },
+ get2: function(r, k) { return function() { return 55 } }
+})
+TestGetCall({
+ getPropertyDescriptor: function(k) {
+ return {value: function() { return 55 }}
+ }
+})
+TestGetCall({
+ getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
+ getPropertyDescriptor2: function(k) {
+ return {value: function() { return 55 }}
+ }
+})
+TestGetCall({
+ getPropertyDescriptor: function(k) {
+ return {get value() { return function() { return 55 } }}
+ }
+})
+TestGetCall({
+ get: undefined,
+ getPropertyDescriptor: function(k) {
+ return {value: function() { return 55 }}
+ }
+})
+TestGetCall({
+ get: function(r, k) {
+ if (k == "gg") {
+ return function() { return 55 }
+ } else if (k == "withargs") {
+ return function(n, m) { return n + m * 2 }
+ } else {
+ return function() { return this.gg() }
+ }
+ }
+})
+
+TestGetCall(Proxy.create({
+ get: function(pr, pk) {
+ return function(r, k) { return function() { return 55 } }
+ }
+}))
+
+
// Setters.
@@ -82,9 +139,6 @@ function TestSet(handler) {
assertEquals(43, o["b"] = 43)
assertEquals("b", key)
assertEquals(43, val)
-// assertTrue(Object.defineProperty(o, "c", {value: 44}))
-// assertEquals("c", key)
-// assertEquals(44, val)
}
TestSet({
@@ -149,6 +203,130 @@ TestSet(Proxy.create({
+// Property definition (Object.defineProperty and Object.defineProperties).
+
+var key
+var desc
+function TestDefine(handler) {
+ var o = Proxy.create(handler)
+ assertEquals(o, Object.defineProperty(o, "a", {value: 44}))
+ assertEquals("a", key)
+ assertEquals(1, Object.getOwnPropertyNames(desc).length)
+ assertEquals(44, desc.value)
+
+ assertEquals(o, Object.defineProperty(o, "b", {value: 45, writable: false}))
+ assertEquals("b", key)
+ assertEquals(2, Object.getOwnPropertyNames(desc).length)
+ assertEquals(45, desc.value)
+ assertEquals(false, desc.writable)
+
+ assertEquals(o, Object.defineProperty(o, "c", {value: 46, enumerable: false}))
+ assertEquals("c", key)
+ assertEquals(2, Object.getOwnPropertyNames(desc).length)
+ assertEquals(46, desc.value)
+ assertEquals(false, desc.enumerable)
+
+ var attributes = {configurable: true, mine: 66, minetoo: 23}
+ assertEquals(o, Object.defineProperty(o, "d", attributes))
+ assertEquals("d", key)
+ // Modifying the attributes object after the fact should have no effect.
+ attributes.configurable = false
+ attributes.mine = 77
+ delete attributes.minetoo
+ assertEquals(3, Object.getOwnPropertyNames(desc).length)
+ assertEquals(true, desc.configurable)
+ assertEquals(66, desc.mine)
+ assertEquals(23, desc.minetoo)
+
+ assertEquals(o, Object.defineProperty(o, "e", {get: function(){ return 5 }}))
+ assertEquals("e", key)
+ assertEquals(1, Object.getOwnPropertyNames(desc).length)
+ assertEquals(5, desc.get())
+
+ assertEquals(o, Object.defineProperty(o, "zzz", {}))
+ assertEquals("zzz", key)
+ assertEquals(0, Object.getOwnPropertyNames(desc).length)
+
+// TODO(rossberg): This test requires [s in proxy] to be implemented first.
+// var d = Proxy.create({
+// get: function(r, k) { return (k === "value") ? 77 : void 0 },
+// getOwnPropertyNames: function() { return ["value"] }
+// })
+// assertEquals(1, Object.getOwnPropertyNames(d).length)
+// assertEquals(77, d.value)
+// assertEquals(o, Object.defineProperty(o, "p", d))
+// assertEquals("p", key)
+// assertEquals(1, Object.getOwnPropertyNames(desc).length)
+// assertEquals(77, desc.value)
+
+ var props = {
+ 'bla': {},
+ blub: {get: function() { return true }},
+ '': {get value() { return 20 }},
+ last: {value: 21, configurable: true, mine: "eyes"}
+ }
+ Object.defineProperty(props, "hidden", {value: "hidden", enumerable: false})
+ assertEquals(o, Object.defineProperties(o, props))
+ assertEquals("last", key)
+ assertEquals(2, Object.getOwnPropertyNames(desc).length)
+ assertEquals(21, desc.value)
+ assertEquals(true, desc.configurable)
+ assertEquals(undefined, desc.mine) // Arguably a bug in the spec...
+}
+
+TestDefine({
+ defineProperty: function(k, d) { key = k; desc = d; return true }
+})
+TestDefine({
+ defineProperty: function(k, d) { return this.defineProperty2(k, d) },
+ defineProperty2: function(k, d) { key = k; desc = d; return true }
+})
+TestDefine(Proxy.create({
+ get: function(pr, pk) {
+ return function(k, d) { key = k; desc = d; return true }
+ }
+}))
+
+
+
+// Property descriptors (Object.getOwnPropertyDescriptor).
+
+function TestDescriptor(handler) {
+ var o = Proxy.create(handler)
+ var descs = [
+ {configurable: true},
+ {value: 34, enumerable: true, configurable: true},
+ {value: 3, writable: false, mine: "eyes", configurable: true},
+ {get value() { return 20 }, get configurable() { return true }},
+ {get: function() { "get" }, set: function() { "set" }, configurable: true}
+ ]
+ for (var i = 0; i < descs.length; ++i) {
+ assertEquals(o, Object.defineProperty(o, i, descs[i]))
+ var desc = Object.getOwnPropertyDescriptor(o, i)
+ for (p in descs[i]) {
+ // TODO(rossberg): Ignore user attributes as long as the spec isn't
+ // fixed suitably.
+ if (p != "mine") assertEquals(descs[i][p], desc[p])
+ }
+ assertEquals(undefined, Object.getOwnPropertyDescriptor(o, "absent"))
+ }
+}
+
+
+TestDescriptor({
+ defineProperty: function(k, d) { this["__" + k] = d; return true },
+ getOwnPropertyDescriptor: function(k) { return this["__" + k] }
+})
+TestDescriptor({
+ defineProperty: function(k, d) { this["__" + k] = d; return true },
+ getOwnPropertyDescriptor: function(k) {
+ return this.getOwnPropertyDescriptor2(k)
+ },
+ getOwnPropertyDescriptor2: function(k) { return this["__" + k] }
+})
+
+
+
// Comparison.
function TestComparison(eq) {
@@ -237,7 +415,7 @@ TestPrototype()
-// Property names (Object.getOwnPropertyNames).
+// Property names (Object.getOwnPropertyNames, Object.keys).
function TestPropertyNames(names, handler) {
var p = Proxy.create(handler)
@@ -259,3 +437,51 @@ TestPropertyNames(["[object Object]"], {
return function() { return [{}] }
}
})
+
+
+function TestKeys(names, handler) {
+ var p = Proxy.create(handler)
+ assertArrayEquals(names, Object.keys(p))
+}
+
+TestKeys([], {
+ keys: function() { return [] }
+})
+TestKeys(["a", "zz", " ", "0"], {
+ keys: function() { return ["a", "zz", " ", 0] }
+})
+TestKeys(["throw", "function "], {
+ keys: function() { return this.keys2() },
+ keys2: function() { return ["throw", "function "] }
+})
+TestKeys(["[object Object]"], {
+ get keys() {
+ return function() { return [{}] }
+ }
+})
+TestKeys(["a", "0"], {
+ getOwnPropertyNames: function() { return ["a", 23, "zz", "", 0] },
+ getOwnPropertyDescriptor: function(k) { return {enumerable: k.length == 1} }
+})
+TestKeys(["23", "zz", ""], {
+ getOwnPropertyNames: function() { return this.getOwnPropertyNames2() },
+ getOwnPropertyNames2: function() { return ["a", 23, "zz", "", 0] },
+ getOwnPropertyDescriptor: function(k) {
+ return this.getOwnPropertyDescriptor2(k)
+ },
+ getOwnPropertyDescriptor2: function(k) { return {enumerable: k.length != 1} }
+})
+TestKeys(["a", "b", "c", "5"], {
+ get getOwnPropertyNames() {
+ return function() { return ["0", 4, "a", "b", "c", 5] }
+ },
+ get getOwnPropertyDescriptor() {
+ return function(k) { return {enumerable: k >= "44"} }
+ }
+})
+TestKeys([], {
+ get getOwnPropertyNames() {
+ return function() { return ["a", "b", "c"] }
+ },
+ getOwnPropertyDescriptor: function(k) { return {} }
+})
diff --git a/deps/v8/test/mjsunit/polymorph-arrays.js b/deps/v8/test/mjsunit/polymorph-arrays.js
new file mode 100644
index 0000000000..ff0c433bd7
--- /dev/null
+++ b/deps/v8/test/mjsunit/polymorph-arrays.js
@@ -0,0 +1,177 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+function init_array(a) {
+ for (var i = 0; i < 10; ++i ){
+ a[i] = i;
+ }
+}
+
+function init_sparse_array(a) {
+ for (var i = 0; i < 10; ++i ){
+ a[i] = i;
+ }
+ a[5000000] = 256;
+ assertTrue(%HasDictionaryElements(a));
+}
+
+function testPolymorphicLoads() {
+ function make_polymorphic_load_function() {
+ function load(a, i) {
+ return a[i];
+ }
+
+ var object_array = new Object;
+ var sparse_object_array = new Object;
+ var js_array = new Array(10);
+ var sparse_js_array = new Array(5000001);
+
+ init_array(object_array);
+ init_array(js_array);
+ init_sparse_array(sparse_object_array);
+ init_sparse_array(sparse_js_array);
+
+ assertEquals(1, load(object_array, 1));
+ assertEquals(1, load(js_array, 1));
+ assertEquals(1, load(sparse_object_array, 1));
+ assertEquals(1, load(sparse_js_array, 1));
+
+ return load;
+ }
+
+ var object_array = new Object;
+ var sparse_object_array = new Object;
+ var js_array = new Array(10);
+ var sparse_js_array = new Array(5000001);
+
+ init_array(object_array);
+ init_array(js_array);
+ init_sparse_array(sparse_object_array);
+ init_sparse_array(sparse_js_array);
+
+ load = make_polymorphic_load_function();
+ assertEquals(undefined, load(js_array, new Object()));
+ load = make_polymorphic_load_function();
+ assertEquals(undefined, load(object_array, new Object()));
+ load = make_polymorphic_load_function();
+ assertEquals(undefined, load(sparse_js_array, new Object()));
+ load = make_polymorphic_load_function();
+ assertEquals(undefined, load(sparse_object_array, new Object()));
+
+ // Try with crankshaft.
+ load = make_polymorphic_load_function();
+ %OptimizeFunctionOnNextCall(load);
+ assertEquals(1, load(object_array, 1));
+ assertEquals(1, load(js_array, 1));
+ assertEquals(1, load(sparse_object_array, 1));
+ assertEquals(1, load(sparse_js_array, 1));
+
+ load = make_polymorphic_load_function();
+ %OptimizeFunctionOnNextCall(load);
+ assertEquals(undefined, load(js_array, new Object()));
+ load = make_polymorphic_load_function();
+ %OptimizeFunctionOnNextCall(load);
+ assertEquals(undefined, load(object_array, new Object()));
+ load = make_polymorphic_load_function();
+ %OptimizeFunctionOnNextCall(load);
+ assertEquals(undefined, load(sparse_js_array, new Object()));
+ load = make_polymorphic_load_function();
+ %OptimizeFunctionOnNextCall(load);
+ assertEquals(undefined, load(sparse_object_array, new Object()));
+}
+
+function testPolymorphicStores() {
+ function make_polymorphic_store_function() {
+ function store(a, i, val) {
+ a[i] = val;
+ }
+
+ var object_array = new Object;
+ var sparse_object_array = new Object;
+ var js_array = new Array(10);
+ var sparse_js_array = new Array(5000001);
+
+ init_array(object_array);
+ init_array(js_array);
+ init_sparse_array(sparse_object_array);
+ init_sparse_array(sparse_js_array);
+
+ store(object_array, 1, 256);
+ store(js_array, 1, 256);
+ store(sparse_object_array, 1, 256);
+ store(sparse_js_array, 1, 256);
+
+ return store;
+ }
+
+ var object_array = new Object;
+ var sparse_object_array = new Object;
+ var js_array = new Array(10);
+ var sparse_js_array = new Array(5000001);
+
+ init_array(object_array);
+ init_array(js_array);
+ init_sparse_array(sparse_object_array);
+ init_sparse_array(sparse_js_array);
+
+ store = make_polymorphic_store_function();
+ store(object_array, 2, 257);
+ store = make_polymorphic_store_function();
+ store(js_array, 2, 257);
+ store = make_polymorphic_store_function();
+ store(sparse_object_array, 2, 257);
+ store = make_polymorphic_store_function();
+ store(sparse_js_array, 2, 257);
+
+ assertEquals(257, object_array[2]);
+ assertEquals(257, js_array[2]);
+ assertEquals(257, sparse_js_array[2]);
+ assertEquals(257, sparse_object_array[2]);
+
+ // Now try Crankshaft optimized polymorphic stores
+ store = make_polymorphic_store_function();
+ %OptimizeFunctionOnNextCall(store);
+ store(object_array, 3, 258);
+ store = make_polymorphic_store_function();
+ %OptimizeFunctionOnNextCall(store);
+ store(js_array, 3, 258);
+ store = make_polymorphic_store_function();
+ %OptimizeFunctionOnNextCall(store);
+ store(sparse_object_array, 3, 258);
+ store = make_polymorphic_store_function();
+ %OptimizeFunctionOnNextCall(store);
+ store(sparse_js_array, 3, 258);
+
+ assertEquals(258, object_array[3]);
+ assertEquals(258, js_array[3]);
+ assertEquals(258, sparse_js_array[3]);
+ assertEquals(258, sparse_object_array[3]);
+}
+
+testPolymorphicLoads();
+testPolymorphicStores();
diff --git a/deps/v8/test/mjsunit/regress/regress-1531.js b/deps/v8/test/mjsunit/regress/regress-1531.js
new file mode 100644
index 0000000000..09e61a6040
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1531.js
@@ -0,0 +1,49 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Regression test for computing the elements keys of an arguments object.
+// Should not crash or assert.
+function test(x) {
+ arguments[10] = 0;
+ var arr = [];
+ for (var p in arguments) arr.push(p);
+ return arr;
+}
+assertEquals(["0", "10"], test(0));
+
+// Regression test for lookup after delete of a dictionary-mode arguments
+// backing store. Should not crash or assert.
+function test1(x, y, z) {
+ // Put into dictionary mode.
+ arguments.__defineGetter__("5", function () { return 0; });
+ // Delete a property from the dictionary.
+ delete arguments[5];
+ // Look up a property in the dictionary.
+ return arguments[2];
+}
+
+assertEquals(void 0, test1(0));
diff --git a/deps/v8/test/mjsunit/regress/regress-88591.js b/deps/v8/test/mjsunit/regress/regress-88591.js
new file mode 100644
index 0000000000..e42570a95b
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-88591.js
@@ -0,0 +1,42 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Regression test for a crash. A data property in the global object's
+// prototype shadowed by a setter in the global object's prototype's
+// prototype would crash or assert when seen by Runtime_DeclareContextSlot.
+var called = false;
+Object.prototype.__defineSetter__('x', function(x) { called = true; });
+Object.prototype.__defineGetter__('x', function () { return 0; });
+
+this.__proto__ = { x: 1 };
+
+try { fail; } catch (e) { eval('const x = 2'); }
+
+var o = Object.getOwnPropertyDescriptor(this, 'x');
+assertFalse(called);
+assertEquals(2, o.value);
+assertEquals(false, o.writable);
diff --git a/deps/v8/test/mjsunit/regress/regress-88858.js b/deps/v8/test/mjsunit/regress/regress-88858.js
new file mode 100644
index 0000000000..ba33f87036
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-88858.js
@@ -0,0 +1,65 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Verify that JSObject::PreventExtensions works for arguments objects.
+
+try {
+ function make_watcher(name) { }
+ var o, p;
+ function f(flag) {
+ if (flag) {
+ o = arguments;
+ } else {
+ p = arguments;
+ o.watch(0, (arguments-1901)('o'));
+ p.watch(0, make_watcher('p'));
+ p.unwatch(0);
+ o.unwatch(0);
+ p[0] = 4;
+ assertEq(flag, 4);
+ }
+ }
+ f(true);
+ f(false);
+ reportCompare(true, true);
+} catch(exc1) { }
+
+try {
+ function __noSuchMethod__() {
+ if (anonymous == "1")
+ return NaN;
+ return __construct__;
+ }
+ f.p = function() { };
+ Object.freeze(p);
+ new new freeze().p;
+ reportCompare(0, 0, "ok");
+} catch(exc2) { }
+
+gc();
diff --git a/deps/v8/test/mjsunit/regress/regress-regexp-codeflush.js b/deps/v8/test/mjsunit/regress/regress-regexp-codeflush.js
new file mode 100644
index 0000000000..5fa42bf8dc
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-regexp-codeflush.js
@@ -0,0 +1,55 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --gc_global
+
+// Regression test for a regexp that has multiple matches and which
+// internally calls RegExpImpl::IrregexpExecOnce more than once without
+// ensuring that the regexp is compiled.
+// This can cause a crash if the code was exchanged with the sweep
+// generation (for code flushing support) in GC during the matching.
+
+var re = new RegExp('(s)', "g");
+
+function foo() {
+ return "42";
+}
+
+// Run enough times to trigger a number of GCs (all mark-sweep because of
+// the --gc_global flag).
+for ( var i = 0; i < 10; i++) {
+ // Make a long string with plenty of matches for re.
+ var x = "s foo s bar s foo s bar s";
+ x = x + x;
+ x = x + x;
+ x = x + x;
+ x = x + x;
+ x = x + x;
+ x = x + x;
+ x = x + x;
+ x.replace(re, foo);
+}
diff --git a/deps/v8/test/mjsunit/tools/profile_view.js b/deps/v8/test/mjsunit/tools/profile_view.js
index 7f60119a07..d62205be16 100644
--- a/deps/v8/test/mjsunit/tools/profile_view.js
+++ b/deps/v8/test/mjsunit/tools/profile_view.js
@@ -26,7 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Load source code files from <project root>/tools.
-// Files: tools/consarray.js tools/profile.js tools/profile_view.js
+// Files: tools/codemap.js tools/consarray.js tools/profile.js
+// Files: tools/profile_view.js
function createNode(name, time, opt_parent) {
diff --git a/deps/v8/test/test262/README b/deps/v8/test/test262/README
index 6d9e56e789..ea6b4a71a6 100644
--- a/deps/v8/test/test262/README
+++ b/deps/v8/test/test262/README
@@ -4,11 +4,11 @@ tests from
http://hg.ecmascript.org/tests/test262
-at revision 62 as 'data' in this directory. Using later version
+at revision 128 as 'data' in this directory. Using later version
may be possible but the tests are only known to pass (and indeed run)
with that revision.
-hg clone -r 62 http://hg.ecmascript.org/tests/test262 data
+hg clone -r 128 http://hg.ecmascript.org/tests/test262 data
If you do update to a newer revision you may have to change the test
harness adapter code since it uses internal functionality from the
diff --git a/deps/v8/test/test262/harness-adapt.js b/deps/v8/test/test262/harness-adapt.js
index bc10a9d506..52b5de70bd 100644
--- a/deps/v8/test/test262/harness-adapt.js
+++ b/deps/v8/test/test262/harness-adapt.js
@@ -43,7 +43,7 @@ var ES5Harness = (function() {
}
Test262Error.prototype.toString = function() {
- return this.result + " " + error;
+ return this.result + " " + this.error;
}
function registerTest(test) {
diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status
index 754984df99..f68b85db77 100644
--- a/deps/v8/test/test262/test262.status
+++ b/deps/v8/test/test262/test262.status
@@ -46,24 +46,12 @@ prefix ietestcenter
# Unanalyzed failures which may be bugs or deliberate differences
#
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: class (class)
-7.6-30: FAIL
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: extends (extends)
-7.6-31: FAIL
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: \u0065\u006e\u0075\u006d (enum)
-7.6-32: FAIL
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: \u0073uper (super)
-7.6-33: FAIL
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: expor\u0074 (export)
-7.6-35: FAIL
-# BUG: 7.6 - SyntaxError expected: reserved words used as Identifier
-# Names in UTF8: \u0069\u006d\u0070\u006f\u0072\u0074 (import)
-7.6-36: FAIL
+# Bug? Strict Mode - TypeError is thrown when changing the value of a Value
+# Property of the Global Object under strict mode (NaN)
+10.2.1.1.3-4-16-s: FAIL
+# Bug? Strict Mode - TypeError is thrown when changing the value of a Value
+# Property of the Global Object under strict mode (undefined)
+10.2.1.1.3-4-18-s: FAIL
# Invalid test: https://bugs.ecmascript.org/show_bug.cgi?id=76
10.4.2-2-c-1: FAIL
# BUG: 11.8.2 Greater-than Operator - Partial left to right order enforced
@@ -93,6 +81,12 @@ prefix ietestcenter
# BUG: 11.8.3 Less-than-or-equal Operator - Partial left to right order
# enforced when using Less-than-or-equal operator: valueOf <= valueOf
11.8.3-5: FAIL
+# Bug? simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.undefined)
+11.13.1-4-27-s: FAIL
+# Bug? simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.Infinity)
+11.13.1-4-3-s: FAIL
# BUG: Global.NaN is a data property with default attribute values
15.1.1.1-0: FAIL
# BUG: Global.Infinity is a data property with default attribute values
@@ -123,35 +117,6 @@ prefix ietestcenter
# BUG: Object.getOwnPropertyDescriptor returns data desc (all false)
# for properties on built-ins (RegExp.prototype.multiline)
15.2.3.3-4-215: FAIL
-# Bug? Object.create - 'set' property of one property in 'Properties'
-# is not present (8.10.5 step 8)
-# V8 throws.
-15.2.3.5-4-267: FAIL
-# Bug? Object.create - 'set' property of one property in 'Properties'
-# is undefined (8.10.5 step 8.b)
-# V8 throws.
-15.2.3.5-4-292: FAIL
-# Bug? Object.defineProperty - 'set' property in 'Attributes' is not
-# present (8.10.5 step 8)
-# V8 throws.
-15.2.3.6-3-236: FAIL
-# Bug? Object.defineProperty - 'set' property in 'Attributes' is own
-# accessor property without a get function (8.10.5 step 8.a)
-# V8 throws.
-15.2.3.6-3-245: FAIL
-# Bug? Object.defineProperty - 'set' property in 'Attributes' is own
-# accessor property(without a get function) that overrides an inherited
-# accessor property (8.10.5 step 8.a)
-# V8 throws.
-15.2.3.6-3-246: FAIL
-# Bug? Object.defineProperty - 'set' property in 'Attributes' is an
-# inherited accessor property without a get function (8.10.5 step 8.a)
-# V8 throws.
-15.2.3.6-3-247: FAIL
-# Bug? Object.defineProperty - value of 'set' property in 'Attributes'
-# is undefined (8.10.5 step 8.b)
-# V8 throws.
-15.2.3.6-3-261: FAIL
# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name'
# property to true successfully when [[Enumerable]] attribute of 'name'
# is false and [[Configurable]] attribute of 'name' is true, the 'desc'
@@ -464,11 +429,6 @@ prefix ietestcenter
# values of 'name' (15.4.5.1 step 4.c)
15.2.3.6-4-209: FAIL
# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
-# property, 'name' is accessor property and assignment to the accessor
-# property, fails to convert accessor property from accessor property to
-# data property (15.4.5.1 step 4.c)
-15.2.3.6-4-243-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
# property, name is accessor property and 'desc' is accessor descriptor,
# test updating the [[Enumerable]] attribute value of 'name' (15.4.5.1 step
# 4.c)
@@ -493,18 +453,6 @@ prefix ietestcenter
# (15.4.5.1 step 4.e.ii)
15.2.3.6-4-276: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is own property which is defined in both
-# [[ParameterMap]] of 'O' and 'O', and is deleted afterwards, and 'desc' is
-# data descriptor, test 'name' is redefined in 'O' with all correct
-# attribute values (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.6-4-289-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is own property which is defined in both
-# [[ParameterMap]] of 'O' and 'O', is deleted afterwards, and 'desc' is
-# accessor descriptor, test 'name' is redefined in 'O' with all correct
-# attribute values (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.6-4-290-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
# formal parameters, 'name' is own accessor property of 'O' which is also
# defined in [[ParameterMap]] of 'O', and 'desc' is accessor descriptor,
# test updating multiple attribute values of 'name' (10.6
@@ -516,6 +464,19 @@ prefix ietestcenter
# step 3)
15.2.3.6-4-291: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
+# formal parameters, 'name' is own property of 'O' which is also defined in
+# [[ParameterMap]] of 'O', and 'desc' is data descriptor, test updating
+# multiple attribute values of 'name' (10.6 [[DefineOwnProperty]] step 3
+# and 5.b)
+15.2.3.6-4-292-1: FAIL
+# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
+# formal parameters, 'name' is own data property of 'O' which is also
+# defined in [[ParameterMap]] of 'O', test TypeError is thrown when
+# updating the [[Value]] attribute value of 'name' which is defined as
+# unwritable and non-configurable (10.6 [[DefineOwnProperty]] step 4 and
+# step 5b)
+15.2.3.6-4-293-2: FAIL
+# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
# formal parameters, 'name' is own data property of 'O' which is also
# defined in [[ParameterMap]] of 'O', test TypeError is not thrown when
# updating the [[Value]] attribute value of 'name' which is defined as
@@ -523,39 +484,44 @@ prefix ietestcenter
# 5.b)
15.2.3.6-4-293-3: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is own accessor property of 'O' which is also
-# defined in [[ParameterMap]] of 'O', test TypeError is thrown when
-# updating the [[Get]] attribute value of 'name' which is defined as
-# non-configurable (10.6 [[DefineOwnProperty]] step 4 and step 5a)
-15.2.3.6-4-297-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is own accessor property of 'O' which is also
+# formal parameters, 'name' is own data property of 'O' which is also
# defined in [[ParameterMap]] of 'O', test TypeError is thrown when
-# updating the [[Set]] attribute value of 'name' which is defined as
-# non-configurable (10.6 [[DefineOwnProperty]] steps 4 and 5a)
-15.2.3.6-4-298-1: FAIL
+# updating the [[Writable]] attribute value of 'name' which is defined as
+# non-configurable (10.6 [[DefineOwnProperty]] step 4 and 5b)
+15.2.3.6-4-294-1: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is own accessor property of 'O' which is also
+# formal parameters, 'name' is own data property of 'O' which is also
# defined in [[ParameterMap]] of 'O', test TypeError is thrown when
# updating the [[Enumerable]] attribute value of 'name' which is defined as
-# non-configurable (10.6 [[DefineOwnProperty]] steps 4 and 5a)
-15.2.3.6-4-299-1: FAIL
+# non-configurable (10.6 [[DefineOwnProperty]] step 4 and step 5b)
+15.2.3.6-4-295-1: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is an index named property of 'O', and 'desc'
-# is data descriptor, test 'name' is defined in 'O' with all correct
-# attribute values (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.6-4-301-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-# formal parameters, 'name' is an index named property of 'O' but not
-# defined in [[ParameterMap]] of 'O', and 'desc' is accessor descriptor,
-# test 'name' is defined in 'O' with all correct attribute values (10.6
-# [[DefineOwnProperty]] step 3 and step 5a)
-15.2.3.6-4-302-1: FAIL
+# formal parameters, 'name' is own data property of 'O' which is also
+# defined in [[ParameterMap]] of 'O', test TypeError is thrown when
+# updating the [[Configurable]] attribute value of 'name' which is defined
+# as non-configurable (10.6 [[DefineOwnProperty]] step 4 and step 5b)
+15.2.3.6-4-296-1: FAIL
# Bug? Object.defineProperty - 'O' is an Arguments object, 'name' is an index
# named accessor property of 'O' but not defined in [[ParameterMap]] of
# 'O', and 'desc' is accessor descriptor, test updating multiple attribute
# values of 'name' (10.6 [[DefineOwnProperty]] step 3)
15.2.3.6-4-303: FAIL
+# Bug? ES5 Attributes - indexed property 'P' with attributes [[Writable]]: true,
+# [[Enumerable]]: true, [[Configurable]]: false is writable using simple
+# assignment, 'O' is an Arguments object
+15.2.3.6-4-333-11: FAIL
+# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
+# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
+# accessor property, 'A' is an Array object (8.12.9 - step 9.b.i)
+15.2.3.6-4-360-1: FAIL
+# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
+# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
+# accessor property, 'O' is an Arguments object (8.12.9 - step 9.b.i)
+15.2.3.6-4-360-6: FAIL
+# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
+# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
+# accessor property, 'O' is the global object (8.12.9 - step 9.b.i)
+15.2.3.6-4-360-7: FAIL
# Bug? ES5 Attributes - [[Value]] attribute of data property is the activex host
# object
15.2.3.6-4-401: FAIL
@@ -575,17 +541,6 @@ prefix ietestcenter
# prototype has a property with the same name and [[Writable]] set to
# false(Function.prototype.bind)
15.2.3.6-4-420: FAIL
-# Bug? ES5 Attributes - Fail to add property into object (Number instance)
-15.2.3.6-4-581: FAIL
-# Bug? ES5 Attributes - Fail to update value of property into of [[Proptotype]]
-# internal property (JSON)
-15.2.3.6-4-586: FAIL
-# Bug? ES5 Attributes - Fail to update value of property of [[Proptotype]]
-# internal property (Object.create)
-15.2.3.6-4-591: FAIL
-# Bug? ES5 Attributes - Fail to update value of property into of [[Proptotype]]
-# internal property (Function.prototype.bind)
-15.2.3.6-4-596: FAIL
# Bug? ES5 Attributes - all attributes in Array.prototype.indexOf are correct
15.2.3.6-4-612: FAIL
# Bug? ES5 Attributes - all attributes in Object.lastIndexOf are correct
@@ -611,20 +566,6 @@ prefix ietestcenter
15.2.3.6-4-623: FAIL
# Bug? ES5 Attributes - all attributes in Date.prototype.toJSON are correct
15.2.3.6-4-624: FAIL
-# Bug? Object.defineProperties - argument 'Properties' is an Error object
-# props.description = obj1;
-15.2.3.7-2-15: FAIL
-# Bug? Object.defineProperties - 'Properties' is an Error object which
-# implements its own [[Get]] method to get enumerable own property
-# props.description = obj1;
-15.2.3.7-5-a-16: FAIL
-# Bug? Object.defineProperties - 'set' property of 'descObj' is not present
-# (8.10.5 step 8)
-15.2.3.7-5-b-227: FAIL
-# Bug? Object.defineProperties - 'descObj' is an Error object which implements
-# its own [[Get]] method to get 'set' property (8.10.5 step 8.a)
-# descObj.description = { value: 11 };
-15.2.3.7-5-b-248: FAIL
# Bug? Object.defineProperties - 'O' is an Array, test the length property of
# 'O' is own data property (15.4.5.1 step 1)
15.2.3.7-6-a-112: FAIL
@@ -922,23 +863,48 @@ prefix ietestcenter
# 'desc' is accessor descriptor, test updating multiple attribute values of
# 'P' (10.6 [[DefineOwnProperty]] step 3)
15.2.3.7-6-a-280: FAIL
+# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
+# property of 'O' which is also defined in [[ParameterMap]] of 'O', and
+# 'desc' is data descriptor, test updating multiple attribute values of 'P'
+# (10.6 [[DefineOwnProperty]] step 3)
+15.2.3.7-6-a-281: FAIL
+# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
+# property of 'O' which is also defined in [[ParameterMap]] of 'O', test
+# TypeError is thrown when updating the [[Value]] attribute value of 'P'
+# whose writable and configurable attributes are false (10.6
+# [[DefineOwnProperty]] step 4)
+15.2.3.7-6-a-282: FAIL
+# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
+# property of 'O' which is also defined in [[ParameterMap]] of 'O', test
+# TypeError is thrown when updating the [[Writable]] attribute value of 'P'
+# which is defined as non-configurable (10.6 [[DefineOwnProperty]] step 4)
+15.2.3.7-6-a-283: FAIL
+# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
+# property of 'O' which is also defined in [[ParameterMap]] of 'O', test
+# TypeError is thrown when updating the [[Enumerable]] attribute value of
+# 'P' which is defined as non-configurable (10.6 [[DefineOwnProperty]] step
+# 4)
+15.2.3.7-6-a-284: FAIL
+# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
+# property of 'O' which is also defined in [[ParameterMap]] of 'O', test
+# TypeError is thrown when updating the [[Configurable]] attribute value of
+# 'P' which is defined as non-configurable (10.6 [[DefineOwnProperty]] step
+# 4)
+15.2.3.7-6-a-285: FAIL
# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is an array
# index named accessor property of 'O' but not defined in [[ParameterMap]]
# of 'O', and 'desc' is accessor descriptor, test updating multiple
# attribute values of 'P' (10.6 [[DefineOwnProperty]] step 3)
15.2.3.7-6-a-292: FAIL
-# Bug? Object.prototype.toString - '[object Undefined]' will be returned when
-# 'this' value is undefined
-15.2.4.2-1-1: FAIL
-# Bug? Object.prototype.toString - '[object Undefined]' will be returned when
-# 'this' value is undefined
-15.2.4.2-1-2: FAIL
-# Bug? Object.prototype.toString - '[object Null]' will be returned when 'this'
-# value is null
-15.2.4.2-2-1: FAIL
-# Bug? Object.prototype.toString - '[object Null]' will be returned when 'this'
-# value is null
-15.2.4.2-2-2: FAIL
+# Bug? Strict Mode - 'this' value is a string which cannot be converted to
+# wrapper objects when the function is called with an array of arguments
+15.3.4.3-1-s: FAIL
+# Bug? Strict Mode - 'this' value is a number which cannot be converted to
+# wrapper objects when the function is called with an array of arguments
+15.3.4.3-2-s: FAIL
+# Bug? Strict Mode - 'this' value is a boolean which cannot be converted to
+# wrapper objects when the function is called with an array of arguments
+15.3.4.3-3-s: FAIL
# Bug? Function.prototype.bind - [[Get]] attribute of 'caller' property in 'F'
# is thrower
15.3.4.5-20-2: FAIL
@@ -951,16 +917,6 @@ prefix ietestcenter
# Bug? Function.prototype.bind - [[Set]] attribute of 'arguments' property in
# 'F' is thrower
15.3.4.5-21-3: FAIL
-# Bug? Array.prototype.concat will concat an Array when index property
-# (read-only) exists in Array.prototype (Step 5.c.i)
-15.4.4.4-5-c-i-1: FAIL
-# Bug? Array.prototype.indexOf applied to undefined throws a TypeError
-15.4.4.14-1-1: FAIL
-# Bug? Array.prototype.indexOf applied to null throws a TypeError
-15.4.4.14-1-2: FAIL
-# Bug? Array.prototype.indexOf - side effects produced by step 1 are visible
-# when an exception occurs
-15.4.4.14-5-28: FAIL
# Bug? Array.prototype.indexOf - decreasing length of array does not delete
# non-configurable properties
15.4.4.14-9-a-19: FAIL
@@ -983,13 +939,6 @@ prefix ietestcenter
# Bug? Array.prototype.indexOf - terminates iteration on unhandled exception on
# an Array
15.4.4.14-9-b-i-30: FAIL
-# Bug? Array.prototype.lastIndexOf applied to undefined throws a TypeError
-15.4.4.15-1-1: FAIL
-# Bug? Array.prototype.lastIndexOf applied to null throws a TypeError
-15.4.4.15-1-2: FAIL
-# Bug? Array.prototype.lastIndexOf - side effects produced by step 1 are visible
-# when an exception occurs
-15.4.4.15-5-28: FAIL
# Bug? Array.prototype.lastIndexOf - deleting property of prototype causes
# prototype index property not to be visited on an Array
15.4.4.15-8-a-14: FAIL
@@ -1011,6 +960,12 @@ prefix ietestcenter
# Bug? Array.prototype.lastIndexOf terminates iteration on unhandled exception
# on an Array
15.4.4.15-8-b-i-30: FAIL
+# Bug? Array.prototype.every applied to boolean primitive
+15.4.4.16-1-3: FAIL
+# Bug? Array.prototype.every applied to number primitive
+15.4.4.16-1-5: FAIL
+# Bug? Array.prototype.every applied to string primitive
+15.4.4.16-1-7: FAIL
# Bug? Array.prototype.every - side effects produced by step 2 are visible when
# an exception occurs
15.4.4.16-4-8: FAIL
@@ -1048,6 +1003,12 @@ prefix ietestcenter
# Bug? Array.prototype.every - element changed by getter on previous iterations
# is observed on an Array
15.4.4.16-7-c-i-28: FAIL
+# Bug? Array.prototype.some applied to boolean primitive
+15.4.4.17-1-3: FAIL
+# Bug? Array.prototype.some applied to number primitive
+15.4.4.17-1-5: FAIL
+# Bug? Array.prototype.some applied to string primitive
+15.4.4.17-1-7: FAIL
# Bug? Array.prototype.some - side effects produced by step 2 are visible when
# an exception occurs
15.4.4.17-4-8: FAIL
@@ -1085,6 +1046,12 @@ prefix ietestcenter
# Bug? Array.prototype.some - element changed by getter on previous iterations
# is observed on an Array
15.4.4.17-7-c-i-28: FAIL
+# Bug? Array.prototype.forEach applied to boolean primitive
+15.4.4.18-1-3: FAIL
+# Bug? Array.prototype.forEach applied to number primitive
+15.4.4.18-1-5: FAIL
+# Bug? Array.prototype.forEach applied to string primitive
+15.4.4.18-1-7: FAIL
# Bug? Array.prototype.forEach - side effects produced by step 2 are visible
# when an exception occurs
15.4.4.18-4-8: FAIL
@@ -1122,9 +1089,12 @@ prefix ietestcenter
# Bug? Array.prototype.forEach - element changed by getter on previous
# iterations is observed on an Array
15.4.4.18-7-c-i-28: FAIL
-# Bug? Array.prototype.map - applied to Array-like object, 'length' is an own
-# data property that overrides an inherited accessor property
-15.4.4.19-2-5: FAIL
+# Bug? Array.prototype.map - applied to boolean primitive
+15.4.4.19-1-3: FAIL
+# Bug? Array.prototype.map - applied to number primitive
+15.4.4.19-1-5: FAIL
+# Bug? Array.prototype.map - applied to string primitive
+15.4.4.19-1-7: FAIL
# Bug? Array.prototype.map - Side effects produced by step 2 are visible when an
# exception occurs
15.4.4.19-4-8: FAIL
@@ -1162,6 +1132,12 @@ prefix ietestcenter
# Bug? Array.prototype.map - element changed by getter on previous iterations is
# observed on an Array
15.4.4.19-8-c-i-28: FAIL
+# Bug? Array.prototype.filter applied to boolean primitive
+15.4.4.20-1-3: FAIL
+# Bug? Array.prototype.filter applied to number primitive
+15.4.4.20-1-5: FAIL
+# Bug? Array.prototype.filter applied to string primitive
+15.4.4.20-1-7: FAIL
# Bug? Array.prototype.filter - value of 'length' is a number (value is
# negative)
15.4.4.20-3-7: FAIL
@@ -1207,9 +1183,6 @@ prefix ietestcenter
# Bug? Array.prototype.filter - properties can be added to prototype after
# current position are visited on an Array-like object
15.4.4.20-9-b-6: FAIL
-# Bug? Array.prototype.filter - properties can be added to prototype after
-# current position are visited on an Array
-15.4.4.20-9-b-7: FAIL
# Bug? Array.prototype.filter - decreasing length of array does not delete
# non-configurable properties
15.4.4.20-9-b-16: FAIL
@@ -1241,6 +1214,12 @@ prefix ietestcenter
# Bug? Array.prototype.filter - element changed by getter on previous iterations
# is observed on an Array
15.4.4.20-9-c-i-28: FAIL
+# Bug? Array.prototype.reduce applied to boolean primitive
+15.4.4.21-1-3: FAIL
+# Bug? Array.prototype.reduce applied to number primitive
+15.4.4.21-1-5: FAIL
+# Bug? Array.prototype.reduce applied to string primitive
+15.4.4.21-1-7: FAIL
# Bug? Array.prototype.reduce - value of 'length' is a number (value is
# negative)
15.4.4.21-3-7: FAIL
@@ -1289,6 +1268,12 @@ prefix ietestcenter
# Bug? Array.prototype.reduce - decreasing length of array does not delete
# non-configurable properties
15.4.4.21-9-b-29: FAIL
+# Bug? Array.prototype.reduceRight applied to boolean primitive
+15.4.4.22-1-3: FAIL
+# Bug? Array.prototype.reduceRight applied to number primitive
+15.4.4.22-1-5: FAIL
+# Bug? Array.prototype.reduceRight applied to string primitive
+15.4.4.22-1-7: FAIL
# Bug? Array.prototype.reduceRight - value of 'length' is a number (value is
# negative)
15.4.4.22-3-7: FAIL
@@ -1378,10 +1363,6 @@ prefix ietestcenter
# Bug? Array.prototype.reduceRight - modifications to length will change number
# of iterations
15.4.4.22-9-9: FAIL
-# Bug? String.prototype.trim throws TypeError when string is undefined
-15.5.4.20-1-1: FAIL
-# Bug? String.prototype.trim throws TypeError when string is null
-15.5.4.20-1-2: FAIL
# Bug? String.prototype.trim - 'S' is a string with all WhiteSpace
15.5.4.20-3-2: FAIL
# Bug? String.prototype.trim - 'S' is a string with all union of WhiteSpace and
@@ -1404,9 +1385,6 @@ prefix ietestcenter
# Bug? String.prototype.trim handles whitepace and lineterminators
# (\\uFEFF\\uFEFF)
15.5.4.20-4-34: FAIL
-# Bug? Date Time String Format - specified default values will be set for all
-# optional fields(MM, DD, mm, ss and time zone) when they are absent
-15.9.1.15-1: FAIL
# Bug? Date.prototype.toISOString - RangeError is thrown when value of date is
# Date(1970, 0, -99999999, 0, 0, 0, -1), the time zone is UTC(0)
15.9.5.43-0-8: FAIL
diff --git a/deps/v8/tools/codemap.js b/deps/v8/tools/codemap.js
index 71a99cc223..dec494a3b3 100644
--- a/deps/v8/tools/codemap.js
+++ b/deps/v8/tools/codemap.js
@@ -211,6 +211,14 @@ CodeMap.prototype.getAllDynamicEntries = function() {
/**
+ * Returns an array of pairs of all dynamic code entries and their addresses.
+ */
+CodeMap.prototype.getAllDynamicEntriesWithAddresses = function() {
+ return this.dynamics_.exportKeysAndValues();
+};
+
+
+/**
* Returns an array of all static code entries.
*/
CodeMap.prototype.getAllStaticEntries = function() {
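The new getAllDynamicEntriesWithAddresses accessor delegates to the SplayTree.prototype.exportKeysAndValues helper introduced later in this patch, so callers receive [address, entry] pairs instead of bare entries. A minimal consumption sketch, assuming an already-populated CodeMap instance held in a variable named codeMap (the variable name and the dump format are illustrative, not part of the patch):

    // Walk every dynamic code entry together with its start address,
    // e.g. to build a readable dump or to delete entries by address later.
    var pairs = codeMap.getAllDynamicEntriesWithAddresses();
    var lines = [];
    for (var i = 0; i < pairs.length; ++i) {
      var address = pairs[i][0];  // splay-tree key: the entry's start address
      var entry = pairs[i][1];    // splay-tree value: the code entry object
      lines.push(address.toString(16) + ': ' + entry.getName());
    }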
diff --git a/deps/v8/tools/gcmole/gcmole.lua b/deps/v8/tools/gcmole/gcmole.lua
index 4afc66d1cb..f8d3b6204a 100644
--- a/deps/v8/tools/gcmole/gcmole.lua
+++ b/deps/v8/tools/gcmole/gcmole.lua
@@ -97,8 +97,6 @@ local function MakeClangCommandLine(plugin, plugin_args, triple, arch_define)
.. (plugin_args or "")
.. " -triple " .. triple
.. " -D" .. arch_define
- .. " -DENABLE_VMSTATE_TRACKING"
- .. " -DENABLE_LOGGING_AND_PROFILING"
.. " -DENABLE_DEBUGGER_SUPPORT"
.. " -Isrc"
end
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index 91984d6f3f..26504836af 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -62,10 +62,7 @@
['use_system_v8==0', {
'target_defaults': {
'defines': [
- 'ENABLE_LOGGING_AND_PROFILING',
'ENABLE_DEBUGGER_SUPPORT',
- 'ENABLE_VMSTATE_TRACKING',
- 'V8_FAST_TLS',
],
'conditions': [
['OS!="mac"', {
@@ -286,6 +283,12 @@
'conditions': [
['component=="shared_library"', {
'conditions': [
+ # The ARM assembler assumes the host is 32 bits, so force building
+ # 32-bit host tools.
+ ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
+ 'cflags': ['-m32'],
+ 'ldflags': ['-m32'],
+ }],
['OS=="win"', {
'defines': [
'BUILDING_V8_SHARED',
@@ -971,6 +974,12 @@
# This could be gotten by not setting chromium_code, if that's OK.
'defines': ['_CRT_SECURE_NO_WARNINGS'],
}],
+ # The ARM assembler assumes the host is 32 bits, so force building
+ # 32-bit host tools.
+ ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
+ 'cflags': ['-m32'],
+ 'ldflags': ['-m32'],
+ }],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
diff --git a/deps/v8/tools/profile.js b/deps/v8/tools/profile.js
index c9c9437e93..10a07f8246 100644
--- a/deps/v8/tools/profile.js
+++ b/deps/v8/tools/profile.js
@@ -162,8 +162,16 @@ Profile.prototype.addFuncCode = function(
// Function object has been overwritten with a new one.
func.name = name;
}
- var entry = new Profile.DynamicFuncCodeEntry(size, type, func, state);
- this.codeMap_.addCode(start, entry);
+ var entry = this.codeMap_.findDynamicEntryByStartAddress(start);
+ if (entry) {
+ if (entry.size === size && entry.func === func) {
+ // Entry state has changed.
+ entry.state = state;
+ }
+ } else {
+ entry = new Profile.DynamicFuncCodeEntry(size, type, func, state);
+ this.codeMap_.addCode(start, entry);
+ }
return entry;
};
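This change makes addFuncCode idempotent with respect to the code's start address: if the code map already holds an entry at start with the same size and the same function object, only its state field is refreshed; if the existing entry differs it is left untouched and returned as-is; a new DynamicFuncCodeEntry is created only when nothing is registered at that address. The same update-or-insert shape in isolation, over a plain object keyed by address (names below are illustrative, not code from the patch):

    // Stand-alone sketch of the "refresh state or insert" pattern above.
    function registerCode(map, start, size, func, state) {
      var entry = map[start];
      if (entry) {
        if (entry.size === size && entry.func === func) {
          // Same code object re-reported at the same address: only the
          // state can have changed, so refresh it in place.
          entry.state = state;
        }
      } else {
        // First report for this start address: record a fresh entry.
        entry = { size: size, func: func, state: state };
        map[start] = entry;
      }
      return entry;
    }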
@@ -374,6 +382,31 @@ Profile.prototype.getFlatProfile = function(opt_label) {
/**
+ * Cleans up function entries that are not referenced by code entries.
+ */
+Profile.prototype.cleanUpFuncEntries = function() {
+ var referencedFuncEntries = [];
+ var entries = this.codeMap_.getAllDynamicEntriesWithAddresses();
+ for (var i = 0, l = entries.length; i < l; ++i) {
+ if (entries[i][1].constructor === Profile.FunctionEntry) {
+ entries[i][1].used = false;
+ }
+ }
+ for (var i = 0, l = entries.length; i < l; ++i) {
+ if ("func" in entries[i][1]) {
+ entries[i][1].func.used = true;
+ }
+ }
+ for (var i = 0, l = entries.length; i < l; ++i) {
+ if (entries[i][1].constructor === Profile.FunctionEntry &&
+ !entries[i][1].used) {
+ this.codeMap_.deleteCode(entries[i][0]);
+ }
+ }
+};
+
+
+/**
* Creates a dynamic code entry.
*
* @param {number} size Code size.
@@ -408,6 +441,11 @@ Profile.DynamicCodeEntry.prototype.isJSFunction = function() {
};
+Profile.DynamicCodeEntry.prototype.toString = function() {
+ return this.getName() + ': ' + this.size.toString(16);
+};
+
+
/**
* Creates a dynamic code entry.
*
@@ -448,6 +486,11 @@ Profile.DynamicFuncCodeEntry.prototype.isJSFunction = function() {
};
+Profile.DynamicFuncCodeEntry.prototype.toString = function() {
+ return this.getName() + ': ' + this.size.toString(16);
+};
+
+
/**
* Creates a shared function object entry.
*
@@ -473,6 +516,7 @@ Profile.FunctionEntry.prototype.getName = function() {
return name;
};
+Profile.FunctionEntry.prototype.toString = CodeMap.CodeEntry.prototype.toString;
/**
* Constructs a call graph.
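cleanUpFuncEntries is a three-pass mark-and-sweep over the [address, entry] pairs exported from the code map: the first pass clears a used flag on every FunctionEntry, the second marks as used every function reachable through a code entry's func field, and the third deletes the FunctionEntry records that were never marked. The same shape reduced to a generic helper over an array of pairs (purely illustrative; the patch itself operates directly on the Profile's CodeMap):

    // pairs: array of [address, entry] tuples; isFuncEntry(entry) tells
    // function entries apart from code entries; removeAt(address) drops an
    // entry from whatever map produced the pairs.
    function sweepUnreferencedFuncs(pairs, isFuncEntry, removeAt) {
      for (var i = 0; i < pairs.length; ++i) {        // pass 1: assume dead
        if (isFuncEntry(pairs[i][1])) pairs[i][1].used = false;
      }
      for (var i = 0; i < pairs.length; ++i) {        // pass 2: mark live
        if ('func' in pairs[i][1]) pairs[i][1].func.used = true;
      }
      for (var i = 0; i < pairs.length; ++i) {        // pass 3: sweep
        if (isFuncEntry(pairs[i][1]) && !pairs[i][1].used) {
          removeAt(pairs[i][0]);
        }
      }
    }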
diff --git a/deps/v8/tools/splaytree.js b/deps/v8/tools/splaytree.js
index 1c9aab9e2e..d272a9e182 100644
--- a/deps/v8/tools/splaytree.js
+++ b/deps/v8/tools/splaytree.js
@@ -191,6 +191,17 @@ SplayTree.prototype.findGreatestLessThan = function(key) {
/**
+ * @return {Array<*>} An array containing all the values of tree's nodes paired
+ * with keys.
+ */
+SplayTree.prototype.exportKeysAndValues = function() {
+ var result = [];
+ this.traverse_(function(node) { result.push([node.key, node.value]); });
+ return result;
+};
+
+
+/**
* @return {Array<*>} An array containing all the values of tree's nodes.
*/
SplayTree.prototype.exportValues = function() {