Diffstat (limited to 'deps/v8/tools')
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/build_db.js | 3
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/db.js | 30
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/exceptions.js | 26
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/mutators/crossover_mutator.js | 5
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/source_helpers.js | 27
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test/test_db.js | 33
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test/test_regressions.js | 46
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/cross_over_mutator_class_input.js (renamed from deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_class_input.js) | 0
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/db/this/file.js (renamed from deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/this/file.js) | 0
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load.js | 4
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_0.js | 6
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_1.js | 2
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_self.js | 2
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_input.js | 7
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/destructuring/input.js | 7
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_data/spidermonkey/test/load.js | 2
-rw-r--r--  deps/v8/tools/clusterfuzz/js_fuzzer/test_db.js | 5
-rw-r--r--  deps/v8/tools/clusterfuzz/v8_commands.py | 3
-rwxr-xr-x  deps/v8/tools/clusterfuzz/v8_foozzie.py | 18
-rw-r--r--  deps/v8/tools/clusterfuzz/v8_smoke_tests.js | 2
-rwxr-xr-x  deps/v8/tools/cppgc/gen_cmake.py | 8
-rwxr-xr-x  deps/v8/tools/cppgc/test_cmake.sh | 2
-rwxr-xr-x  deps/v8/tools/dev/gm.py | 23
-rw-r--r--  deps/v8/tools/gen-postmortem-metadata.py | 17
-rwxr-xr-x  deps/v8/tools/generate-header-include-checks.py | 4
-rwxr-xr-x  deps/v8/tools/mb/mb.py | 64
-rwxr-xr-x  deps/v8/tools/mb/mb_unittest.py | 22
-rw-r--r--  deps/v8/tools/profile.mjs | 5
-rw-r--r--  deps/v8/tools/release/PRESUBMIT.py | 8
-rwxr-xr-x  deps/v8/tools/release/auto_tag.py | 2
-rwxr-xr-x  deps/v8/tools/release/check_clusterfuzz.py | 2
-rw-r--r--  deps/v8/tools/release/common_includes.py | 26
-rwxr-xr-x  deps/v8/tools/release/create_release.py | 22
-rwxr-xr-x  deps/v8/tools/release/merge_to_branch.py | 4
-rwxr-xr-x  deps/v8/tools/release/mergeinfo.py | 10
-rwxr-xr-x  deps/v8/tools/release/roll_merge.py | 4
-rwxr-xr-x  deps/v8/tools/release/search_related_commits.py | 2
-rwxr-xr-x  deps/v8/tools/release/test_mergeinfo.py | 10
-rwxr-xr-x  deps/v8/tools/release/test_scripts.py | 44
-rwxr-xr-x  deps/v8/tools/release/test_search_related_commits.py | 38
-rw-r--r--  deps/v8/tools/run_perf.py | 29
-rw-r--r--  deps/v8/tools/testrunner/base_runner.py | 29
-rw-r--r--  deps/v8/tools/testrunner/local/android.py | 6
-rw-r--r--  deps/v8/tools/testrunner/local/junit_output.py | 49
-rw-r--r--  deps/v8/tools/testrunner/local/statusfile.py | 2
-rw-r--r--  deps/v8/tools/testrunner/local/variants.py | 9
-rwxr-xr-x  deps/v8/tools/testrunner/num_fuzzer.py | 16
-rw-r--r--  deps/v8/tools/testrunner/objects/testcase.py | 9
-rw-r--r--  deps/v8/tools/testrunner/outproc/base.py | 2
-rwxr-xr-x  deps/v8/tools/testrunner/standard_runner.py | 7
-rw-r--r--  deps/v8/tools/testrunner/testproc/expectation.py | 9
-rw-r--r--  deps/v8/tools/testrunner/testproc/fuzzer.py | 6
-rw-r--r--  deps/v8/tools/testrunner/testproc/progress.py | 40
-rwxr-xr-x  deps/v8/tools/unittests/run_tests_test.py | 3
-rw-r--r--  deps/v8/tools/unittests/testdata/testroot1/v8_build_config.json | 1
-rw-r--r--  deps/v8/tools/unittests/testdata/testroot2/v8_build_config.json | 1
-rw-r--r--  deps/v8/tools/unittests/testdata/testroot3/v8_build_config.json | 1
-rw-r--r--  deps/v8/tools/v8heapconst.py | 396
-rw-r--r--  deps/v8/tools/whitespace.txt | 2
59 files changed, 504 insertions, 658 deletions
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/build_db.js b/deps/v8/tools/clusterfuzz/js_fuzzer/build_db.js
index 675a322c64..c00d286eb1 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/build_db.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/build_db.js
@@ -34,7 +34,6 @@ function main() {
}
const mutateDb = new db.MutateDbWriter(program.output_dir);
- const expressions = new Set();
const inputDir = path.resolve(program.input_dir);
for (const corpusName of program.args) {
@@ -53,7 +52,7 @@ function main() {
}
try{
- mutateDb.process(source, expressions);
+ mutateDb.process(source);
} catch (e) {
console.log(e);
}
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/db.js b/deps/v8/tools/clusterfuzz/js_fuzzer/db.js
index e96265b068..3fbe438023 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/db.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/db.js
@@ -11,11 +11,13 @@ const fs = require('fs');
const fsPath = require('path');
const babelGenerator = require('@babel/generator').default;
+const babelTemplate = require('@babel/template').default;
const babelTraverse = require('@babel/traverse').default;
const babelTypes = require('@babel/types');
const globals = require('globals');
const random = require('./random.js');
+const sourceHelpers = require('./source_helpers.js');
const globalIdentifiers = new Set(Object.keys(globals.builtin));
const propertyNames = new Set([
@@ -238,6 +240,29 @@ function _markSkipped(path) {
}
}
+/**
+ * Returns true if an expression can be applied or false otherwise.
+ */
+function isValid(expression) {
+ const expressionTemplate = babelTemplate(
+ expression.source,
+ sourceHelpers.BABYLON_REPLACE_VAR_OPTIONS);
+
+ const dependencies = {};
+ if (expression.dependencies) {
+ for (const dependency of expression.dependencies) {
+ dependencies[dependency] = babelTypes.identifier('__v_0');
+ }
+ }
+
+ try {
+ expressionTemplate(dependencies);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
+
class MutateDbWriter {
constructor(outputDir) {
this.seen = new Set();
@@ -393,6 +418,11 @@ class MutateDbWriter {
return;
}
+ // Test results.
+ if (!isValid(expression)) {
+ return;
+ }
+
// Write results.
let dirPath = fsPath.join(self.outputDir, expression.type);
if (!fs.existsSync(dirPath)) {
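
The new isValid() check above builds each harvested expression's Babel template once and drops anything that cannot be re-instantiated, instead of letting broken expressions reach the cross-over mutator later. A minimal standalone sketch of the idea (not part of the patch; it assumes the option object reduces to the VAR_<n> placeholder pattern defined in source_helpers.js below):

```js
// Sketch of the validation idea: build the template with the VAR_<n>
// placeholder pattern and treat any template error as "not applicable".
const babelTemplate = require('@babel/template').default;
const babelTypes = require('@babel/types');

function sketchIsValid(expression) {
  // Assumed option shape; the real code passes
  // sourceHelpers.BABYLON_REPLACE_VAR_OPTIONS.
  const build = babelTemplate(
      expression.source, { placeholderPattern: /^VAR_[0-9]+$/ });

  const substitutions = {};
  for (const dependency of expression.dependencies || []) {
    substitutions[dependency] = babelTypes.identifier('__v_0');
  }

  try {
    build(substitutions);  // throws on syntax errors or bad substitutions
    return true;
  } catch (e) {
    return false;          // the expression is omitted from the DB
  }
}
```

This is the same failure mode the regression tests removed further below used to provoke at mutation time; validating at DB-build time filters such expressions out up front.
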
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/exceptions.js b/deps/v8/tools/clusterfuzz/js_fuzzer/exceptions.js
index efb1a8a649..4a571d5dd0 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/exceptions.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/exceptions.js
@@ -144,24 +144,6 @@ const DISALLOWED_DIFFERENTIAL_FUZZ_FLAGS = [
'--validate-asm',
];
-const ALLOWED_RUNTIME_FUNCTIONS = new Set([
- // List of allowed runtime functions. Others will be replaced with no-ops.
- 'ArrayBufferDetach',
- 'CompileBaseline',
- 'DeoptimizeFunction',
- 'DeoptimizeNow',
- 'EnableCodeLoggingForTesting',
- 'GetUndetectable',
- 'HeapObjectVerify',
- 'IsBeingInterpreted',
- 'NeverOptimizeFunction',
- 'OptimizeFunctionOnNextCall',
- 'OptimizeOsr',
- 'PrepareFunctionForOptimization',
- 'SetAllocationTimeout',
- 'SimulateNewspaceFull',
-]);
-
const MAX_FILE_SIZE_BYTES = 128 * 1024; // 128KB
const MEDIUM_FILE_SIZE_BYTES = 32 * 1024; // 32KB
@@ -260,13 +242,6 @@ function filterDifferentialFuzzFlags(flags) {
flag => _doesntMatch(DISALLOWED_DIFFERENTIAL_FUZZ_FLAGS, flag));
}
-function isAllowedRuntimeFunction(name) {
- if (process.env.APP_NAME != 'd8') {
- return false;
- }
-
- return ALLOWED_RUNTIME_FUNCTIONS.has(name);
-}
module.exports = {
filterDifferentialFuzzFlags: filterDifferentialFuzzFlags,
@@ -274,7 +249,6 @@ module.exports = {
getGeneratedSoftSkipped: getGeneratedSoftSkipped,
getGeneratedSloppy: getGeneratedSloppy,
getSoftSkipped: getSoftSkipped,
- isAllowedRuntimeFunction: isAllowedRuntimeFunction,
isTestSkippedAbs: isTestSkippedAbs,
isTestSkippedRel: isTestSkippedRel,
isTestSoftSkippedAbs: isTestSoftSkippedAbs,
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/mutators/crossover_mutator.js b/deps/v8/tools/clusterfuzz/js_fuzzer/mutators/crossover_mutator.js
index 7e3c4955ce..491501dc5c 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/mutators/crossover_mutator.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/mutators/crossover_mutator.js
@@ -36,12 +36,9 @@ class CrossOverMutator extends mutator.Mutator {
{canHaveSuper: canHaveSuper});
// Insert the statement.
- var templateOptions = Object.assign({}, sourceHelpers.BABYLON_OPTIONS);
- templateOptions['placeholderPattern'] = /^VAR_[0-9]+$/;
-
let toInsert = babelTemplate(
randomExpression.source,
- templateOptions);
+ sourceHelpers.BABYLON_REPLACE_VAR_OPTIONS);
const dependencies = {};
if (randomExpression.dependencies) {
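
The mutator now reuses the shared BABYLON_REPLACE_VAR_OPTIONS (introduced in source_helpers.js below) instead of cloning BABYLON_OPTIONS and patching in the placeholder pattern locally. A hedged illustration of why the restricted /^VAR_[0-9]+$/ pattern matters: @babel/template's documented default pattern (roughly /^[_$A-Z0-9]+$/) would also treat ordinary SCREAMING_CASE identifiers in harvested code as placeholders, so substitution could fail for perfectly valid expressions.

```js
// Illustration only; the real options also carry the parser plugins
// from BABYLON_OPTIONS. The identifier names below are made up.
const babelTemplate = require('@babel/template').default;
const babelTypes = require('@babel/types');

// A harvested expression with one fuzzer placeholder (VAR_0) and one
// ordinary all-caps identifier.
const source = 'VAR_0.push(Number.MAX_SAFE_INTEGER);';

// Restricting placeholders to VAR_<n> leaves MAX_SAFE_INTEGER alone.
const build = babelTemplate(source, { placeholderPattern: /^VAR_[0-9]+$/ });
const node = build({ VAR_0: babelTypes.identifier('__v_0') });
// node represents: __v_0.push(Number.MAX_SAFE_INTEGER);

// With the default pattern, MAX_SAFE_INTEGER would likely be detected as
// a placeholder as well, and the same substitution map would then fail.
```

Sharing one constant keeps the DB writer's validation (isValid above) and the mutator's instantiation behavior in lockstep.
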
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/source_helpers.js b/deps/v8/tools/clusterfuzz/js_fuzzer/source_helpers.js
index 264734607a..d7cb142f81 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/source_helpers.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/source_helpers.js
@@ -46,6 +46,9 @@ const BABYLON_OPTIONS = {
],
}
+const BABYLON_REPLACE_VAR_OPTIONS = Object.assign({}, BABYLON_OPTIONS);
+BABYLON_REPLACE_VAR_OPTIONS['placeholderPattern'] = /^VAR_[0-9]+$/;
+
function _isV8OrSpiderMonkeyLoad(path) {
// 'load' and 'loadRelativeToScript' used by V8 and SpiderMonkey.
return (babelTypes.isIdentifier(path.node.callee) &&
@@ -323,7 +326,6 @@ function loadSource(baseDir, relPath, parseStrict=false) {
removeComments(ast);
cleanAsserts(ast);
- neuterDisallowedV8Natives(ast);
annotateWithOriginalPath(ast, relPath);
const flags = loadFlags(data);
@@ -373,28 +375,6 @@ function cleanAsserts(ast) {
}
/**
- * Filter out disallowed V8 runtime functions.
- */
-function neuterDisallowedV8Natives(ast) {
- babelTraverse(ast, {
- CallExpression(path) {
- if (!babelTypes.isIdentifier(path.node.callee) ||
- !path.node.callee.name.startsWith(V8_BUILTIN_PREFIX)) {
- return;
- }
-
- const functionName = path.node.callee.name.substr(
- V8_BUILTIN_PREFIX.length);
-
- if (!exceptions.isAllowedRuntimeFunction(functionName)) {
- path.replaceWith(babelTypes.callExpression(
- babelTypes.identifier('nop'), []));
- }
- }
- });
-}
-
-/**
* Annotate code with original file path.
*/
function annotateWithOriginalPath(ast, relPath) {
@@ -468,6 +448,7 @@ function generateCode(source, dependencies=[]) {
module.exports = {
BABYLON_OPTIONS: BABYLON_OPTIONS,
+ BABYLON_REPLACE_VAR_OPTIONS: BABYLON_REPLACE_VAR_OPTIONS,
generateCode: generateCode,
loadDependencyAbs: loadDependencyAbs,
loadResource: loadResource,
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_db.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_db.js
new file mode 100644
index 0000000000..1b645865b7
--- /dev/null
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_db.js
@@ -0,0 +1,33 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/**
+ * @fileoverview Test the script building the DB.
+ */
+
+'use strict';
+
+const assert = require('assert');
+const { execSync } = require("child_process");
+const fs = require('fs');
+const path = require('path');
+const tempy = require('tempy');
+
+function buildDb(inputDir, corpusName, outputDir) {
+ execSync(
+ `node build_db.js -i ${inputDir} -o ${outputDir} ${corpusName}`,
+ {stdio: ['pipe']});
+}
+
+describe('DB tests', () => {
+ // Test feeds an expression that does not apply.
+ it('omits erroneous expressions', () => {
+ const outPath = tempy.directory();
+ buildDb('test_data/db', 'this', outPath);
+ const indexFile = path.join(outPath, 'index.json');
+ const indexJSON = JSON.parse(fs.readFileSync(indexFile), 'utf-8');
+ assert.deepEqual(
+ indexJSON, {"statements": [], "superStatements": [], "all": []});
+ });
+});
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_regressions.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_regressions.js
index a753c1c60a..62481f1f40 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_regressions.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test/test_regressions.js
@@ -38,31 +38,6 @@ function execFile(jsFile) {
execSync("node " + jsFile, {stdio: ['pipe']});
}
-function buildDb(inputDir, corpusName, outputDir) {
- execSync(
- `node build_db.js -i ${inputDir} -o ${outputDir} ${corpusName}`,
- {stdio: ['pipe']});
-}
-
-function assertFuzzWithDbThrows(dbInputDir, corpusName, settings, regexp) {
- const outPath = tempy.directory();
- buildDb(dbInputDir, corpusName, outPath);
-
- settings['MUTATE_CROSSOVER_INSERT'] = 1.0;
- assert.throws(
- () => {
- createFuzzTest(
- outPath, settings,
- ['regress/build_db/cross_over_mutator_input.js']);
- },
- err => {
- assert(regexp.test(err));
- return true;
- },
- 'unexpected error',
- );
-}
-
describe('Regression tests', () => {
beforeEach(() => {
helpers.deterministicRandom(sandbox);
@@ -135,25 +110,4 @@ describe('Regression tests', () => {
['regress/numbers/input_indices.js']);
execFile(file);
});
-
- it('create call expression', () => {
- // TODO(machenbach): Build_db extracts a function expression without
- // parentheses, re-parsing this later fails in cross-over mutator.
- assertFuzzWithDbThrows(
- 'test_data/regress/build_db',
- 'destructuring',
- this.settings,
- SYNTAX_ERROR_RE);
- });
-
- it('create assignment expression', () => {
- // TODO(machenbach): Build_db extracts some assignment expressions with a
- // spurious dependency. This leads to an "unknown substitution" error
- // when applying the template.
- assertFuzzWithDbThrows(
- 'test_data/regress/build_db',
- 'this',
- this.settings,
- /.*Unknown substitution.*/);
- });
});
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_class_input.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/cross_over_mutator_class_input.js
index f16fb2fe53..f16fb2fe53 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_class_input.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/cross_over_mutator_class_input.js
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/this/file.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/db/this/file.js
index 115616da0d..115616da0d 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/this/file.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/db/this/file.js
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load.js
index dfa4bc49ba..342c9d87a3 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load.js
@@ -3,5 +3,5 @@
// found in the LICENSE file.
var testLoad = 'test_load';
-d8.file.execute('test_data/mjsunit/test_load_1.js');
-d8.file.execute('test_load_0.js');
+load('test_data/mjsunit/test_load_1.js');
+load('test_load_0.js');
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_0.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_0.js
index 3959a126b4..d0e66e4a9f 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_0.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_0.js
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-d8.file.execute('test_data/mjsunit/test_load_1.js');
-d8.file.execute('test_load_2.js');
-d8.file.execute('test_load_3.js');
+load('test_data/mjsunit/test_load_1.js');
+load('test_load_2.js');
+load('test_load_3.js');
var testLoad0 = 'test_load_0';
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_1.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_1.js
index 8328dd2468..03c9166975 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_1.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_1.js
@@ -2,5 +2,5 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-d8.file.execute('test_load_2.js');
+load('test_load_2.js');
var testLoad1 = 'test_load_1';
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_self.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_self.js
index cd2dfb5c04..31a9f4c507 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_self.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/mjsunit/test_load_self.js
@@ -2,4 +2,4 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-d8.file.execute("test_load_self.js");
+load("test_load_self.js");
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_input.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_input.js
deleted file mode 100644
index 3d7ed65c78..0000000000
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/cross_over_mutator_input.js
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright 2020 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-let x = 2;
-let y = 2;
-Math.pow(x, y);
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/destructuring/input.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/destructuring/input.js
deleted file mode 100644
index fce0782617..0000000000
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/regress/build_db/destructuring/input.js
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright 2020 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-
-let x, y;
-(function([ x = y = 1 ]) {}([]));
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/spidermonkey/test/load.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/spidermonkey/test/load.js
index fa5ddf6086..43a776c476 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/spidermonkey/test/load.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_data/spidermonkey/test/load.js
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-d8.file.execute('load1.js');
+load('load1.js');
loadRelativeToScript('load2.js');
console.log('load.js');
diff --git a/deps/v8/tools/clusterfuzz/js_fuzzer/test_db.js b/deps/v8/tools/clusterfuzz/js_fuzzer/test_db.js
index ff13c383c5..35f7956b76 100644
--- a/deps/v8/tools/clusterfuzz/js_fuzzer/test_db.js
+++ b/deps/v8/tools/clusterfuzz/js_fuzzer/test_db.js
@@ -29,7 +29,6 @@ function main() {
return;
}
- const loader = new sourceHelpers.V8SourceLoader();
const mutateDb = new db.MutateDb(program.input_dir);
const mutator = new crossOverMutator.CrossOverMutator(
{ MUTATE_CROSSOVER_INSERT: 1.0, testing: true }, mutateDb);
@@ -47,9 +46,9 @@ function main() {
() => { return expression; });
// Use a source that will try to insert one statement, allowing
// super.
- const source = loader.load(
+ const source = sourceHelpers.loadSource(
__dirname,
- 'test_data/regress/build_db/cross_over_mutator_class_input.js');
+ 'test_data/cross_over_mutator_class_input.js');
try {
mutator.mutate(source);
nPass++;
diff --git a/deps/v8/tools/clusterfuzz/v8_commands.py b/deps/v8/tools/clusterfuzz/v8_commands.py
index 924acbedd9..f03161c2c4 100644
--- a/deps/v8/tools/clusterfuzz/v8_commands.py
+++ b/deps/v8/tools/clusterfuzz/v8_commands.py
@@ -110,8 +110,7 @@ class Output(object):
self.pid = pid
def HasCrashed(self):
- return (self.exit_code < 0 and
- self.exit_code != -signal.SIGABRT)
+ return self.exit_code < 0
def Execute(args, cwd, timeout=None):
diff --git a/deps/v8/tools/clusterfuzz/v8_foozzie.py b/deps/v8/tools/clusterfuzz/v8_foozzie.py
index 52b7954093..92f881df83 100755
--- a/deps/v8/tools/clusterfuzz/v8_foozzie.py
+++ b/deps/v8/tools/clusterfuzz/v8_foozzie.py
@@ -78,13 +78,6 @@ CONFIGS = dict(
'--always-opt',
'--force-slow-path',
],
- trusted=[
- '--no-untrusted-code-mitigations',
- ],
- trusted_opt=[
- '--always-opt',
- '--no-untrusted-code-mitigations',
- ],
)
BASELINE_CONFIG = 'ignition'
@@ -173,6 +166,15 @@ KNOWN_FAILURES = {
'CrashTests/5694376231632896/1033966.js': 'flaky',
}
+# Flags that are already crashy during smoke tests should not be used.
+DISALLOWED_FLAGS = [
+ '--gdbjit',
+]
+
+
+def filter_flags(flags):
+ return [flag for flag in flags if flag not in DISALLOWED_FLAGS]
+
def infer_arch(d8):
"""Infer the V8 architecture from the build configuration next to the
@@ -223,7 +225,7 @@ class ExecutionArgumentsConfig(object):
d8 = os.path.join(BASE_PATH, d8)
assert os.path.exists(d8)
- flags = CONFIGS[config] + get('config_extra_flags')
+ flags = CONFIGS[config] + filter_flags(get('config_extra_flags'))
RunOptions = namedtuple('RunOptions', ['arch', 'config', 'd8', 'flags'])
return RunOptions(infer_arch(d8), config, d8, flags)
diff --git a/deps/v8/tools/clusterfuzz/v8_smoke_tests.js b/deps/v8/tools/clusterfuzz/v8_smoke_tests.js
index 39eb2d4e21..2c5fab338d 100644
--- a/deps/v8/tools/clusterfuzz/v8_smoke_tests.js
+++ b/deps/v8/tools/clusterfuzz/v8_smoke_tests.js
@@ -40,6 +40,6 @@ print("Sensitive runtime functions are neutered");
%OptimizeFunctionOnNextCall(foo);
foo();
print(%GetOptimizationStatus(foo));
- const fun = new Function("f", "sync", "return %GetOptimizationStatus(f);");
+ const fun = new Function("f", "return %GetOptimizationStatus(f);");
print(fun(foo));
})();
diff --git a/deps/v8/tools/cppgc/gen_cmake.py b/deps/v8/tools/cppgc/gen_cmake.py
index 0375d0fd3b..1063455b7f 100755
--- a/deps/v8/tools/cppgc/gen_cmake.py
+++ b/deps/v8/tools/cppgc/gen_cmake.py
@@ -244,7 +244,7 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
option(CPPGC_ENABLE_OBJECT_NAMES "Enable object names in cppgc for debug purposes" OFF)
option(CPPGC_ENABLE_CAGED_HEAP "Enable heap reservation of size 4GB, only possible for 64bit archs" OFF)
-option(CPPGC_ENABLE_VERIFY_LIVE_BYTES " Enable verification of live bytes in the marking verifier" OFF)
+option(CPPGC_ENABLE_VERIFY_HEAP "Enables additional heap verification phases and checks" OFF)
option(CPPGC_CHECK_ASSIGNMENTS_IN_PREFINALIZERS " Enable assignment checks for Members/Persistents during prefinalizer invocations" OFF)
option(CPPGC_ENABLE_YOUNG_GENERATION "Enable young generation in cppgc" OFF)
set(CPPGC_TARGET_ARCH "x64" CACHE STRING "Target architecture, possible options: x64, x86, arm, arm64, ppc64, s390x, mipsel, mips64el")
@@ -409,7 +409,7 @@ else{else_cond}
deps=['Threads::Threads'],
desc='Main library'),
'sample':
- Target(name='cppgc_sample',
+ Target(name='cppgc_hello_world',
cmake='add_executable',
deps=['cppgc'],
desc='Example'),
@@ -435,8 +435,8 @@ endif()
if(CPPGC_ENABLE_CAGED_HEAP)
target_compile_definitions({target.name} PRIVATE "-DCPPGC_CAGED_HEAP")
endif()
-if(CPPGC_ENABLE_VERIFY_LIVE_BYTES)
- target_compile_definitions({target.name} PRIVATE "-DCPPGC_VERIFY_LIVE_BYTES")
+if(CPPGC_ENABLE_VERIFY_HEAP)
+ target_compile_definitions({target.name} PRIVATE "-DCPPGC_ENABLE_VERIFY_HEAP")
endif()
if(CPPGC_CHECK_ASSIGNMENTS_IN_PREFINALIZERS)
target_compile_definitions({target.name} PRIVATE "-DCPPGC_CHECK_ASSIGNMENTS_IN_PREFINALIZERS")
diff --git a/deps/v8/tools/cppgc/test_cmake.sh b/deps/v8/tools/cppgc/test_cmake.sh
index 77f551c0b5..55765ddcdd 100755
--- a/deps/v8/tools/cppgc/test_cmake.sh
+++ b/deps/v8/tools/cppgc/test_cmake.sh
@@ -50,7 +50,7 @@ cmake -GNinja $rootdir || fail "Failed to execute cmake"
# Build all targets.
ninja cppgc || fail "Failed to build cppgc"
-ninja cppgc_sample || fail "Failed to build sample"
+ninja cppgc_hello_world || fail "Failed to build sample"
ninja cppgc_unittests || fail "Failed to build unittests"
# Run unittests.
diff --git a/deps/v8/tools/dev/gm.py b/deps/v8/tools/dev/gm.py
index 3d52b70cdf..613065d5b1 100755
--- a/deps/v8/tools/dev/gm.py
+++ b/deps/v8/tools/dev/gm.py
@@ -28,6 +28,7 @@ not contain spaces.
from __future__ import print_function
import errno
import os
+import platform
import re
import subprocess
import sys
@@ -42,7 +43,7 @@ BUILD_TARGETS_ALL = ["all"]
# All arches that this script understands.
ARCHES = ["ia32", "x64", "arm", "arm64", "mipsel", "mips64el", "ppc", "ppc64",
- "riscv64", "s390", "s390x", "android_arm", "android_arm64"]
+ "riscv64", "s390", "s390x", "android_arm", "android_arm64", "loong64"]
# Arches that get built/run when you don't specify any.
DEFAULT_ARCHES = ["ia32", "x64", "arm", "arm64"]
# Modes that this script understands.
@@ -250,9 +251,7 @@ def _Notify(summary, body):
print("{} - {}".format(summary, body))
def _GetMachine():
- # Once we migrate to Python3, this can use os.uname().machine.
- # The index-based access is compatible with all Python versions.
- return os.uname()[4]
+ return platform.machine()
def GetPath(arch, mode):
subdir = "%s.%s" % (arch, mode)
@@ -299,6 +298,10 @@ class Config(object):
cpu = "arm64"
elif self.arch == "arm" and _GetMachine() in ("aarch64", "arm64"):
cpu = "arm"
+ elif self.arch == "loong64" and _GetMachine() == "loongarch64":
+ cpu = "loong64"
+ elif self.arch == "mips64el" and _GetMachine() == "mips64":
+ cpu = "mips64el"
elif "64" in self.arch or self.arch == "s390x":
# Native x64 or simulator build.
cpu = "x64"
@@ -310,7 +313,7 @@ class Config(object):
elif self.arch == "android_arm64":
v8_cpu = "arm64"
elif self.arch in ("arm", "arm64", "mipsel", "mips64el", "ppc", "ppc64",
- "riscv64", "s390", "s390x"):
+ "riscv64", "s390", "s390x", "loong64"):
v8_cpu = self.arch
else:
return []
@@ -322,9 +325,9 @@ class Config(object):
return []
def GetSpecialCompiler(self):
- if _GetMachine() == "aarch64":
- # We have no prebuilt Clang for arm64 on Linux, so use the system Clang
- # instead.
+ if _GetMachine() in ("aarch64", "mips64", "loongarch64"):
+ # We have no prebuilt Clang for arm64, mips64 or loongarch64 on Linux,
+ # so use the system Clang instead.
return ["clang_base_path = \"/usr\"", "clang_use_chrome_plugins = false"]
return []
@@ -363,7 +366,7 @@ class Config(object):
csa_trap = re.compile("Specify option( --csa-trap-on-node=[^ ]*)")
match = csa_trap.search(output)
extra_opt = match.group(1) if match else ""
- cmdline = re.compile("python ../../tools/run.py ./mksnapshot (.*)")
+ cmdline = re.compile("python3 ../../tools/run.py ./mksnapshot (.*)")
orig_cmdline = cmdline.search(output).group(1).strip()
cmdline = PrepareMksnapshotCmdline(orig_cmdline, path) + extra_opt
_Notify("V8 build requires your attention",
@@ -503,7 +506,7 @@ def Main(argv):
return_code = 0
# If we have Goma but it is not running, start it.
if (IS_GOMA_MACHINE and
- _Call("ps -e | grep compiler_proxy > /dev/null", silent=True) != 0):
+ _Call("pgrep -x compiler_proxy > /dev/null", silent=True) != 0):
_Call("%s/goma_ctl.py ensure_start" % GOMADIR)
for c in configs:
return_code += configs[c].Build()
diff --git a/deps/v8/tools/gen-postmortem-metadata.py b/deps/v8/tools/gen-postmortem-metadata.py
index 7b3dcedc92..564c750229 100644
--- a/deps/v8/tools/gen-postmortem-metadata.py
+++ b/deps/v8/tools/gen-postmortem-metadata.py
@@ -91,6 +91,16 @@ consts_misc = [
{ 'name': 'TaggedSize', 'value': 'kTaggedSize' },
{ 'name': 'TaggedSizeLog2', 'value': 'kTaggedSizeLog2' },
+ { 'name': 'CodeKindFieldMask', 'value': 'Code::KindField::kMask' },
+ { 'name': 'CodeKindFieldShift', 'value': 'Code::KindField::kShift' },
+
+ { 'name': 'CodeKindBytecodeHandler',
+ 'value': 'static_cast<int>(CodeKind::BYTECODE_HANDLER)' },
+ { 'name': 'CodeKindInterpretedFunction',
+ 'value': 'static_cast<int>(CodeKind::INTERPRETED_FUNCTION)' },
+ { 'name': 'CodeKindBaseline',
+ 'value': 'static_cast<int>(CodeKind::BASELINE)' },
+
{ 'name': 'OddballFalse', 'value': 'Oddball::kFalse' },
{ 'name': 'OddballTrue', 'value': 'Oddball::kTrue' },
{ 'name': 'OddballTheHole', 'value': 'Oddball::kTheHole' },
@@ -189,6 +199,10 @@ consts_misc = [
'value': 'StandardFrameConstants::kFunctionOffset' },
{ 'name': 'off_fp_args',
'value': 'StandardFrameConstants::kFixedFrameSizeAboveFp' },
+ { 'name': 'off_fp_bytecode_array',
+ 'value': 'UnoptimizedFrameConstants::kBytecodeArrayFromFp' },
+ { 'name': 'off_fp_bytecode_offset',
+ 'value': 'UnoptimizedFrameConstants::kBytecodeOffsetOrFeedbackVectorFromFp' },
{ 'name': 'scopeinfo_idx_nparams',
'value': 'ScopeInfo::kParameterCount' },
@@ -250,6 +264,7 @@ extras_accessors = [
'JSObject, elements, Object, kElementsOffset',
'JSObject, internal_fields, uintptr_t, kHeaderSize',
'FixedArray, data, uintptr_t, kHeaderSize',
+ 'BytecodeArray, data, uintptr_t, kHeaderSize',
'JSArrayBuffer, backing_store, uintptr_t, kBackingStoreOffset',
'JSArrayBuffer, byte_length, size_t, kByteLengthOffset',
'JSArrayBufferView, byte_length, size_t, kByteLengthOffset',
@@ -273,6 +288,7 @@ extras_accessors = [
'UncompiledData, inferred_name, String, kInferredNameOffset',
'UncompiledData, start_position, int32_t, kStartPositionOffset',
'UncompiledData, end_position, int32_t, kEndPositionOffset',
+ 'Script, source, Object, kSourceOffset',
'Script, name, Object, kNameOffset',
'Script, line_ends, Object, kLineEndsOffset',
'SharedFunctionInfo, raw_function_token_offset, int16_t, kFunctionTokenOffsetOffset',
@@ -280,6 +296,7 @@ extras_accessors = [
'SharedFunctionInfo, flags, int, kFlagsOffset',
'SharedFunctionInfo, length, uint16_t, kLengthOffset',
'SlicedString, parent, String, kParentOffset',
+ 'Code, flags, uint32_t, kFlagsOffset',
'Code, instruction_start, uintptr_t, kHeaderSize',
'Code, instruction_size, int, kInstructionSizeOffset',
'String, length, int32_t, kLengthOffset',
diff --git a/deps/v8/tools/generate-header-include-checks.py b/deps/v8/tools/generate-header-include-checks.py
index 250b741068..42c118c9d5 100755
--- a/deps/v8/tools/generate-header-include-checks.py
+++ b/deps/v8/tools/generate-header-include-checks.py
@@ -23,7 +23,7 @@ import re
import sys
# TODO(clemensb): Extend to tests.
-DEFAULT_INPUT = ['base', 'src']
+DEFAULT_INPUT = ['base', 'include', 'src']
DEFAULT_GN_FILE = 'BUILD.gn'
MY_DIR = os.path.dirname(os.path.realpath(__file__))
V8_DIR = os.path.dirname(MY_DIR)
@@ -44,7 +44,7 @@ AUTO_EXCLUDE_PATTERNS = [
# platform-specific headers
'\\b{}\\b'.format(p) for p in
('win', 'win32', 'ia32', 'x64', 'arm', 'arm64', 'mips', 'mips64', 's390',
- 'ppc','riscv64')]
+ 'ppc', 'riscv64', 'loong64')]
args = None
def parse_args():
diff --git a/deps/v8/tools/mb/mb.py b/deps/v8/tools/mb/mb.py
index 42ed60c7ef..671773272a 100755
--- a/deps/v8/tools/mb/mb.py
+++ b/deps/v8/tools/mb/mb.py
@@ -242,8 +242,6 @@ class MetaBuildWrapper(object):
' This can be either a regular path or a '
'GN-style source-relative path like '
'//out/Default.'))
- subp.add_argument('-s', '--swarmed', action='store_true',
- help='Run under swarming with the default dimensions')
subp.add_argument('-d', '--dimension', default=[], action='append', nargs=2,
dest='dimensions', metavar='FOO bar',
help='dimension to filter on')
@@ -375,67 +373,7 @@ class MetaBuildWrapper(object):
if ret:
return ret
- if self.args.swarmed:
- return self._RunUnderSwarming(build_dir, target)
- else:
- return self._RunLocallyIsolated(build_dir, target)
-
- def _RunUnderSwarming(self, build_dir, target):
- # TODO(dpranke): Look up the information for the target in
- # the //testing/buildbot.json file, if possible, so that we
- # can determine the isolate target, command line, and additional
- # swarming parameters, if possible.
- #
- # TODO(dpranke): Also, add support for sharding and merging results.
- # TODO(liviurau): While this seems to not be used in V8 yet, we need to add
- # a switch for internal try-bots, since they need to use 'chrome-swarming'
- cas_instance = 'chromium-swarm'
- dimensions = []
- for k, v in self._DefaultDimensions() + self.args.dimensions:
- dimensions += ['-d', k, v]
-
- archive_json_path = self.ToSrcRelPath(
- '%s/%s.archive.json' % (build_dir, target))
- cmd = [
- self.PathJoin(self.chromium_src_dir, 'tools', 'luci-go',
- self.isolate_exe),
- 'archive',
- '-i',
- self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
- '-cas-instance', cas_instance,
- '-dump-json',
- archive_json_path,
- ]
- ret, _, _ = self.Run(cmd, force_verbose=False)
- if ret:
- return ret
-
- try:
- archive_hashes = json.loads(self.ReadFile(archive_json_path))
- except Exception:
- self.Print(
- 'Failed to read JSON file "%s"' % archive_json_path, file=sys.stderr)
- return 1
- try:
- cas_digest = archive_hashes[target]
- except Exception:
- self.Print(
- 'Cannot find hash for "%s" in "%s", file content: %s' %
- (target, archive_json_path, archive_hashes),
- file=sys.stderr)
- return 1
-
- cmd = [
- self.executable,
- self.PathJoin('tools', 'swarming_client', 'swarming.py'),
- 'run',
- '-digests', cas_digest,
- '-S', 'chromium-swarm.appspot.com',
- ] + dimensions
- if self.args.extra_args:
- cmd += ['--'] + self.args.extra_args
- ret, _, _ = self.Run(cmd, force_verbose=True, buffer_output=False)
- return ret
+ return self._RunLocallyIsolated(build_dir, target)
def _RunLocallyIsolated(self, build_dir, target):
cmd = [
diff --git a/deps/v8/tools/mb/mb_unittest.py b/deps/v8/tools/mb/mb_unittest.py
index 4c67495de4..86d9cd403b 100755
--- a/deps/v8/tools/mb/mb_unittest.py
+++ b/deps/v8/tools/mb/mb_unittest.py
@@ -523,28 +523,6 @@ class UnitTest(unittest.TestCase):
self.check(['run', '-c', 'debug_goma', '//out/Default',
'base_unittests'], files=files, ret=0)
- def test_run_swarmed(self):
- files = {
- '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
- "{'base_unittests': {"
- " 'label': '//base:base_unittests',"
- " 'type': 'raw',"
- " 'args': [],"
- "}}\n"
- ),
- '/fake_src/out/Default/base_unittests.runtime_deps': (
- "base_unittests\n"
- ),
- 'out/Default/base_unittests.archive.json':
- ("{\"base_unittests\":\"fake_hash\"}"),
- }
-
- mbw = self.fake_mbw(files=files)
- self.check(['run', '-s', '-c', 'debug_goma', '//out/Default',
- 'base_unittests'], mbw=mbw, ret=0)
- self.check(['run', '-s', '-c', 'debug_goma', '-d', 'os', 'Win7',
- '//out/Default', 'base_unittests'], mbw=mbw, ret=0)
-
def test_lookup(self):
self.check(['lookup', '-c', 'debug_goma'], ret=0,
out=('\n'
diff --git a/deps/v8/tools/profile.mjs b/deps/v8/tools/profile.mjs
index 4127b34b07..526baa835e 100644
--- a/deps/v8/tools/profile.mjs
+++ b/deps/v8/tools/profile.mjs
@@ -116,8 +116,9 @@ export class Script {
sourcePosition = new SourcePosition(this, line, column,)
this._addSourcePosition(line, column, sourcePosition);
}
- if (entry.entry?.type == "Script") {
- // Mark the source position of scripts, for inline scripts which
+ if (this.sourcePosition === undefined && entry.entry?.type === "Script") {
+ // Mark the source position of scripts, for inline scripts which don't
+ // start at line 1.
this.sourcePosition = sourcePosition;
}
sourcePosition.addEntry(entry);
diff --git a/deps/v8/tools/release/PRESUBMIT.py b/deps/v8/tools/release/PRESUBMIT.py
index 3bcb26d29f..a982b2e153 100644
--- a/deps/v8/tools/release/PRESUBMIT.py
+++ b/deps/v8/tools/release/PRESUBMIT.py
@@ -2,7 +2,13 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-def CheckChangeOnCommit(input_api, output_api):
+def _CommonChecks(input_api, output_api):
tests = input_api.canned_checks.GetUnitTestsInDirectory(
input_api, output_api, '.', files_to_check=['test_scripts.py$'])
return input_api.RunTests(tests)
+
+def CheckChangeOnUpload(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
diff --git a/deps/v8/tools/release/auto_tag.py b/deps/v8/tools/release/auto_tag.py
index fddefed61f..7e77c313d8 100755
--- a/deps/v8/tools/release/auto_tag.py
+++ b/deps/v8/tools/release/auto_tag.py
@@ -23,7 +23,7 @@ class Preparation(Step):
self.CommonPrepare()
self.PrepareBranch()
- self.GitCheckout("master")
+ self.GitCheckout("main")
self.vc.Pull()
diff --git a/deps/v8/tools/release/check_clusterfuzz.py b/deps/v8/tools/release/check_clusterfuzz.py
index 021cd55286..b1b7e084df 100755
--- a/deps/v8/tools/release/check_clusterfuzz.py
+++ b/deps/v8/tools/release/check_clusterfuzz.py
@@ -28,7 +28,7 @@ import urllib2
# Constants to git repos.
BASE_URL = "https://chromium.googlesource.com"
-DEPS_LOG = BASE_URL + "/chromium/src/+log/master/DEPS?format=JSON"
+DEPS_LOG = BASE_URL + "/chromium/src/+log/main/DEPS?format=JSON"
# Constants for retrieving v8 rolls.
CRREV = "https://cr-rev.appspot.com/_ah/api/crrev/v1/commit/%s"
diff --git a/deps/v8/tools/release/common_includes.py b/deps/v8/tools/release/common_includes.py
index 5049cc4534..b61a3e2e27 100644
--- a/deps/v8/tools/release/common_includes.py
+++ b/deps/v8/tools/release/common_includes.py
@@ -214,13 +214,13 @@ class VCInterface(object):
def GetBranches(self):
raise NotImplementedError()
- def MasterBranch(self):
+ def MainBranch(self):
raise NotImplementedError()
def CandidateBranch(self):
raise NotImplementedError()
- def RemoteMasterBranch(self):
+ def RemoteMainBranch(self):
raise NotImplementedError()
def RemoteCandidateBranch(self):
@@ -258,14 +258,14 @@ class GitInterface(VCInterface):
# Remove 'branch-heads/' prefix.
return map(lambda s: s[13:], branches)
- def MasterBranch(self):
- return "master"
+ def MainBranch(self):
+ return "main"
def CandidateBranch(self):
return "candidates"
- def RemoteMasterBranch(self):
- return "origin/master"
+ def RemoteMainBranch(self):
+ return "origin/main"
def RemoteCandidateBranch(self):
return "origin/candidates"
@@ -275,7 +275,7 @@ class GitInterface(VCInterface):
# want.
if name.startswith('refs/'):
return name
- if name in ["candidates", "master"]:
+ if name in ["candidates", "main"]:
return "refs/remotes/origin/%s" % name
try:
# Check if branch is in heads.
@@ -474,8 +474,8 @@ class Step(GitRecipesMixin):
if not self.GitIsWorkdirClean(): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
- # Checkout master in case the script was left on a work branch.
- self.GitCheckout('origin/master')
+ # Checkout main in case the script was left on a work branch.
+ self.GitCheckout('origin/main')
# Fetch unfetched revisions.
self.vc.Fetch()
@@ -485,7 +485,7 @@ class Step(GitRecipesMixin):
self.DeleteBranch(self._config["BRANCHNAME"])
def CommonCleanup(self):
- self.GitCheckout('origin/master')
+ self.GitCheckout('origin/main')
self.GitDeleteBranch(self._config["BRANCHNAME"])
# Clean up all temporary files.
@@ -605,13 +605,13 @@ class Step(GitRecipesMixin):
if match:
# Legacy: In the old process there's one level of indirection. The
# version is on the candidates branch and points to the real release
- # base on master through the commit message.
+ # base on main through the commit message.
return match.group("git_rev")
match = PUSH_MSG_NEW_RE.match(title)
if match:
- # This is a new-style v8 version branched from master. The commit
+ # This is a new-style v8 version branched from main. The commit
# "latest_hash" is the version-file change. Its parent is the release
- # base on master.
+ # base on main.
return self.GitLog(n=1, format="%H", git_hash="%s^" % latest_hash)
self.Die("Unknown latest release: %s" % latest_hash)
diff --git a/deps/v8/tools/release/create_release.py b/deps/v8/tools/release/create_release.py
index 20a666fb83..d1a066f00b 100755
--- a/deps/v8/tools/release/create_release.py
+++ b/deps/v8/tools/release/create_release.py
@@ -19,7 +19,7 @@ class Preparation(Step):
def RunStep(self):
self.Git("fetch origin +refs/heads/*:refs/heads/*")
- self.GitCheckout("origin/master")
+ self.GitCheckout("origin/main")
self.DeleteBranch("work-branch")
@@ -28,7 +28,7 @@ class PrepareBranchRevision(Step):
def RunStep(self):
self["push_hash"] = (self._options.revision or
- self.GitLog(n=1, format="%H", branch="origin/master"))
+ self.GitLog(n=1, format="%H", branch="origin/main"))
assert self["push_hash"]
print("Release revision %s" % self["push_hash"])
@@ -39,16 +39,16 @@ class IncrementVersion(Step):
def RunStep(self):
latest_version = self.GetLatestVersion()
- # The version file on master can be used to bump up major/minor at
+ # The version file on main can be used to bump up major/minor at
# branch time.
- self.GitCheckoutFile(VERSION_FILE, self.vc.RemoteMasterBranch())
- self.ReadAndPersistVersion("master_")
- master_version = self.ArrayToVersion("master_")
+ self.GitCheckoutFile(VERSION_FILE, self.vc.RemoteMainBranch())
+ self.ReadAndPersistVersion("main_")
+ main_version = self.ArrayToVersion("main_")
- # Use the highest version from master or from tags to determine the new
+ # Use the highest version from main or from tags to determine the new
# version.
authoritative_version = sorted(
- [master_version, latest_version], key=SortingKey)[1]
+ [main_version, latest_version], key=SortingKey)[1]
self.StoreVersion(authoritative_version, "authoritative_")
# Variables prefixed with 'new_' contain the new version numbers for the
@@ -74,7 +74,7 @@ class DetectLastRelease(Step):
MESSAGE = "Detect commit ID of last release base."
def RunStep(self):
- self["last_push_master"] = self.GetLatestReleaseBase()
+ self["last_push_main"] = self.GetLatestReleaseBase()
class DeleteBranchRef(Step):
@@ -107,7 +107,7 @@ class MakeBranch(Step):
MESSAGE = "Create the branch."
def RunStep(self):
- self.Git("reset --hard origin/master")
+ self.Git("reset --hard origin/main")
self.Git("new-branch work-branch --upstream origin/%s" % self["version"])
self.GitCheckoutFile(VERSION_FILE, self["latest_version"])
@@ -186,7 +186,7 @@ class CleanUp(Step):
print("Congratulations, you have successfully created version %s."
% self["version"])
- self.GitCheckout("origin/master")
+ self.GitCheckout("origin/main")
self.DeleteBranch("work-branch")
self.Git("gc")
diff --git a/deps/v8/tools/release/merge_to_branch.py b/deps/v8/tools/release/merge_to_branch.py
index 44f933e541..08a36125f8 100755
--- a/deps/v8/tools/release/merge_to_branch.py
+++ b/deps/v8/tools/release/merge_to_branch.py
@@ -77,7 +77,7 @@ class SearchArchitecturePorts(Step):
# Search for commits which matches the "Port XXX" pattern.
git_hashes = self.GitLog(reverse=True, format="%H",
grep="^[Pp]ort %s" % revision,
- branch=self.vc.RemoteMasterBranch())
+ branch=self.vc.RemoteMainBranch())
for git_hash in git_hashes.splitlines():
revision_title = self.GitLog(n=1, format="%s", git_hash=git_hash)
@@ -198,7 +198,7 @@ class CleanUp(Step):
class MergeToBranch(ScriptsBase):
def _Description(self):
return ("Performs the necessary steps to merge revisions from "
- "master to release branches like 4.5. This script does not "
+ "main to release branches like 4.5. This script does not "
"version the commit. See http://goo.gl/9ke2Vw for more "
"information.")
diff --git a/deps/v8/tools/release/mergeinfo.py b/deps/v8/tools/release/mergeinfo.py
index bed7441f85..8fae8ad05c 100755
--- a/deps/v8/tools/release/mergeinfo.py
+++ b/deps/v8/tools/release/mergeinfo.py
@@ -30,25 +30,25 @@ def describe_commit(git_working_dir, hash_to_search, one_line=False):
def get_followup_commits(git_working_dir, hash_to_search):
cmd = ['log', '--grep=' + hash_to_search, GIT_OPTION_HASH_ONLY,
- 'remotes/origin/master'];
+ 'remotes/origin/main'];
return git_execute(git_working_dir, cmd).strip().splitlines()
def get_merge_commits(git_working_dir, hash_to_search):
- merges = get_related_commits_not_on_master(git_working_dir, hash_to_search)
- false_merges = get_related_commits_not_on_master(
+ merges = get_related_commits_not_on_main(git_working_dir, hash_to_search)
+ false_merges = get_related_commits_not_on_main(
git_working_dir, 'Cr-Branched-From: ' + hash_to_search)
false_merges = set(false_merges)
return ([merge_commit for merge_commit in merges
if merge_commit not in false_merges])
-def get_related_commits_not_on_master(git_working_dir, grep_command):
+def get_related_commits_not_on_main(git_working_dir, grep_command):
commits = git_execute(git_working_dir, ['log',
'--all',
'--grep=' + grep_command,
GIT_OPTION_ONELINE,
'--decorate',
'--not',
- 'remotes/origin/master',
+ 'remotes/origin/main',
GIT_OPTION_HASH_ONLY])
return commits.splitlines()
diff --git a/deps/v8/tools/release/roll_merge.py b/deps/v8/tools/release/roll_merge.py
index 636c882980..d25f95e397 100755
--- a/deps/v8/tools/release/roll_merge.py
+++ b/deps/v8/tools/release/roll_merge.py
@@ -78,7 +78,7 @@ class SearchArchitecturePorts(Step):
# Search for commits which matches the "Port XXX" pattern.
git_hashes = self.GitLog(reverse=True, format="%H",
grep="Port %s" % revision,
- branch=self.vc.RemoteMasterBranch())
+ branch=self.vc.RemoteMainBranch())
for git_hash in git_hashes.splitlines():
revision_title = self.GitLog(n=1, format="%s", git_hash=git_hash)
@@ -226,7 +226,7 @@ class CleanUp(Step):
class RollMerge(ScriptsBase):
def _Description(self):
return ("Performs the necessary steps to merge revisions from "
- "master to other branches, including candidates and roll branches.")
+ "main to other branches, including candidates and roll branches.")
def _PrepareOptions(self, parser):
group = parser.add_mutually_exclusive_group(required=True)
diff --git a/deps/v8/tools/release/search_related_commits.py b/deps/v8/tools/release/search_related_commits.py
index e6e52d2196..48e6ae2592 100755
--- a/deps/v8/tools/release/search_related_commits.py
+++ b/deps/v8/tools/release/search_related_commits.py
@@ -200,7 +200,7 @@ if __name__ == "__main__": # pragma: no cover
"This tool analyzes the commit range between <of> and <until>. "
"It finds commits which belong together e.g. Implement/Revert pairs and "
"Implement/Port/Revert triples. All supplied hashes need to be "
- "from the same branch e.g. master.")
+ "from the same branch e.g. main.")
parser.add_argument("-g", "--git-dir", required=False, default=".",
help="The path to your git working directory.")
parser.add_argument("--verbose", action="store_true",
diff --git a/deps/v8/tools/release/test_mergeinfo.py b/deps/v8/tools/release/test_mergeinfo.py
index f8619bb2fd..9404542ef6 100755
--- a/deps/v8/tools/release/test_mergeinfo.py
+++ b/deps/v8/tools/release/test_mergeinfo.py
@@ -31,7 +31,7 @@ class TestMergeInfo(unittest.TestCase):
return output
def _update_origin(self):
- # Fetch from origin to get/update the origin/master branch
+ # Fetch from origin to get/update the origin/main branch
self._execute_git(['fetch', 'origin'])
def setUp(self):
@@ -54,10 +54,10 @@ class TestMergeInfo(unittest.TestCase):
def _assert_correct_standard_result(
self, result, all_commits, hash_of_first_commit):
- self.assertEqual(len(result), 1, "Master commit not found")
+ self.assertEqual(len(result), 1, "Main commit not found")
self.assertTrue(
result.get(hash_of_first_commit),
- "Master commit is wrong")
+ "Main commit is wrong")
self.assertEqual(
len(result[hash_of_first_commit]),
@@ -124,7 +124,7 @@ class TestMergeInfo(unittest.TestCase):
def testSearchMerges(self):
self._execute_git(['branch', 'test'])
- self._execute_git(['checkout', 'master'])
+ self._execute_git(['checkout', 'main'])
message = 'real initial commit'
self._make_empty_commit(message)
commits = self._get_commits()
@@ -142,7 +142,7 @@ class TestMergeInfo(unittest.TestCase):
message = 'Cr-Branched-From: ' + hash_of_first_commit
hash_of_ignored = self._make_empty_commit(message)
- self._execute_git(['checkout', 'master'])
+ self._execute_git(['checkout', 'main'])
followups = mergeinfo.get_followup_commits(
self.base_dir,
diff --git a/deps/v8/tools/release/test_scripts.py b/deps/v8/tools/release/test_scripts.py
index e8664cb2f1..e8757cf277 100755
--- a/deps/v8/tools/release/test_scripts.py
+++ b/deps/v8/tools/release/test_scripts.py
@@ -300,7 +300,7 @@ class ScriptTest(unittest.TestCase):
def testCommonPrepareDefault(self):
self.Expect([
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
@@ -312,7 +312,7 @@ class ScriptTest(unittest.TestCase):
def testCommonPrepareNoConfirm(self):
self.Expect([
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("n"),
@@ -323,7 +323,7 @@ class ScriptTest(unittest.TestCase):
def testCommonPrepareDeleteBranchFailure(self):
self.Expect([
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
@@ -395,13 +395,13 @@ class ScriptTest(unittest.TestCase):
test_tag
"""
- # Version as tag: 3.22.4.0. Version on master: 3.22.6.
+ # Version as tag: 3.22.4.0. Version on main: 3.22.6.
# Make sure that the latest version is 3.22.6.0.
def testIncrementVersion(self):
self.Expect([
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
- Cmd("git checkout -f origin/master -- include/v8-version.h",
+ Cmd("git checkout -f origin/main -- include/v8-version.h",
"", cb=lambda: self.WriteFakeVersionFile(3, 22, 6)),
])
@@ -430,7 +430,7 @@ test_tag
def testCreateRelease(self):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
- # The version file on master has build level 5.
+ # The version file on main has build level 5.
self.WriteFakeVersionFile(build=5)
commit_msg = """Version 3.22.5"""
@@ -449,18 +449,18 @@ test_tag
expectations = [
Cmd("git fetch origin +refs/heads/*:refs/heads/*", ""),
- Cmd("git checkout -f origin/master", "", cb=self.WriteFakeWatchlistsFile),
+ Cmd("git checkout -f origin/main", "", cb=self.WriteFakeWatchlistsFile),
Cmd("git branch", ""),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
- Cmd("git checkout -f origin/master -- include/v8-version.h",
+ Cmd("git checkout -f origin/main -- include/v8-version.h",
"", cb=self.WriteFakeVersionFile),
Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
Cmd("git log -1 --format=%s release_hash", "Version 3.22.4\n"),
Cmd("git log -1 --format=%H release_hash^", "abc3\n"),
Cmd("git log --format=%H abc3..push_hash", "rev1\n"),
Cmd("git push origin push_hash:refs/heads/3.22.5", ""),
- Cmd("git reset --hard origin/master", ""),
+ Cmd("git reset --hard origin/main", ""),
Cmd("git new-branch work-branch --upstream origin/3.22.5", ""),
Cmd("git checkout -f 3.22.4 -- include/v8-version.h", "",
cb=self.WriteFakeVersionFile),
@@ -475,8 +475,8 @@ test_tag
"\"Version 3.22.5\" origin/3.22.5", "hsh_to_tag"),
Cmd("git tag 3.22.5 hsh_to_tag", ""),
Cmd("git push origin refs/tags/3.22.5:refs/tags/3.22.5", ""),
- Cmd("git checkout -f origin/master", ""),
- Cmd("git branch", "* master\n work-branch\n"),
+ Cmd("git checkout -f origin/main", ""),
+ Cmd("git branch", "* main\n work-branch\n"),
Cmd("git branch -D work-branch", ""),
Cmd("git gc", ""),
]
@@ -488,7 +488,7 @@ test_tag
CreateRelease(TEST_CONFIG, self).Run(args)
# Note: The version file is on build number 5 again in the end of this test
- # since the git command that merges to master is mocked out.
+ # since the git command that merges to main is mocked out.
# Check for correct content of the WATCHLISTS file
@@ -718,21 +718,21 @@ BUG=123,234,345,456,567,v8:123
self.Expect([
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
TEST_CONFIG["BRANCHNAME"], ""),
Cmd(("git log --format=%H --grep=\"Port ab12345\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
"ab45678\nab23456"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd(("git log --format=%H --grep=\"Port ab23456\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
""),
Cmd(("git log --format=%H --grep=\"Port ab34567\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
"ab56789"),
Cmd("git log -1 --format=%s ab56789", "Title3"),
RL("Y"), # Automatically add corresponding ports (ab34567, ab56789)?
@@ -792,7 +792,7 @@ BUG=123,234,345,456,567,v8:123
"hsh_to_tag"),
Cmd("git tag 3.22.5.1 hsh_to_tag", ""),
Cmd("git push origin refs/tags/3.22.5.1:refs/tags/3.22.5.1", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
@@ -855,21 +855,21 @@ NOTREECHECKS=true
self.Expect([
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
TEST_CONFIG["BRANCHNAME"], ""),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab12345\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
"ab45678\nab23456"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab23456\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
""),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab34567\" "
- "--reverse origin/master"),
+ "--reverse origin/main"),
"ab56789"),
Cmd("git log -1 --format=%s ab56789", "Title3"),
RL("Y"), # Automatically add corresponding ports (ab34567, ab56789)?
@@ -916,7 +916,7 @@ NOTREECHECKS=true
Cmd("git cl presubmit", "Presubmit successfull\n"),
Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
cb=VerifyLand),
- Cmd("git checkout -f origin/master", ""),
+ Cmd("git checkout -f origin/main", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
diff --git a/deps/v8/tools/release/test_search_related_commits.py b/deps/v8/tools/release/test_search_related_commits.py
index cf6123611f..6943915fd6 100755
--- a/deps/v8/tools/release/test_search_related_commits.py
+++ b/deps/v8/tools/release/test_search_related_commits.py
@@ -43,7 +43,7 @@ class TestSearchRelatedCommits(unittest.TestCase):
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28059}"""
+ Cr-Commit-Position: refs/heads/main@{#28059}"""
self._make_empty_commit(message)
message = """[crankshaft] Do some stuff
@@ -52,7 +52,7 @@ class TestSearchRelatedCommits(unittest.TestCase):
Review URL: https://codereview.chromium.org/1084243007
- Cr-Commit-Position: refs/heads/master@{#28030}"""
+ Cr-Commit-Position: refs/heads/main@{#28030}"""
self._make_empty_commit(message)
@@ -62,10 +62,10 @@ class TestSearchRelatedCommits(unittest.TestCase):
def _assert_correct_standard_result(
self, result, all_commits, hash_of_first_commit):
- self.assertEqual(len(result), 1, "Master commit not found")
+ self.assertEqual(len(result), 1, "Main commit not found")
self.assertTrue(
result.get(hash_of_first_commit),
- "Master commit is wrong")
+ "Main commit is wrong")
self.assertEqual(
len(result[hash_of_first_commit]),
@@ -86,12 +86,12 @@ class TestSearchRelatedCommits(unittest.TestCase):
def testSearchByCommitPosition(self):
message = """Revert of some stuff.
- > Cr-Commit-Position: refs/heads/master@{#28059}
+ > Cr-Commit-Position: refs/heads/main@{#28059}
R=mstarzinger@chromium.org
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28088}"""
+ Cr-Commit-Position: refs/heads/main@{#28088}"""
self._make_empty_commit(message)
@@ -106,12 +106,12 @@ class TestSearchRelatedCommits(unittest.TestCase):
def testSearchByTitle(self):
message = """Revert of some stuff.
> [turbofan] Sanitize language mode for javascript operators.
- > Cr-Commit-Position: refs/heads/master@{#289}
+ > Cr-Commit-Position: refs/heads/main@{#289}
R=mstarzinger@chromium.org
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28088}"""
+ Cr-Commit-Position: refs/heads/main@{#28088}"""
self._make_empty_commit(message)
@@ -134,7 +134,7 @@ class TestSearchRelatedCommits(unittest.TestCase):
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28088}"""
+ Cr-Commit-Position: refs/heads/main@{#28088}"""
self._make_empty_commit(message)
@@ -162,16 +162,16 @@ class TestSearchRelatedCommits(unittest.TestCase):
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28088}"""
+ Cr-Commit-Position: refs/heads/main@{#28088}"""
self._make_empty_commit(message)
# Related commits happen before and after separator so it is a hit
- commit_pos_of_master = "27088"
- message = """Implement awesome feature: Master commit
+ commit_pos_of_main = "27088"
+ message = """Implement awesome feature: Main commit
Review URL: https://codereview.chromium.org/1084243235
- Cr-Commit-Position: refs/heads/master@{#""" + commit_pos_of_master + "}"
+ Cr-Commit-Position: refs/heads/main@{#""" + commit_pos_of_main + "}"
self._make_empty_commit(message)
# Separator commit
@@ -179,7 +179,7 @@ class TestSearchRelatedCommits(unittest.TestCase):
Review URL: https://codereview.chromium.org/1084243456
- Cr-Commit-Position: refs/heads/master@{#28173}"""
+ Cr-Commit-Position: refs/heads/main@{#28173}"""
self._make_empty_commit(message)
# Filler commit
@@ -187,11 +187,11 @@ class TestSearchRelatedCommits(unittest.TestCase):
self._make_empty_commit(message)
# Related commit after separator: a hit
- message = "Patch r" + commit_pos_of_master +""" done
+ message = "Patch r" + commit_pos_of_main +""" done
Review URL: https://codereview.chromium.org/1084243235
- Cr-Commit-Position: refs/heads/master@{#29567}"""
+ Cr-Commit-Position: refs/heads/main@{#29567}"""
self._make_empty_commit(message)
#Fetch again for an update
@@ -221,12 +221,12 @@ class TestSearchRelatedCommits(unittest.TestCase):
def testPrettyPrint(self):
message = """Revert of some stuff.
> [turbofan] Sanitize language mode for javascript operators.
- > Cr-Commit-Position: refs/heads/master@{#289}
+ > Cr-Commit-Position: refs/heads/main@{#289}
R=mstarzinger@chromium.org
Review URL: https://codereview.chromium.org/1084243005
- Cr-Commit-Position: refs/heads/master@{#28088}"""
+ Cr-Commit-Position: refs/heads/main@{#28088}"""
self._make_empty_commit(message)
@@ -248,7 +248,7 @@ class TestSearchRelatedCommits(unittest.TestCase):
output.append(current_line)
self.assertIs(len(output), 2, "Not exactly two entries written")
- self.assertTrue(output[0].startswith("+"), "Master entry not marked with +")
+ self.assertTrue(output[0].startswith("+"), "Main entry not marked with +")
self.assertTrue(output[1].startswith("| "), "Child entry not marked with |")
def testNothingFound(self):
diff --git a/deps/v8/tools/run_perf.py b/deps/v8/tools/run_perf.py
index cdbbed8176..f2e72261f0 100644
--- a/deps/v8/tools/run_perf.py
+++ b/deps/v8/tools/run_perf.py
@@ -126,6 +126,7 @@ from testrunner.local import command
from testrunner.local import utils
from testrunner.objects.output import Output, NULL_OUTPUT
+# for py2/py3 compatibility
try:
basestring # Python 2
except NameError: # Python 3
@@ -152,7 +153,7 @@ def GeometricMean(values):
The mean is calculated using log to avoid overflow.
"""
- values = map(float, values)
+ values = list(map(float, values))
return math.exp(sum(map(math.log, values)) / len(values))
@@ -224,9 +225,9 @@ class ResultTracker(object):
def ToDict(self):
return {
- 'traces': self.traces.values(),
+ 'traces': list(self.traces.values()),
'errors': self.errors,
- 'runnables': self.runnables.values(),
+ 'runnables': list(self.runnables.values()),
}
def WriteToFile(self, file_name):
@@ -596,9 +597,11 @@ def find_build_directory(base_path, arch):
'Release',
]
possible_paths = [os.path.join(base_path, p) for p in possible_paths]
- actual_paths = filter(is_build, possible_paths)
+ actual_paths = list(filter(is_build, possible_paths))
assert actual_paths, 'No build directory found.'
- assert len(actual_paths) == 1, 'Found ambiguous build directories.'
+  assert len(
+      actual_paths
+  ) == 1, 'Found ambiguous build directories, use --binary-override-path.'
return actual_paths[0]
@@ -677,10 +680,10 @@ class DesktopPlatform(Platform):
if args.prioritize:
self.command_prefix += ['-n', '-20']
if args.affinitize != None:
- # schedtool expects a bit pattern when setting affinity, where each
- # bit set to '1' corresponds to a core where the process may run on.
- # First bit corresponds to CPU 0. Since the 'affinitize' parameter is
- # a core number, we need to map to said bit pattern.
+ # schedtool expects a bit pattern when setting affinity, where each
+ # bit set to '1' corresponds to a core where the process may run on.
+ # First bit corresponds to CPU 0. Since the 'affinitize' parameter is
+ # a core number, we need to map to said bit pattern.
cpu = int(args.affinitize)
core = 1 << cpu
self.command_prefix += ['-a', ('0x%x' % core)]
@@ -841,10 +844,10 @@ class CustomMachineConfiguration:
try:
with open('/sys/devices/system/cpu/present', 'r') as f:
indexes = f.readline()
- r = map(int, indexes.split('-'))
+ r = list(map(int, indexes.split('-')))
if len(r) == 1:
- return range(r[0], r[0] + 1)
- return range(r[0], r[1] + 1)
+ return list(range(r[0], r[0] + 1))
+ return list(range(r[0], r[1] + 1))
except Exception:
logging.exception('Failed to retrieve number of CPUs.')
raise
@@ -1034,7 +1037,7 @@ def Main(argv):
# Ensure all arguments have absolute path before we start changing current
# directory.
- args.suite = map(os.path.abspath, args.suite)
+ args.suite = list(map(os.path.abspath, args.suite))
prev_aslr = None
prev_cpu_gov = None
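Most of the run_perf.py hunks above are Python 2/3 compatibility fixes: under Python 3, map(), filter() and range() return lazy iterators, so any result that is indexed, passed to len(), iterated more than once, or serialized has to be materialized with list(). A minimal standalone sketch of the GeometricMean case (illustration only, not the module itself):

    import math

    def geometric_mean(values):
        # map() is lazy in Python 3; without list() the len() below raises
        # TypeError and a second pass would see an exhausted iterator.
        values = list(map(float, values))
        return math.exp(sum(map(math.log, values)) / len(values))

    print(geometric_mean([1, 2, 4]))  # -> 2.0 (up to floating point)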
diff --git a/deps/v8/tools/testrunner/base_runner.py b/deps/v8/tools/testrunner/base_runner.py
index cf5854c32c..48d3460e48 100644
--- a/deps/v8/tools/testrunner/base_runner.py
+++ b/deps/v8/tools/testrunner/base_runner.py
@@ -113,7 +113,8 @@ SLOW_ARCHS = [
"mips64el",
"s390",
"s390x",
- "riscv64"
+ "riscv64",
+ "loong64"
]
@@ -191,6 +192,7 @@ class BuildConfig(object):
self.lite_mode = build_config['v8_enable_lite_mode']
self.pointer_compression = build_config['v8_enable_pointer_compression']
self.pointer_compression_shared_cage = build_config['v8_enable_pointer_compression_shared_cage']
+ self.virtual_memory_cage = build_config['v8_enable_virtual_memory_cage']
self.third_party_heap = build_config['v8_enable_third_party_heap']
self.webassembly = build_config['v8_enable_webassembly']
# Export only for MIPS target
@@ -234,6 +236,8 @@ class BuildConfig(object):
detected_options.append('pointer_compression')
if self.pointer_compression_shared_cage:
detected_options.append('pointer_compression_shared_cage')
+ if self.virtual_memory_cage:
+ detected_options.append('virtual_memory_cage')
if self.third_party_heap:
detected_options.append('third_party_heap')
if self.webassembly:
@@ -267,6 +271,7 @@ class BaseTestRunner(object):
self.build_config = None
self.mode_options = None
self.target_os = None
+ self.infra_staging = False
@property
def framework_name(self):
@@ -279,6 +284,7 @@ class BaseTestRunner(object):
try:
parser = self._create_parser()
options, args = self._parse_args(parser, sys_args)
+ self.infra_staging = options.infra_staging
if options.swarming:
# Swarming doesn't print how isolated commands are called. Lets make
# this less cryptic by printing it ourselves.
@@ -348,6 +354,13 @@ class BaseTestRunner(object):
help="How long should fuzzer run")
parser.add_option("--swarming", default=False, action="store_true",
help="Indicates running test driver on swarming.")
+ parser.add_option('--infra-staging', help='Use new test runner features',
+ dest='infra_staging', default=None,
+ action='store_true')
+ parser.add_option('--no-infra-staging',
+ help='Opt out of new test runner features',
+ dest='infra_staging', default=None,
+ action='store_false')
parser.add_option("-j", help="The number of parallel tasks to run",
default=0, type=int)
@@ -370,9 +383,6 @@ class BaseTestRunner(object):
help="Path to a file for storing json results.")
parser.add_option('--slow-tests-cutoff', type="int", default=100,
help='Collect N slowest tests')
- parser.add_option("--junitout", help="File name of the JUnit output")
- parser.add_option("--junittestsuite", default="v8tests",
- help="The testsuite name in the JUnit output file")
parser.add_option("--exit-after-n-failures", type="int", default=100,
help="Exit after the first N failures instead of "
"running all tests. Pass 0 to disable this feature.")
@@ -666,6 +676,9 @@ class BaseTestRunner(object):
self.build_config.arch == 'mipsel':
no_simd_hardware = not simd_mips
+ if self.build_config.arch == 'loong64':
+ no_simd_hardware = True
+
# S390 hosts without VEF1 do not support Simd.
if self.build_config.arch == 's390x' and \
not self.build_config.simulator_run and \
@@ -678,6 +691,10 @@ class BaseTestRunner(object):
utils.GuessPowerProcessorVersion() < 9:
no_simd_hardware = True
+ # riscv64 do not support Simd instructions
+ if self.build_config.arch == 'riscv64':
+ no_simd_hardware = True
+
return {
"arch": self.build_config.arch,
"asan": self.build_config.asan,
@@ -716,6 +733,7 @@ class BaseTestRunner(object):
"lite_mode": self.build_config.lite_mode,
"pointer_compression": self.build_config.pointer_compression,
"pointer_compression_shared_cage": self.build_config.pointer_compression_shared_cage,
+ "virtual_memory_cage": self.build_config.virtual_memory_cage,
}
def _runner_flags(self):
@@ -812,9 +830,6 @@ class BaseTestRunner(object):
def _create_progress_indicators(self, test_count, options):
procs = [PROGRESS_INDICATORS[options.progress]()]
- if options.junitout:
- procs.append(progress.JUnitTestProgressIndicator(options.junitout,
- options.junittestsuite))
if options.json_test_results:
procs.append(progress.JsonTestProgressIndicator(self.framework_name))
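Besides the loong64/riscv64 and virtual_memory_cage plumbing, the base_runner.py hunks fold the --infra-staging switch (previously parsed in standard_runner.py) into the shared option parser and drop the JUnit options along with the JUnit progress indicator. The paired flags write to the same optparse destination with a default of None, so the runner can tell "not specified" apart from an explicit opt-in or opt-out; a standalone sketch of that pattern:

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('--infra-staging', dest='infra_staging', default=None,
                      action='store_true', help='Use new test runner features')
    parser.add_option('--no-infra-staging', dest='infra_staging', default=None,
                      action='store_false', help='Opt out of new test runner features')

    options, _ = parser.parse_args(['--infra-staging'])
    print(options.infra_staging)  # True; False for --no-infra-staging; None if omitted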
diff --git a/deps/v8/tools/testrunner/local/android.py b/deps/v8/tools/testrunner/local/android.py
index ebf04afad6..cfc4e537f5 100644
--- a/deps/v8/tools/testrunner/local/android.py
+++ b/deps/v8/tools/testrunner/local/android.py
@@ -128,12 +128,6 @@ class _Driver(object):
)
self.push_file(
shell_dir,
- 'snapshot_blob_trusted.bin',
- target_dir,
- skip_if_missing=True,
- )
- self.push_file(
- shell_dir,
'icudtl.dat',
target_dir,
skip_if_missing=True,
diff --git a/deps/v8/tools/testrunner/local/junit_output.py b/deps/v8/tools/testrunner/local/junit_output.py
deleted file mode 100644
index 52f31ec422..0000000000
--- a/deps/v8/tools/testrunner/local/junit_output.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2013 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-import xml.etree.ElementTree as xml
-
-
-class JUnitTestOutput:
- def __init__(self, test_suite_name):
- self.root = xml.Element("testsuite")
- self.root.attrib["name"] = test_suite_name
-
- def HasRunTest(self, test_name, test_cmd, test_duration, test_failure):
- testCaseElement = xml.Element("testcase")
- testCaseElement.attrib["name"] = test_name
- testCaseElement.attrib["cmd"] = test_cmd
- testCaseElement.attrib["time"] = str(round(test_duration, 3))
- if len(test_failure):
- failureElement = xml.Element("failure")
- failureElement.text = test_failure
- testCaseElement.append(failureElement)
- self.root.append(testCaseElement)
-
- def FinishAndWrite(self, f):
- xml.ElementTree(self.root).write(f, "UTF-8")
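junit_output.py is deleted together with the --junitout/--junittestsuite options above and the JUnitTestProgressIndicator further down. For reference, the removed helper only wrapped xml.etree.ElementTree, so an equivalent JUnit-style report can still be produced directly if one is needed downstream; a rough standalone sketch with illustrative names and values:

    import sys
    import xml.etree.ElementTree as xml

    root = xml.Element("testsuite")
    root.attrib["name"] = "v8tests"

    case = xml.Element("testcase")
    case.attrib["name"] = "mjsunit/example"
    case.attrib["time"] = str(round(1.234, 3))
    failure = xml.Element("failure")
    failure.text = "stderr:\n...\n--- TIMEOUT ---"
    case.append(failure)
    root.append(case)

    # With a non-"unicode" encoding, ElementTree writes bytes, so the target
    # stream must be binary (e.g. sys.stdout.buffer or a file opened "wb").
    xml.ElementTree(root).write(sys.stdout.buffer, "UTF-8")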
diff --git a/deps/v8/tools/testrunner/local/statusfile.py b/deps/v8/tools/testrunner/local/statusfile.py
index 48b9286959..de903752bb 100644
--- a/deps/v8/tools/testrunner/local/statusfile.py
+++ b/deps/v8/tools/testrunner/local/statusfile.py
@@ -64,7 +64,7 @@ VARIABLES = {ALWAYS: True}
for var in ["debug", "release", "big", "little", "android",
"arm", "arm64", "ia32", "mips", "mipsel", "mips64", "mips64el",
"x64", "ppc", "ppc64", "s390", "s390x", "macos", "windows",
- "linux", "aix", "r1", "r2", "r3", "r5", "r6", "riscv64"]:
+ "linux", "aix", "r1", "r2", "r3", "r5", "r6", "riscv64", "loong64"]:
VARIABLES[var] = var
# Allow using variants as keywords.
diff --git a/deps/v8/tools/testrunner/local/variants.py b/deps/v8/tools/testrunner/local/variants.py
index ba4eff451a..42bf12d464 100644
--- a/deps/v8/tools/testrunner/local/variants.py
+++ b/deps/v8/tools/testrunner/local/variants.py
@@ -13,11 +13,12 @@ ALL_VARIANT_FLAGS = {
"infra_staging": [[]],
"interpreted_regexp": [["--regexp-interpret-all"]],
"experimental_regexp": [["--default-to-experimental-regexp-engine"]],
- "concurrent_inlining": [["--concurrent-inlining"]],
"jitless": [["--jitless"]],
"sparkplug": [["--sparkplug"]],
"always_sparkplug": [[ "--always-sparkplug", "--sparkplug"]],
"minor_mc": [["--minor-mc"]],
+ "no_concurrent_inlining": [["--no-concurrent-inlining",
+ "--no-stress-concurrent-inlining"]],
"no_lfa": [["--no-lazy-feedback-allocation"]],
# No optimization means disable all optimizations. OptimizeFunctionOnNextCall
# would not force optimization too. It turns into a Nop. Please see
@@ -38,7 +39,6 @@ ALL_VARIANT_FLAGS = {
"stress_snapshot": [["--stress-snapshot"]],
# Trigger stress sampling allocation profiler with sample interval = 2^14
"stress_sampling": [["--stress-sampling-allocation-profiler=16384"]],
- "trusted": [["--no-untrusted-code-mitigations"]],
"no_wasm_traps": [["--no-wasm-trap-handler"]],
"turboprop": [["--turboprop"]],
"turboprop_as_toptier": [["--turboprop-as-toptier", "--turboprop"]],
@@ -58,13 +58,14 @@ INCOMPATIBLE_FLAGS_PER_VARIANT = {
"nooptimization": ["--always-opt"],
"slow_path": ["--no-force-slow-path"],
"stress_concurrent_allocation": ["--single-threaded-gc", "--predictable"],
- "stress_concurrent_inlining": ["--single-threaded", "--predictable", "--turboprop"],
+ "stress_concurrent_inlining": ["--single-threaded", "--predictable",
+ "--turboprop", "--lazy-feedback-allocation"],
"turboprop": ["--stress_concurrent_inlining"],
# The fast API tests initialize an embedder object that never needs to be
# serialized to the snapshot, so we don't have a
# SerializeInternalFieldsCallback for it, so they are incompatible with
# stress_snapshot.
- "stress_snapshot": ["--turbo-fast-api-calls"],
+ "stress_snapshot": ["--expose-fast-api"],
"stress": ["--always-opt", "--no-always-opt",
"--max-inlined-bytecode-size=*",
"--max-inlined-bytecode-size-cumulative=*", "--stress-inline",
diff --git a/deps/v8/tools/testrunner/num_fuzzer.py b/deps/v8/tools/testrunner/num_fuzzer.py
index d5b243ba96..ebf01078fb 100755
--- a/deps/v8/tools/testrunner/num_fuzzer.py
+++ b/deps/v8/tools/testrunner/num_fuzzer.py
@@ -20,7 +20,7 @@ from testrunner.testproc import fuzzer
from testrunner.testproc.base import TestProcProducer
from testrunner.testproc.combiner import CombinerProc
from testrunner.testproc.execution import ExecutionProc
-from testrunner.testproc.expectation import ForgiveTimeoutProc
+from testrunner.testproc.expectation import ExpectationProc
from testrunner.testproc.filter import StatusFileFilterProc, NameFilterProc
from testrunner.testproc.loader import LoadProc
from testrunner.testproc.progress import ResultsTracker
@@ -63,6 +63,11 @@ class NumFuzzer(base_runner.BaseTestRunner):
help="probability [0-10] of adding --random-gc-interval "
"flag to the test")
+ # Stress stack size
+ parser.add_option("--stress-stack-size", default=0, type="int",
+ help="probability [0-10] of adding --stack-size "
+ "flag to the test")
+
# Stress tasks
parser.add_option("--stress-delay-tasks", default=0, type="int",
help="probability [0-10] of adding --stress-delay-tasks "
@@ -119,7 +124,10 @@ class NumFuzzer(base_runner.BaseTestRunner):
def _runner_flags(self):
"""Extra default flags specific to the test runner implementation."""
- return ['--no-abort-on-contradictory-flags']
+ flags = ['--no-abort-on-contradictory-flags', '--testing-d8-test-runner']
+ if self.infra_staging:
+ flags.append('--no-fail')
+ return flags
def _get_statusfile_variables(self, options):
variables = (
@@ -133,6 +141,7 @@ class NumFuzzer(base_runner.BaseTestRunner):
options.stress_compaction,
options.stress_gc,
options.stress_delay_tasks,
+ options.stress_stack_size,
options.stress_thread_pool_size])),
})
return variables
@@ -154,7 +163,7 @@ class NumFuzzer(base_runner.BaseTestRunner):
# TODO(majeski): Improve sharding when combiner is present. Maybe select
# different random seeds for shards instead of splitting tests.
self._create_shard_proc(options),
- ForgiveTimeoutProc(),
+ ExpectationProc(self.infra_staging),
combiner,
self._create_fuzzer(fuzzer_rng, options),
sigproc,
@@ -221,6 +230,7 @@ class NumFuzzer(base_runner.BaseTestRunner):
add('marking', options.stress_marking)
add('scavenge', options.stress_scavenge)
add('gc_interval', options.stress_gc)
+ add('stack', options.stress_stack_size)
add('threads', options.stress_thread_pool_size)
add('delay', options.stress_delay_tasks)
add('deopt', options.stress_deopt, options.stress_deopt_min)
diff --git a/deps/v8/tools/testrunner/objects/testcase.py b/deps/v8/tools/testrunner/objects/testcase.py
index a1f1754b22..e044c20805 100644
--- a/deps/v8/tools/testrunner/objects/testcase.py
+++ b/deps/v8/tools/testrunner/objects/testcase.py
@@ -166,6 +166,15 @@ class TestCase(object):
self._expected_outcomes = (
self.expected_outcomes + [statusfile.TIMEOUT])
+ def allow_pass(self):
+ if self.expected_outcomes == outproc.OUTCOMES_TIMEOUT:
+ self._expected_outcomes = outproc.OUTCOMES_PASS_OR_TIMEOUT
+ elif self.expected_outcomes == outproc.OUTCOMES_FAIL:
+ self._expected_outcomes = outproc.OUTCOMES_FAIL_OR_PASS
+ elif statusfile.PASS not in self.expected_outcomes:
+ self._expected_outcomes = (
+ self.expected_outcomes + [statusfile.PASS])
+
@property
def expected_outcomes(self):
def is_flag(maybe_flag):
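The new allow_pass() mirrors the existing allow_timeouts(): it widens a test's expected outcomes so that a PASS is no longer reported as unexpected, intended for the fuzzer's staged --no-fail mode (ExpectationProc below calls it only when infra_staging is set). A standalone restatement of the rule, with plain strings standing in for the statusfile/outproc constants:

    PASS, FAIL, TIMEOUT = "PASS", "FAIL", "TIMEOUT"

    def allow_pass(expected):
        if expected == [TIMEOUT]:
            return [PASS, TIMEOUT]      # OUTCOMES_PASS_OR_TIMEOUT
        if expected == [FAIL]:
            return [FAIL, PASS]         # OUTCOMES_FAIL_OR_PASS
        if PASS not in expected:
            return expected + [PASS]
        return expected

    print(allow_pass([TIMEOUT]))        # ['PASS', 'TIMEOUT']
    print(allow_pass([FAIL, TIMEOUT]))  # ['FAIL', 'TIMEOUT', 'PASS']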
diff --git a/deps/v8/tools/testrunner/outproc/base.py b/deps/v8/tools/testrunner/outproc/base.py
index 9646b96c06..74a1d90159 100644
--- a/deps/v8/tools/testrunner/outproc/base.py
+++ b/deps/v8/tools/testrunner/outproc/base.py
@@ -12,8 +12,10 @@ from ..testproc.result import Result
OUTCOMES_PASS = [statusfile.PASS]
OUTCOMES_FAIL = [statusfile.FAIL]
+OUTCOMES_TIMEOUT = [statusfile.TIMEOUT]
OUTCOMES_PASS_OR_TIMEOUT = [statusfile.PASS, statusfile.TIMEOUT]
OUTCOMES_FAIL_OR_TIMEOUT = [statusfile.FAIL, statusfile.TIMEOUT]
+OUTCOMES_FAIL_OR_PASS = [statusfile.FAIL, statusfile.PASS]
class BaseOutProc(object):
diff --git a/deps/v8/tools/testrunner/standard_runner.py b/deps/v8/tools/testrunner/standard_runner.py
index 41352b34e8..50482da70e 100755
--- a/deps/v8/tools/testrunner/standard_runner.py
+++ b/deps/v8/tools/testrunner/standard_runner.py
@@ -132,13 +132,6 @@ class StandardTestRunner(base_runner.BaseTestRunner):
parser.add_option('--cfi-vptr',
help='Run tests with UBSAN cfi_vptr option.',
default=False, action='store_true')
- parser.add_option('--infra-staging', help='Use new test runner features',
- dest='infra_staging', default=None,
- action='store_true')
- parser.add_option('--no-infra-staging',
- help='Opt out of new test runner features',
- dest='infra_staging', default=None,
- action='store_false')
parser.add_option('--no-sorting', '--nosorting',
help='Don\'t sort tests according to duration of last'
' run.',
diff --git a/deps/v8/tools/testrunner/testproc/expectation.py b/deps/v8/tools/testrunner/testproc/expectation.py
index 285a599a74..df7a2c2b1a 100644
--- a/deps/v8/tools/testrunner/testproc/expectation.py
+++ b/deps/v8/tools/testrunner/testproc/expectation.py
@@ -7,14 +7,17 @@ from . import base
from testrunner.local import statusfile
from testrunner.outproc import base as outproc
-class ForgiveTimeoutProc(base.TestProcProducer):
+class ExpectationProc(base.TestProcProducer):
"""Test processor passing tests and results through and forgiving timeouts."""
- def __init__(self):
- super(ForgiveTimeoutProc, self).__init__('no-timeout')
+ def __init__(self, infra_staging):
+ super(ExpectationProc, self).__init__('no-timeout')
+ self.infra_staging = infra_staging
def _next_test(self, test):
subtest = self._create_subtest(test, 'no_timeout')
subtest.allow_timeouts()
+ if self.infra_staging:
+ subtest.allow_pass()
return self._send_test(subtest)
def _result_for(self, test, subtest, result):
diff --git a/deps/v8/tools/testrunner/testproc/fuzzer.py b/deps/v8/tools/testrunner/testproc/fuzzer.py
index 1237da56b2..67250b1c74 100644
--- a/deps/v8/tools/testrunner/testproc/fuzzer.py
+++ b/deps/v8/tools/testrunner/testproc/fuzzer.py
@@ -44,6 +44,7 @@ EXTRA_FLAGS = [
(0.1, '--regexp-tier-up-ticks=100'),
(0.1, '--stress-background-compile'),
(0.1, '--stress-concurrent-inlining'),
+ (0.1, '--stress-flush-code'),
(0.1, '--stress-lazy-source-positions'),
(0.1, '--stress-wasm-code-gc'),
(0.1, '--turbo-instruction-scheduling'),
@@ -265,6 +266,10 @@ class CompactionFuzzer(Fuzzer):
while True:
yield ['--stress-compaction-random']
+class StackSizeFuzzer(Fuzzer):
+ def create_flags_generator(self, rng, test, analysis_value):
+ while True:
+ yield ['--stack-size=%d' % rng.randint(54, 983)]
class TaskDelayFuzzer(Fuzzer):
def create_flags_generator(self, rng, test, analysis_value):
@@ -322,6 +327,7 @@ FUZZERS = {
'gc_interval': (GcIntervalAnalyzer, GcIntervalFuzzer),
'marking': (MarkingAnalyzer, MarkingFuzzer),
'scavenge': (ScavengeAnalyzer, ScavengeFuzzer),
+ 'stack': (None, StackSizeFuzzer),
'threads': (None, ThreadPoolSizeFuzzer),
}
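The new StackSizeFuzzer plugs into the same generator protocol as the other fuzzers registered in FUZZERS: each yield produces one list of extra flags for a generated test variant, here a random --stack-size between 54 and 983. A simplified standalone copy showing how such a generator is consumed (illustration only; the real class lives in testproc/fuzzer.py):

    import random

    class StackSizeFuzzer(object):
        def create_flags_generator(self, rng, test, analysis_value):
            while True:
                yield ['--stack-size=%d' % rng.randint(54, 983)]

    rng = random.Random(42)
    gen = StackSizeFuzzer().create_flags_generator(rng, test=None, analysis_value=None)
    for _ in range(3):
        print(next(gen))  # e.g. ['--stack-size=711']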
diff --git a/deps/v8/tools/testrunner/testproc/progress.py b/deps/v8/tools/testrunner/testproc/progress.py
index ec97ab226f..c102cddec1 100644
--- a/deps/v8/tools/testrunner/testproc/progress.py
+++ b/deps/v8/tools/testrunner/testproc/progress.py
@@ -15,7 +15,6 @@ import time
from . import base
from . import util
-from ..local import junit_output
def print_failure_header(test, is_flaky=False):
@@ -362,45 +361,6 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
print(("\r" + (" " * last_length) + "\r"), end='')
-class JUnitTestProgressIndicator(ProgressIndicator):
- def __init__(self, junitout, junittestsuite):
- super(JUnitTestProgressIndicator, self).__init__()
- self._requirement = base.DROP_PASS_STDOUT
-
- self.outputter = junit_output.JUnitTestOutput(junittestsuite)
- if junitout:
- self.outfile = open(junitout, "w")
- else:
- self.outfile = sys.stdout
-
- def _on_result_for(self, test, result):
- # TODO(majeski): Support for dummy/grouped results
- fail_text = ""
- output = result.output
- if result.has_unexpected_output:
- stdout = output.stdout.strip()
- if len(stdout):
- fail_text += "stdout:\n%s\n" % stdout
- stderr = output.stderr.strip()
- if len(stderr):
- fail_text += "stderr:\n%s\n" % stderr
- fail_text += "Command: %s" % result.cmd.to_string()
- if output.HasCrashed():
- fail_text += "exit code: %d\n--- CRASHED ---" % output.exit_code
- if output.HasTimedOut():
- fail_text += "--- TIMEOUT ---"
- self.outputter.HasRunTest(
- test_name=str(test),
- test_cmd=result.cmd.to_string(relative=True),
- test_duration=output.duration,
- test_failure=fail_text)
-
- def finished(self):
- self.outputter.FinishAndWrite(self.outfile)
- if self.outfile != sys.stdout:
- self.outfile.close()
-
-
class JsonTestProgressIndicator(ProgressIndicator):
def __init__(self, framework_name):
super(JsonTestProgressIndicator, self).__init__()
diff --git a/deps/v8/tools/unittests/run_tests_test.py b/deps/v8/tools/unittests/run_tests_test.py
index d9e998312e..89acacaaa3 100755
--- a/deps/v8/tools/unittests/run_tests_test.py
+++ b/deps/v8/tools/unittests/run_tests_test.py
@@ -350,7 +350,8 @@ class SystemTest(unittest.TestCase):
v8_enable_i18n_support=False, v8_target_cpu='x86',
v8_enable_verify_csa=False, v8_enable_lite_mode=False,
v8_enable_pointer_compression=False,
- v8_enable_pointer_compression_shared_cage=False)
+ v8_enable_pointer_compression_shared_cage=False,
+ v8_enable_virtual_memory_cage=False)
result = run_tests(
basedir,
'--progress=verbose',
diff --git a/deps/v8/tools/unittests/testdata/testroot1/v8_build_config.json b/deps/v8/tools/unittests/testdata/testroot1/v8_build_config.json
index 04ccbb1600..837f7ef5fc 100644
--- a/deps/v8/tools/unittests/testdata/testroot1/v8_build_config.json
+++ b/deps/v8/tools/unittests/testdata/testroot1/v8_build_config.json
@@ -22,6 +22,7 @@
"v8_enable_lite_mode": false,
"v8_enable_pointer_compression": true,
"v8_enable_pointer_compression_shared_cage": true,
+ "v8_enable_virtual_memory_cage": false,
"v8_control_flow_integrity": false,
"v8_enable_single_generation": false,
"v8_enable_third_party_heap": false,
diff --git a/deps/v8/tools/unittests/testdata/testroot2/v8_build_config.json b/deps/v8/tools/unittests/testdata/testroot2/v8_build_config.json
index b3e36ef6de..fbe348d973 100644
--- a/deps/v8/tools/unittests/testdata/testroot2/v8_build_config.json
+++ b/deps/v8/tools/unittests/testdata/testroot2/v8_build_config.json
@@ -22,6 +22,7 @@
"v8_enable_lite_mode": false,
"v8_enable_pointer_compression": false,
"v8_enable_pointer_compression_shared_cage": false,
+ "v8_enable_virtual_memory_cage": false,
"v8_control_flow_integrity": false,
"v8_enable_single_generation": false,
"v8_enable_third_party_heap": false,
diff --git a/deps/v8/tools/unittests/testdata/testroot3/v8_build_config.json b/deps/v8/tools/unittests/testdata/testroot3/v8_build_config.json
index 04ccbb1600..837f7ef5fc 100644
--- a/deps/v8/tools/unittests/testdata/testroot3/v8_build_config.json
+++ b/deps/v8/tools/unittests/testdata/testroot3/v8_build_config.json
@@ -22,6 +22,7 @@
"v8_enable_lite_mode": false,
"v8_enable_pointer_compression": true,
"v8_enable_pointer_compression_shared_cage": true,
+ "v8_enable_virtual_memory_cage": false,
"v8_control_flow_integrity": false,
"v8_enable_single_generation": false,
"v8_enable_third_party_heap": false,
diff --git a/deps/v8/tools/v8heapconst.py b/deps/v8/tools/v8heapconst.py
index 097b6a7267..5693bf147b 100644
--- a/deps/v8/tools/v8heapconst.py
+++ b/deps/v8/tools/v8heapconst.py
@@ -55,105 +55,104 @@ INSTANCE_TYPES = {
91: "ARRAY_BOILERPLATE_DESCRIPTION_TYPE",
92: "ASM_WASM_DATA_TYPE",
93: "ASYNC_GENERATOR_REQUEST_TYPE",
- 94: "BASELINE_DATA_TYPE",
- 95: "BREAK_POINT_TYPE",
- 96: "BREAK_POINT_INFO_TYPE",
- 97: "CACHED_TEMPLATE_OBJECT_TYPE",
- 98: "CALL_HANDLER_INFO_TYPE",
- 99: "CLASS_POSITIONS_TYPE",
- 100: "DEBUG_INFO_TYPE",
- 101: "ENUM_CACHE_TYPE",
- 102: "FEEDBACK_CELL_TYPE",
- 103: "FUNCTION_TEMPLATE_RARE_DATA_TYPE",
- 104: "INTERCEPTOR_INFO_TYPE",
- 105: "INTERPRETER_DATA_TYPE",
- 106: "MODULE_REQUEST_TYPE",
- 107: "PROMISE_CAPABILITY_TYPE",
- 108: "PROMISE_REACTION_TYPE",
- 109: "PROPERTY_DESCRIPTOR_OBJECT_TYPE",
- 110: "PROTOTYPE_INFO_TYPE",
- 111: "REG_EXP_BOILERPLATE_DESCRIPTION_TYPE",
- 112: "SCRIPT_TYPE",
- 113: "SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE",
- 114: "STACK_FRAME_INFO_TYPE",
- 115: "TEMPLATE_OBJECT_DESCRIPTION_TYPE",
- 116: "TUPLE2_TYPE",
- 117: "WASM_EXCEPTION_TAG_TYPE",
- 118: "WASM_INDIRECT_FUNCTION_TABLE_TYPE",
- 119: "FIXED_ARRAY_TYPE",
- 120: "HASH_TABLE_TYPE",
- 121: "EPHEMERON_HASH_TABLE_TYPE",
- 122: "GLOBAL_DICTIONARY_TYPE",
- 123: "NAME_DICTIONARY_TYPE",
- 124: "NUMBER_DICTIONARY_TYPE",
- 125: "ORDERED_HASH_MAP_TYPE",
- 126: "ORDERED_HASH_SET_TYPE",
- 127: "ORDERED_NAME_DICTIONARY_TYPE",
- 128: "SIMPLE_NUMBER_DICTIONARY_TYPE",
- 129: "CLOSURE_FEEDBACK_CELL_ARRAY_TYPE",
- 130: "OBJECT_BOILERPLATE_DESCRIPTION_TYPE",
- 131: "SCRIPT_CONTEXT_TABLE_TYPE",
- 132: "BYTE_ARRAY_TYPE",
- 133: "BYTECODE_ARRAY_TYPE",
- 134: "FIXED_DOUBLE_ARRAY_TYPE",
- 135: "INTERNAL_CLASS_WITH_SMI_ELEMENTS_TYPE",
- 136: "SLOPPY_ARGUMENTS_ELEMENTS_TYPE",
- 137: "AWAIT_CONTEXT_TYPE",
- 138: "BLOCK_CONTEXT_TYPE",
- 139: "CATCH_CONTEXT_TYPE",
- 140: "DEBUG_EVALUATE_CONTEXT_TYPE",
- 141: "EVAL_CONTEXT_TYPE",
- 142: "FUNCTION_CONTEXT_TYPE",
- 143: "MODULE_CONTEXT_TYPE",
- 144: "NATIVE_CONTEXT_TYPE",
- 145: "SCRIPT_CONTEXT_TYPE",
- 146: "WITH_CONTEXT_TYPE",
- 147: "EXPORTED_SUB_CLASS_BASE_TYPE",
- 148: "EXPORTED_SUB_CLASS_TYPE",
- 149: "EXPORTED_SUB_CLASS2_TYPE",
- 150: "SMALL_ORDERED_HASH_MAP_TYPE",
- 151: "SMALL_ORDERED_HASH_SET_TYPE",
- 152: "SMALL_ORDERED_NAME_DICTIONARY_TYPE",
- 153: "DESCRIPTOR_ARRAY_TYPE",
- 154: "STRONG_DESCRIPTOR_ARRAY_TYPE",
- 155: "SOURCE_TEXT_MODULE_TYPE",
- 156: "SYNTHETIC_MODULE_TYPE",
- 157: "UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE",
- 158: "UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE",
- 159: "WEAK_FIXED_ARRAY_TYPE",
- 160: "TRANSITION_ARRAY_TYPE",
- 161: "CELL_TYPE",
- 162: "CODE_TYPE",
- 163: "CODE_DATA_CONTAINER_TYPE",
- 164: "COVERAGE_INFO_TYPE",
- 165: "EMBEDDER_DATA_ARRAY_TYPE",
- 166: "FEEDBACK_METADATA_TYPE",
- 167: "FEEDBACK_VECTOR_TYPE",
- 168: "FILLER_TYPE",
- 169: "FREE_SPACE_TYPE",
- 170: "INTERNAL_CLASS_TYPE",
- 171: "INTERNAL_CLASS_WITH_STRUCT_ELEMENTS_TYPE",
- 172: "MAP_TYPE",
- 173: "MEGA_DOM_HANDLER_TYPE",
- 174: "ON_HEAP_BASIC_BLOCK_PROFILER_DATA_TYPE",
- 175: "PREPARSE_DATA_TYPE",
- 176: "PROPERTY_ARRAY_TYPE",
- 177: "PROPERTY_CELL_TYPE",
- 178: "SCOPE_INFO_TYPE",
- 179: "SHARED_FUNCTION_INFO_TYPE",
- 180: "SMI_BOX_TYPE",
- 181: "SMI_PAIR_TYPE",
- 182: "SORT_STATE_TYPE",
- 183: "SWISS_NAME_DICTIONARY_TYPE",
- 184: "WEAK_ARRAY_LIST_TYPE",
- 185: "WEAK_CELL_TYPE",
- 186: "WASM_ARRAY_TYPE",
- 187: "WASM_STRUCT_TYPE",
- 188: "JS_PROXY_TYPE",
+ 94: "BREAK_POINT_TYPE",
+ 95: "BREAK_POINT_INFO_TYPE",
+ 96: "CACHED_TEMPLATE_OBJECT_TYPE",
+ 97: "CALL_HANDLER_INFO_TYPE",
+ 98: "CLASS_POSITIONS_TYPE",
+ 99: "DEBUG_INFO_TYPE",
+ 100: "ENUM_CACHE_TYPE",
+ 101: "FEEDBACK_CELL_TYPE",
+ 102: "FUNCTION_TEMPLATE_RARE_DATA_TYPE",
+ 103: "INTERCEPTOR_INFO_TYPE",
+ 104: "INTERPRETER_DATA_TYPE",
+ 105: "MODULE_REQUEST_TYPE",
+ 106: "PROMISE_CAPABILITY_TYPE",
+ 107: "PROMISE_REACTION_TYPE",
+ 108: "PROPERTY_DESCRIPTOR_OBJECT_TYPE",
+ 109: "PROTOTYPE_INFO_TYPE",
+ 110: "REG_EXP_BOILERPLATE_DESCRIPTION_TYPE",
+ 111: "SCRIPT_TYPE",
+ 112: "SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE",
+ 113: "STACK_FRAME_INFO_TYPE",
+ 114: "TEMPLATE_OBJECT_DESCRIPTION_TYPE",
+ 115: "TUPLE2_TYPE",
+ 116: "WASM_EXCEPTION_TAG_TYPE",
+ 117: "WASM_INDIRECT_FUNCTION_TABLE_TYPE",
+ 118: "FIXED_ARRAY_TYPE",
+ 119: "HASH_TABLE_TYPE",
+ 120: "EPHEMERON_HASH_TABLE_TYPE",
+ 121: "GLOBAL_DICTIONARY_TYPE",
+ 122: "NAME_DICTIONARY_TYPE",
+ 123: "NUMBER_DICTIONARY_TYPE",
+ 124: "ORDERED_HASH_MAP_TYPE",
+ 125: "ORDERED_HASH_SET_TYPE",
+ 126: "ORDERED_NAME_DICTIONARY_TYPE",
+ 127: "SIMPLE_NUMBER_DICTIONARY_TYPE",
+ 128: "CLOSURE_FEEDBACK_CELL_ARRAY_TYPE",
+ 129: "OBJECT_BOILERPLATE_DESCRIPTION_TYPE",
+ 130: "SCRIPT_CONTEXT_TABLE_TYPE",
+ 131: "BYTE_ARRAY_TYPE",
+ 132: "BYTECODE_ARRAY_TYPE",
+ 133: "FIXED_DOUBLE_ARRAY_TYPE",
+ 134: "INTERNAL_CLASS_WITH_SMI_ELEMENTS_TYPE",
+ 135: "SLOPPY_ARGUMENTS_ELEMENTS_TYPE",
+ 136: "AWAIT_CONTEXT_TYPE",
+ 137: "BLOCK_CONTEXT_TYPE",
+ 138: "CATCH_CONTEXT_TYPE",
+ 139: "DEBUG_EVALUATE_CONTEXT_TYPE",
+ 140: "EVAL_CONTEXT_TYPE",
+ 141: "FUNCTION_CONTEXT_TYPE",
+ 142: "MODULE_CONTEXT_TYPE",
+ 143: "NATIVE_CONTEXT_TYPE",
+ 144: "SCRIPT_CONTEXT_TYPE",
+ 145: "WITH_CONTEXT_TYPE",
+ 146: "EXPORTED_SUB_CLASS_BASE_TYPE",
+ 147: "EXPORTED_SUB_CLASS_TYPE",
+ 148: "EXPORTED_SUB_CLASS2_TYPE",
+ 149: "SMALL_ORDERED_HASH_MAP_TYPE",
+ 150: "SMALL_ORDERED_HASH_SET_TYPE",
+ 151: "SMALL_ORDERED_NAME_DICTIONARY_TYPE",
+ 152: "DESCRIPTOR_ARRAY_TYPE",
+ 153: "STRONG_DESCRIPTOR_ARRAY_TYPE",
+ 154: "SOURCE_TEXT_MODULE_TYPE",
+ 155: "SYNTHETIC_MODULE_TYPE",
+ 156: "UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE",
+ 157: "UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE",
+ 158: "WEAK_FIXED_ARRAY_TYPE",
+ 159: "TRANSITION_ARRAY_TYPE",
+ 160: "CELL_TYPE",
+ 161: "CODE_TYPE",
+ 162: "CODE_DATA_CONTAINER_TYPE",
+ 163: "COVERAGE_INFO_TYPE",
+ 164: "EMBEDDER_DATA_ARRAY_TYPE",
+ 165: "FEEDBACK_METADATA_TYPE",
+ 166: "FEEDBACK_VECTOR_TYPE",
+ 167: "FILLER_TYPE",
+ 168: "FREE_SPACE_TYPE",
+ 169: "INTERNAL_CLASS_TYPE",
+ 170: "INTERNAL_CLASS_WITH_STRUCT_ELEMENTS_TYPE",
+ 171: "MAP_TYPE",
+ 172: "MEGA_DOM_HANDLER_TYPE",
+ 173: "ON_HEAP_BASIC_BLOCK_PROFILER_DATA_TYPE",
+ 174: "PREPARSE_DATA_TYPE",
+ 175: "PROPERTY_ARRAY_TYPE",
+ 176: "PROPERTY_CELL_TYPE",
+ 177: "SCOPE_INFO_TYPE",
+ 178: "SHARED_FUNCTION_INFO_TYPE",
+ 179: "SMI_BOX_TYPE",
+ 180: "SMI_PAIR_TYPE",
+ 181: "SORT_STATE_TYPE",
+ 182: "SWISS_NAME_DICTIONARY_TYPE",
+ 183: "WEAK_ARRAY_LIST_TYPE",
+ 184: "WEAK_CELL_TYPE",
+ 185: "WASM_ARRAY_TYPE",
+ 186: "WASM_STRUCT_TYPE",
+ 187: "JS_PROXY_TYPE",
1057: "JS_OBJECT_TYPE",
- 189: "JS_GLOBAL_OBJECT_TYPE",
- 190: "JS_GLOBAL_PROXY_TYPE",
- 191: "JS_MODULE_NAMESPACE_TYPE",
+ 188: "JS_GLOBAL_OBJECT_TYPE",
+ 189: "JS_GLOBAL_PROXY_TYPE",
+ 190: "JS_MODULE_NAMESPACE_TYPE",
1040: "JS_SPECIAL_API_OBJECT_TYPE",
1041: "JS_PRIMITIVE_WRAPPER_TYPE",
1058: "JS_API_OBJECT_TYPE",
@@ -236,81 +235,81 @@ INSTANCE_TYPES = {
# List of known V8 maps.
KNOWN_MAPS = {
- ("read_only_space", 0x02119): (172, "MetaMap"),
+ ("read_only_space", 0x02119): (171, "MetaMap"),
("read_only_space", 0x02141): (67, "NullMap"),
- ("read_only_space", 0x02169): (154, "StrongDescriptorArrayMap"),
- ("read_only_space", 0x02191): (159, "WeakFixedArrayMap"),
- ("read_only_space", 0x021d1): (101, "EnumCacheMap"),
- ("read_only_space", 0x02205): (119, "FixedArrayMap"),
+ ("read_only_space", 0x02169): (153, "StrongDescriptorArrayMap"),
+ ("read_only_space", 0x02191): (158, "WeakFixedArrayMap"),
+ ("read_only_space", 0x021d1): (100, "EnumCacheMap"),
+ ("read_only_space", 0x02205): (118, "FixedArrayMap"),
("read_only_space", 0x02251): (8, "OneByteInternalizedStringMap"),
- ("read_only_space", 0x0229d): (169, "FreeSpaceMap"),
- ("read_only_space", 0x022c5): (168, "OnePointerFillerMap"),
- ("read_only_space", 0x022ed): (168, "TwoPointerFillerMap"),
+ ("read_only_space", 0x0229d): (168, "FreeSpaceMap"),
+ ("read_only_space", 0x022c5): (167, "OnePointerFillerMap"),
+ ("read_only_space", 0x022ed): (167, "TwoPointerFillerMap"),
("read_only_space", 0x02315): (67, "UninitializedMap"),
("read_only_space", 0x0238d): (67, "UndefinedMap"),
("read_only_space", 0x023d1): (66, "HeapNumberMap"),
("read_only_space", 0x02405): (67, "TheHoleMap"),
("read_only_space", 0x02465): (67, "BooleanMap"),
- ("read_only_space", 0x02509): (132, "ByteArrayMap"),
- ("read_only_space", 0x02531): (119, "FixedCOWArrayMap"),
- ("read_only_space", 0x02559): (120, "HashTableMap"),
+ ("read_only_space", 0x02509): (131, "ByteArrayMap"),
+ ("read_only_space", 0x02531): (118, "FixedCOWArrayMap"),
+ ("read_only_space", 0x02559): (119, "HashTableMap"),
("read_only_space", 0x02581): (64, "SymbolMap"),
("read_only_space", 0x025a9): (40, "OneByteStringMap"),
- ("read_only_space", 0x025d1): (178, "ScopeInfoMap"),
- ("read_only_space", 0x025f9): (179, "SharedFunctionInfoMap"),
- ("read_only_space", 0x02621): (162, "CodeMap"),
- ("read_only_space", 0x02649): (161, "CellMap"),
- ("read_only_space", 0x02671): (177, "GlobalPropertyCellMap"),
+ ("read_only_space", 0x025d1): (177, "ScopeInfoMap"),
+ ("read_only_space", 0x025f9): (178, "SharedFunctionInfoMap"),
+ ("read_only_space", 0x02621): (161, "CodeMap"),
+ ("read_only_space", 0x02649): (160, "CellMap"),
+ ("read_only_space", 0x02671): (176, "GlobalPropertyCellMap"),
("read_only_space", 0x02699): (70, "ForeignMap"),
- ("read_only_space", 0x026c1): (160, "TransitionArrayMap"),
+ ("read_only_space", 0x026c1): (159, "TransitionArrayMap"),
("read_only_space", 0x026e9): (45, "ThinOneByteStringMap"),
- ("read_only_space", 0x02711): (167, "FeedbackVectorMap"),
+ ("read_only_space", 0x02711): (166, "FeedbackVectorMap"),
("read_only_space", 0x02749): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x027a9): (67, "ExceptionMap"),
("read_only_space", 0x02805): (67, "TerminationExceptionMap"),
("read_only_space", 0x0286d): (67, "OptimizedOutMap"),
("read_only_space", 0x028cd): (67, "StaleRegisterMap"),
- ("read_only_space", 0x0292d): (131, "ScriptContextTableMap"),
- ("read_only_space", 0x02955): (129, "ClosureFeedbackCellArrayMap"),
- ("read_only_space", 0x0297d): (166, "FeedbackMetadataArrayMap"),
- ("read_only_space", 0x029a5): (119, "ArrayListMap"),
+ ("read_only_space", 0x0292d): (130, "ScriptContextTableMap"),
+ ("read_only_space", 0x02955): (128, "ClosureFeedbackCellArrayMap"),
+ ("read_only_space", 0x0297d): (165, "FeedbackMetadataArrayMap"),
+ ("read_only_space", 0x029a5): (118, "ArrayListMap"),
("read_only_space", 0x029cd): (65, "BigIntMap"),
- ("read_only_space", 0x029f5): (130, "ObjectBoilerplateDescriptionMap"),
- ("read_only_space", 0x02a1d): (133, "BytecodeArrayMap"),
- ("read_only_space", 0x02a45): (163, "CodeDataContainerMap"),
- ("read_only_space", 0x02a6d): (164, "CoverageInfoMap"),
- ("read_only_space", 0x02a95): (134, "FixedDoubleArrayMap"),
- ("read_only_space", 0x02abd): (122, "GlobalDictionaryMap"),
- ("read_only_space", 0x02ae5): (102, "ManyClosuresCellMap"),
- ("read_only_space", 0x02b0d): (173, "MegaDomHandlerMap"),
- ("read_only_space", 0x02b35): (119, "ModuleInfoMap"),
- ("read_only_space", 0x02b5d): (123, "NameDictionaryMap"),
- ("read_only_space", 0x02b85): (102, "NoClosuresCellMap"),
- ("read_only_space", 0x02bad): (124, "NumberDictionaryMap"),
- ("read_only_space", 0x02bd5): (102, "OneClosureCellMap"),
- ("read_only_space", 0x02bfd): (125, "OrderedHashMapMap"),
- ("read_only_space", 0x02c25): (126, "OrderedHashSetMap"),
- ("read_only_space", 0x02c4d): (127, "OrderedNameDictionaryMap"),
- ("read_only_space", 0x02c75): (175, "PreparseDataMap"),
- ("read_only_space", 0x02c9d): (176, "PropertyArrayMap"),
- ("read_only_space", 0x02cc5): (98, "SideEffectCallHandlerInfoMap"),
- ("read_only_space", 0x02ced): (98, "SideEffectFreeCallHandlerInfoMap"),
- ("read_only_space", 0x02d15): (98, "NextCallSideEffectFreeCallHandlerInfoMap"),
- ("read_only_space", 0x02d3d): (128, "SimpleNumberDictionaryMap"),
- ("read_only_space", 0x02d65): (150, "SmallOrderedHashMapMap"),
- ("read_only_space", 0x02d8d): (151, "SmallOrderedHashSetMap"),
- ("read_only_space", 0x02db5): (152, "SmallOrderedNameDictionaryMap"),
- ("read_only_space", 0x02ddd): (155, "SourceTextModuleMap"),
- ("read_only_space", 0x02e05): (183, "SwissNameDictionaryMap"),
- ("read_only_space", 0x02e2d): (156, "SyntheticModuleMap"),
+ ("read_only_space", 0x029f5): (129, "ObjectBoilerplateDescriptionMap"),
+ ("read_only_space", 0x02a1d): (132, "BytecodeArrayMap"),
+ ("read_only_space", 0x02a45): (162, "CodeDataContainerMap"),
+ ("read_only_space", 0x02a6d): (163, "CoverageInfoMap"),
+ ("read_only_space", 0x02a95): (133, "FixedDoubleArrayMap"),
+ ("read_only_space", 0x02abd): (121, "GlobalDictionaryMap"),
+ ("read_only_space", 0x02ae5): (101, "ManyClosuresCellMap"),
+ ("read_only_space", 0x02b0d): (172, "MegaDomHandlerMap"),
+ ("read_only_space", 0x02b35): (118, "ModuleInfoMap"),
+ ("read_only_space", 0x02b5d): (122, "NameDictionaryMap"),
+ ("read_only_space", 0x02b85): (101, "NoClosuresCellMap"),
+ ("read_only_space", 0x02bad): (123, "NumberDictionaryMap"),
+ ("read_only_space", 0x02bd5): (101, "OneClosureCellMap"),
+ ("read_only_space", 0x02bfd): (124, "OrderedHashMapMap"),
+ ("read_only_space", 0x02c25): (125, "OrderedHashSetMap"),
+ ("read_only_space", 0x02c4d): (126, "OrderedNameDictionaryMap"),
+ ("read_only_space", 0x02c75): (174, "PreparseDataMap"),
+ ("read_only_space", 0x02c9d): (175, "PropertyArrayMap"),
+ ("read_only_space", 0x02cc5): (97, "SideEffectCallHandlerInfoMap"),
+ ("read_only_space", 0x02ced): (97, "SideEffectFreeCallHandlerInfoMap"),
+ ("read_only_space", 0x02d15): (97, "NextCallSideEffectFreeCallHandlerInfoMap"),
+ ("read_only_space", 0x02d3d): (127, "SimpleNumberDictionaryMap"),
+ ("read_only_space", 0x02d65): (149, "SmallOrderedHashMapMap"),
+ ("read_only_space", 0x02d8d): (150, "SmallOrderedHashSetMap"),
+ ("read_only_space", 0x02db5): (151, "SmallOrderedNameDictionaryMap"),
+ ("read_only_space", 0x02ddd): (154, "SourceTextModuleMap"),
+ ("read_only_space", 0x02e05): (182, "SwissNameDictionaryMap"),
+ ("read_only_space", 0x02e2d): (155, "SyntheticModuleMap"),
("read_only_space", 0x02e55): (72, "WasmCapiFunctionDataMap"),
("read_only_space", 0x02e7d): (73, "WasmExportedFunctionDataMap"),
("read_only_space", 0x02ea5): (74, "WasmJSFunctionDataMap"),
("read_only_space", 0x02ecd): (75, "WasmTypeInfoMap"),
- ("read_only_space", 0x02ef5): (184, "WeakArrayListMap"),
- ("read_only_space", 0x02f1d): (121, "EphemeronHashTableMap"),
- ("read_only_space", 0x02f45): (165, "EmbedderDataArrayMap"),
- ("read_only_space", 0x02f6d): (185, "WeakCellMap"),
+ ("read_only_space", 0x02ef5): (183, "WeakArrayListMap"),
+ ("read_only_space", 0x02f1d): (120, "EphemeronHashTableMap"),
+ ("read_only_space", 0x02f45): (164, "EmbedderDataArrayMap"),
+ ("read_only_space", 0x02f6d): (184, "WeakCellMap"),
("read_only_space", 0x02f95): (32, "StringMap"),
("read_only_space", 0x02fbd): (41, "ConsOneByteStringMap"),
("read_only_space", 0x02fe5): (33, "ConsStringMap"),
@@ -329,7 +328,7 @@ KNOWN_MAPS = {
("read_only_space", 0x031ed): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x03215): (67, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x03259): (91, "ArrayBoilerplateDescriptionMap"),
- ("read_only_space", 0x03359): (104, "InterceptorInfoMap"),
+ ("read_only_space", 0x03359): (103, "InterceptorInfoMap"),
("read_only_space", 0x05699): (76, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x056c1): (77, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x056e9): (78, "CallableTaskMap"),
@@ -344,52 +343,51 @@ KNOWN_MAPS = {
("read_only_space", 0x05851): (89, "AllocationMementoMap"),
("read_only_space", 0x05879): (92, "AsmWasmDataMap"),
("read_only_space", 0x058a1): (93, "AsyncGeneratorRequestMap"),
- ("read_only_space", 0x058c9): (94, "BaselineDataMap"),
- ("read_only_space", 0x058f1): (95, "BreakPointMap"),
- ("read_only_space", 0x05919): (96, "BreakPointInfoMap"),
- ("read_only_space", 0x05941): (97, "CachedTemplateObjectMap"),
- ("read_only_space", 0x05969): (99, "ClassPositionsMap"),
- ("read_only_space", 0x05991): (100, "DebugInfoMap"),
- ("read_only_space", 0x059b9): (103, "FunctionTemplateRareDataMap"),
- ("read_only_space", 0x059e1): (105, "InterpreterDataMap"),
- ("read_only_space", 0x05a09): (106, "ModuleRequestMap"),
- ("read_only_space", 0x05a31): (107, "PromiseCapabilityMap"),
- ("read_only_space", 0x05a59): (108, "PromiseReactionMap"),
- ("read_only_space", 0x05a81): (109, "PropertyDescriptorObjectMap"),
- ("read_only_space", 0x05aa9): (110, "PrototypeInfoMap"),
- ("read_only_space", 0x05ad1): (111, "RegExpBoilerplateDescriptionMap"),
- ("read_only_space", 0x05af9): (112, "ScriptMap"),
- ("read_only_space", 0x05b21): (113, "SourceTextModuleInfoEntryMap"),
- ("read_only_space", 0x05b49): (114, "StackFrameInfoMap"),
- ("read_only_space", 0x05b71): (115, "TemplateObjectDescriptionMap"),
- ("read_only_space", 0x05b99): (116, "Tuple2Map"),
- ("read_only_space", 0x05bc1): (117, "WasmExceptionTagMap"),
- ("read_only_space", 0x05be9): (118, "WasmIndirectFunctionTableMap"),
- ("read_only_space", 0x05c11): (136, "SloppyArgumentsElementsMap"),
- ("read_only_space", 0x05c39): (153, "DescriptorArrayMap"),
- ("read_only_space", 0x05c61): (158, "UncompiledDataWithoutPreparseDataMap"),
- ("read_only_space", 0x05c89): (157, "UncompiledDataWithPreparseDataMap"),
- ("read_only_space", 0x05cb1): (174, "OnHeapBasicBlockProfilerDataMap"),
- ("read_only_space", 0x05cd9): (170, "InternalClassMap"),
- ("read_only_space", 0x05d01): (181, "SmiPairMap"),
- ("read_only_space", 0x05d29): (180, "SmiBoxMap"),
- ("read_only_space", 0x05d51): (147, "ExportedSubClassBaseMap"),
- ("read_only_space", 0x05d79): (148, "ExportedSubClassMap"),
- ("read_only_space", 0x05da1): (68, "AbstractInternalClassSubclass1Map"),
- ("read_only_space", 0x05dc9): (69, "AbstractInternalClassSubclass2Map"),
- ("read_only_space", 0x05df1): (135, "InternalClassWithSmiElementsMap"),
- ("read_only_space", 0x05e19): (171, "InternalClassWithStructElementsMap"),
- ("read_only_space", 0x05e41): (149, "ExportedSubClass2Map"),
- ("read_only_space", 0x05e69): (182, "SortStateMap"),
- ("read_only_space", 0x05e91): (90, "AllocationSiteWithWeakNextMap"),
- ("read_only_space", 0x05eb9): (90, "AllocationSiteWithoutWeakNextMap"),
- ("read_only_space", 0x05ee1): (81, "LoadHandler1Map"),
- ("read_only_space", 0x05f09): (81, "LoadHandler2Map"),
- ("read_only_space", 0x05f31): (81, "LoadHandler3Map"),
- ("read_only_space", 0x05f59): (82, "StoreHandler0Map"),
- ("read_only_space", 0x05f81): (82, "StoreHandler1Map"),
- ("read_only_space", 0x05fa9): (82, "StoreHandler2Map"),
- ("read_only_space", 0x05fd1): (82, "StoreHandler3Map"),
+ ("read_only_space", 0x058c9): (94, "BreakPointMap"),
+ ("read_only_space", 0x058f1): (95, "BreakPointInfoMap"),
+ ("read_only_space", 0x05919): (96, "CachedTemplateObjectMap"),
+ ("read_only_space", 0x05941): (98, "ClassPositionsMap"),
+ ("read_only_space", 0x05969): (99, "DebugInfoMap"),
+ ("read_only_space", 0x05991): (102, "FunctionTemplateRareDataMap"),
+ ("read_only_space", 0x059b9): (104, "InterpreterDataMap"),
+ ("read_only_space", 0x059e1): (105, "ModuleRequestMap"),
+ ("read_only_space", 0x05a09): (106, "PromiseCapabilityMap"),
+ ("read_only_space", 0x05a31): (107, "PromiseReactionMap"),
+ ("read_only_space", 0x05a59): (108, "PropertyDescriptorObjectMap"),
+ ("read_only_space", 0x05a81): (109, "PrototypeInfoMap"),
+ ("read_only_space", 0x05aa9): (110, "RegExpBoilerplateDescriptionMap"),
+ ("read_only_space", 0x05ad1): (111, "ScriptMap"),
+ ("read_only_space", 0x05af9): (112, "SourceTextModuleInfoEntryMap"),
+ ("read_only_space", 0x05b21): (113, "StackFrameInfoMap"),
+ ("read_only_space", 0x05b49): (114, "TemplateObjectDescriptionMap"),
+ ("read_only_space", 0x05b71): (115, "Tuple2Map"),
+ ("read_only_space", 0x05b99): (116, "WasmExceptionTagMap"),
+ ("read_only_space", 0x05bc1): (117, "WasmIndirectFunctionTableMap"),
+ ("read_only_space", 0x05be9): (135, "SloppyArgumentsElementsMap"),
+ ("read_only_space", 0x05c11): (152, "DescriptorArrayMap"),
+ ("read_only_space", 0x05c39): (157, "UncompiledDataWithoutPreparseDataMap"),
+ ("read_only_space", 0x05c61): (156, "UncompiledDataWithPreparseDataMap"),
+ ("read_only_space", 0x05c89): (173, "OnHeapBasicBlockProfilerDataMap"),
+ ("read_only_space", 0x05cb1): (169, "InternalClassMap"),
+ ("read_only_space", 0x05cd9): (180, "SmiPairMap"),
+ ("read_only_space", 0x05d01): (179, "SmiBoxMap"),
+ ("read_only_space", 0x05d29): (146, "ExportedSubClassBaseMap"),
+ ("read_only_space", 0x05d51): (147, "ExportedSubClassMap"),
+ ("read_only_space", 0x05d79): (68, "AbstractInternalClassSubclass1Map"),
+ ("read_only_space", 0x05da1): (69, "AbstractInternalClassSubclass2Map"),
+ ("read_only_space", 0x05dc9): (134, "InternalClassWithSmiElementsMap"),
+ ("read_only_space", 0x05df1): (170, "InternalClassWithStructElementsMap"),
+ ("read_only_space", 0x05e19): (148, "ExportedSubClass2Map"),
+ ("read_only_space", 0x05e41): (181, "SortStateMap"),
+ ("read_only_space", 0x05e69): (90, "AllocationSiteWithWeakNextMap"),
+ ("read_only_space", 0x05e91): (90, "AllocationSiteWithoutWeakNextMap"),
+ ("read_only_space", 0x05eb9): (81, "LoadHandler1Map"),
+ ("read_only_space", 0x05ee1): (81, "LoadHandler2Map"),
+ ("read_only_space", 0x05f09): (81, "LoadHandler3Map"),
+ ("read_only_space", 0x05f31): (82, "StoreHandler0Map"),
+ ("read_only_space", 0x05f59): (82, "StoreHandler1Map"),
+ ("read_only_space", 0x05f81): (82, "StoreHandler2Map"),
+ ("read_only_space", 0x05fa9): (82, "StoreHandler3Map"),
("map_space", 0x02119): (1057, "ExternalMap"),
("map_space", 0x02141): (2113, "JSMessageObjectMap"),
}
diff --git a/deps/v8/tools/whitespace.txt b/deps/v8/tools/whitespace.txt
index 60b58be703..f890e67970 100644
--- a/deps/v8/tools/whitespace.txt
+++ b/deps/v8/tools/whitespace.txt
@@ -7,7 +7,7 @@ A Smi balks into a war and says:
The doubles heard this and started to unbox.
The Smi looked at them when a crazy v8-autoroll account showed up...
The autoroller bought a round of Himbeerbrause. Suddenly.......
-The bartender starts to shake the bottles............................
+The bartender starts to shake the bottles...........................
I can't add trailing whitespaces, so I'm adding this line............
I'm starting to think that just adding trailing whitespaces might not be bad.