author     Allan Sandfeld Jensen <allan.jensen@qt.io>   2020-10-12 14:27:29 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>   2020-10-13 09:35:20 +0000
commit     c30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
tree       e992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/build/android/gyp/util
parent     7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
download   qtwebengine-chromium-85-based.tar.gz
BASELINE: Update Chromium to 85.0.4183.140
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/build/android/gyp/util')
-rw-r--r--   chromium/build/android/gyp/util/build_utils.py      47
-rw-r--r--   chromium/build/android/gyp/util/md5_check.py         34
-rwxr-xr-x   chromium/build/android/gyp/util/md5_check_test.py    17
-rw-r--r--   chromium/build/android/gyp/util/parallel.py         214
-rw-r--r--   chromium/build/android/gyp/util/resource_utils.py     1
5 files changed, 255 insertions, 58 deletions
diff --git a/chromium/build/android/gyp/util/build_utils.py b/chromium/build/android/gyp/util/build_utils.py
index bc15fbb61f2..067f62e4b9b 100644
--- a/chromium/build/android/gyp/util/build_utils.py
+++ b/chromium/build/android/gyp/util/build_utils.py
@@ -541,49 +541,6 @@ def GetSortedTransitiveDependencies(top, deps_func):
   return list(deps_map)
 
 
-def ComputePythonDependencies():
-  """Gets the paths of imported non-system python modules.
-
-  A path is assumed to be a "system" import if it is outside of chromium's
-  src/. The paths will be relative to the current directory.
-  """
-  _ForceLazyModulesToLoad()
-  module_paths = (m.__file__ for m in sys.modules.values()
-                  if m is not None and hasattr(m, '__file__'))
-  abs_module_paths = map(os.path.abspath, module_paths)
-
-  abs_dir_source_root = os.path.abspath(DIR_SOURCE_ROOT)
-  non_system_module_paths = [
-      p for p in abs_module_paths if p.startswith(abs_dir_source_root)
-  ]
-
-  def ConvertPycToPy(s):
-    if s.endswith('.pyc'):
-      return s[:-1]
-    return s
-
-  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
-  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
-  return sorted(set(non_system_module_paths))
-
-
-def _ForceLazyModulesToLoad():
-  """Forces any lazily imported modules to fully load themselves.
-
-  Inspecting the modules' __file__ attribute causes lazily imported modules
-  (e.g. from email) to get fully imported and update sys.modules. Iterate
-  over the values until sys.modules stabilizes so that no modules are missed.
-  """
-  while True:
-    num_modules_before = len(sys.modules.keys())
-    for m in sys.modules.values():
-      if m is not None and hasattr(m, '__file__'):
-        _ = m.__file__
-    num_modules_after = len(sys.modules.keys())
-    if num_modules_before == num_modules_after:
-      break
-
-
 def InitLogging(enabling_env):
   logging.basicConfig(
       level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
@@ -611,12 +568,10 @@ def AddDepfileOption(parser):
       help='Path to depfile (refer to `gn help depfile`)')
 
 
-def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
   assert depfile_path != first_gn_output  # http://crbug.com/646165
   assert not isinstance(inputs, string_types)  # Easy mistake to make
   inputs = inputs or []
-  if add_pydeps:
-    inputs = ComputePythonDependencies() + inputs
   MakeDirectory(os.path.dirname(depfile_path))
   # Ninja does not support multiple outputs in depfiles.
   with open(depfile_path, 'w') as depfile:
diff --git a/chromium/build/android/gyp/util/md5_check.py b/chromium/build/android/gyp/util/md5_check.py
index a8a815e7e4f..2830d25c969 100644
--- a/chromium/build/android/gyp/util/md5_check.py
+++ b/chromium/build/android/gyp/util/md5_check.py
@@ -14,6 +14,9 @@ import zipfile
 
 from util import build_utils
 
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
 # When set and a difference is detected, a diff of what changed is printed.
 PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
 
@@ -48,7 +51,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
   input_strings = list(input_strings or [])
   output_paths = list(output_paths or [])
 
-  input_paths += build_utils.ComputePythonDependencies()
+  input_paths += print_python_deps.ComputePythonDependencies()
 
   CallAndRecordIfStale(
       on_stale_md5,
@@ -64,8 +67,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
   # on bots that build with & without patch, and the patch changes the depfile
   # location.
   if hasattr(options, 'depfile') and options.depfile:
-    build_utils.WriteDepfile(
-        options.depfile, output_paths[0], depfile_deps, add_pydeps=False)
+    build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
 
 
 def CallAndRecordIfStale(function,
@@ -125,15 +127,21 @@ def CallAndRecordIfStale(function,
   old_metadata = None
   force = force or _FORCE_REBUILD
   missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  too_new = []
   # When outputs are missing, don't bother gathering change information.
   if not missing_outputs and os.path.exists(record_path):
-    with open(record_path, 'r') as jsonfile:
-      try:
-        old_metadata = _Metadata.FromFile(jsonfile)
-      except:  # pylint: disable=bare-except
-        pass  # Not yet using new file format.
-
-  changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+    record_mtime = os.path.getmtime(record_path)
+    # Outputs newer than the change information must have been modified outside
+    # of the build, and should be considered stale.
+    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+    if not too_new:
+      with open(record_path, 'r') as jsonfile:
+        try:
+          old_metadata = _Metadata.FromFile(jsonfile)
+        except:  # pylint: disable=bare-except
+          pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
   if not changes.HasChanges():
     return
 
@@ -153,11 +161,13 @@ def CallAndRecordIfStale(function,
 class Changes(object):
   """Provides and API for querying what changed between runs."""
 
-  def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+               too_new):
     self.old_metadata = old_metadata
     self.new_metadata = new_metadata
     self.force = force
     self.missing_outputs = missing_outputs
+    self.too_new = too_new
 
   def _GetOldTag(self, path, subpath=None):
     return self.old_metadata and self.old_metadata.GetTag(path, subpath)
@@ -254,6 +264,8 @@ class Changes(object):
       return 'force=True'
     elif self.missing_outputs:
       return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
+    elif self.too_new:
+      return 'Outputs newer than stamp file:\n  ' + '\n  '.join(self.too_new)
     elif self.old_metadata is None:
       return 'Previous stamp file not found.'
 
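The staleness rule added to CallAndRecordIfStale above reduces to a single mtime comparison against the record (stamp) file. A condensed, stand-alone sketch of that rule follows; the helper name is made up for illustration and assumes filesystem mtimes are reliable.

import os


def _outputs_newer_than_record(record_path, output_paths):
  # Any output written after the record file must have been modified outside
  # of the build, so it is reported back as stale ('too new').
  record_mtime = os.path.getmtime(record_path)
  return [p for p in output_paths if os.path.getmtime(p) > record_mtime]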
diff --git a/chromium/build/android/gyp/util/md5_check_test.py b/chromium/build/android/gyp/util/md5_check_test.py
index 9b3b9039f39..2169320ee54 100755
--- a/chromium/build/android/gyp/util/md5_check_test.py
+++ b/chromium/build/android/gyp/util/md5_check_test.py
@@ -47,13 +47,21 @@ class TestMd5Check(unittest.TestCase):
                            outputs_missing=False,
                            expected_changes=None,
                            added_or_modified_only=None,
-                           track_subentries=False):
+                           track_subentries=False,
+                           output_newer_than_record=False):
       output_paths = None
       if outputs_specified:
         output_file1 = tempfile.NamedTemporaryFile()
         if outputs_missing:
           output_file1.close()  # Gets deleted on close().
         output_paths = [output_file1.name]
+        if output_newer_than_record:
+          output_mtime = os.path.getmtime(output_file1.name)
+          os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
+        else:
+          # touch the record file so it doesn't look like it's older that
+          # the output we've just created
+          os.utime(record_path.name, None)
 
       self.called = False
       self.changes = None
@@ -97,6 +105,13 @@ class TestMd5Check(unittest.TestCase):
                        outputs_specified=True, outputs_missing=True,
                        expected_changes='Outputs do not exist:*',
                        added_or_modified_only=False)
+    CheckCallAndRecord(True,
+                       'should call when output is newer than record',
+                       expected_changes='Outputs newer than stamp file:*',
+                       outputs_specified=True,
+                       outputs_missing=False,
+                       added_or_modified_only=False,
+                       output_newer_than_record=True)
     CheckCallAndRecord(True, force=True, message='should call when forced',
                        expected_changes='force=True',
                        added_or_modified_only=False)
diff --git a/chromium/build/android/gyp/util/parallel.py b/chromium/build/android/gyp/util/parallel.py
new file mode 100644
index 00000000000..082ad97225e
--- /dev/null
+++ b/chromium/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+  logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+  def __init__(self, value):
+    self._value = value
+
+  def get(self):
+    return self._value
+
+  def wait(self):
+    pass
+
+  def ready(self):
+    return True
+
+  def successful(self):
+    return True
+
+
+class _ExceptionWrapper(object):
+  """Used to marshal exception messages back to main process."""
+
+  def __init__(self, msg, exception_type=None):
+    self.msg = msg
+    self.exception_type = exception_type
+
+  def MaybeThrow(self):
+    if self.exception_type:
+      raise getattr(__builtins__,
+                    self.exception_type)('Originally caused by: ' + self.msg)
+
+
+class _FuncWrapper(object):
+  """Runs on the fork()'ed side to catch exceptions and spread *args."""
+
+  def __init__(self, func):
+    global _is_child_process
+    _is_child_process = True
+    self._func = func
+
+  def __call__(self, index, _=None):
+    try:
+      return self._func(*_fork_params[index], **_fork_kwargs)
+    except Exception as e:
+      # Only keep the exception type for builtin exception types or else risk
+      # further marshalling exceptions.
+      exception_type = None
+      if hasattr(__builtins__, type(e).__name__):
+        exception_type = type(e).__name__
+      # multiprocessing is supposed to catch and return exceptions automatically
+      # but it doesn't seem to work properly :(.
+      return _ExceptionWrapper(traceback.format_exc(), exception_type)
+    except:  # pylint: disable=bare-except
+      return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+  """Allows for host-side logic to be run after child process has terminated.
+
+  * Unregisters associated pool _all_pools.
+  * Raises exception caught by _FuncWrapper.
+ """ + + def __init__(self, result, pool=None): + self._result = result + self._pool = pool + + def get(self): + self.wait() + value = self._result.get() + _CheckForException(value) + return value + + def wait(self): + self._result.wait() + if self._pool: + _all_pools.remove(self._pool) + self._pool = None + + def ready(self): + return self._result.ready() + + def successful(self): + return self._result.successful() + + +def _TerminatePools(): + """Calls .terminate() on all active process pools. + + Not supposed to be necessary according to the docs, but seems to be required + when child process throws an exception or Ctrl-C is hit. + """ + global _silence_exceptions + _silence_exceptions = True + # Child processes cannot have pools, but atexit runs this function because + # it was registered before fork()ing. + if _is_child_process: + return + + def close_pool(pool): + try: + pool.terminate() + except: # pylint: disable=bare-except + pass + + for i, pool in enumerate(_all_pools): + # Without calling terminate() on a separate thread, the call can block + # forever. + thread = threading.Thread(name='Pool-Terminate-{}'.format(i), + target=close_pool, + args=(pool, )) + thread.daemon = True + thread.start() + + +def _CheckForException(value): + if isinstance(value, _ExceptionWrapper): + global _silence_exceptions + if not _silence_exceptions: + value.MaybeThrow() + _silence_exceptions = True + logging.error('Subprocess raised an exception:\n%s', value.msg) + sys.exit(1) + + +def _MakeProcessPool(job_params, **job_kwargs): + global _all_pools + global _fork_params + global _fork_kwargs + assert _fork_params is None + assert _fork_kwargs is None + pool_size = min(len(job_params), multiprocessing.cpu_count()) + _fork_params = job_params + _fork_kwargs = job_kwargs + ret = multiprocessing.Pool(pool_size) + _fork_params = None + _fork_kwargs = None + if _all_pools is None: + _all_pools = [] + atexit.register(_TerminatePools) + _all_pools.append(ret) + return ret + + +def ForkAndCall(func, args): + """Runs |func| in a fork'ed process. + + Returns: + A Result object (call .get() to get the return value) + """ + if DISABLE_ASYNC: + pool = None + result = _ImmediateResult(func(*args)) + else: + pool = _MakeProcessPool([args]) # Omit |kwargs|. + result = pool.apply_async(_FuncWrapper(func), (0, )) + pool.close() + return _WrappedResult(result, pool=pool) + + +def BulkForkAndCall(func, arg_tuples, **kwargs): + """Calls |func| in a fork'ed process for each set of args within |arg_tuples|. + + Args: + kwargs: Common keyword arguments to be passed to |func|. + + Yields the return values in order. + """ + arg_tuples = list(arg_tuples) + if not arg_tuples: + return + + if DISABLE_ASYNC: + for args in arg_tuples: + yield func(*args, **kwargs) + return + + pool = _MakeProcessPool(arg_tuples, **kwargs) + wrapped_func = _FuncWrapper(func) + try: + for result in pool.imap(wrapped_func, xrange(len(arg_tuples))): + _CheckForException(result) + yield result + finally: + pool.close() + pool.join() + _all_pools.remove(pool) diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py index 1b92c4fb49e..7b16949f9d3 100644 --- a/chromium/build/android/gyp/util/resource_utils.py +++ b/chromium/build/android/gyp/util/resource_utils.py @@ -57,6 +57,7 @@ AAPT_IGNORE_PATTERN = ':'.join([ '*~', # Some editors create these as temp files. '.*', # Never makes sense to include dot(files/dirs). 
diff --git a/chromium/build/android/gyp/util/resource_utils.py b/chromium/build/android/gyp/util/resource_utils.py
index 1b92c4fb49e..7b16949f9d3 100644
--- a/chromium/build/android/gyp/util/resource_utils.py
+++ b/chromium/build/android/gyp/util/resource_utils.py
@@ -57,6 +57,7 @@ AAPT_IGNORE_PATTERN = ':'.join([
     '*~',  # Some editors create these as temp files.
     '.*',  # Never makes sense to include dot(files/dirs).
     '*.d.stamp',  # Ignore stamp files
+    '*.backup',  # Some tools create temporary backup files.
 ])
 
 MULTIPLE_RES_MAGIC_STRING = b'magic'
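Taken together, the build_utils.py and md5_check.py hunks move Python-dependency discovery out of WriteDepfile and into build/print_python_deps.py. A hypothetical caller that still wants its .py imports listed in the depfile would now gather them explicitly, roughly as below; the wrapper function is illustrative only and not part of the commit.

import os
import sys

from util import build_utils

# Same import pattern the commit adds to md5_check.py.
sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
import print_python_deps


def write_depfile_with_pydeps(depfile_path, first_gn_output, extra_inputs):
  # WriteDepfile() no longer accepts add_pydeps, so the caller computes the
  # imported non-system modules itself and passes them as ordinary inputs.
  inputs = list(extra_inputs) + print_python_deps.ComputePythonDependencies()
  build_utils.WriteDepfile(depfile_path, first_gn_output, inputs=inputs)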