author    Allan Sandfeld Jensen <allan.jensen@qt.io>    2020-10-12 14:27:29 +0200
committer Allan Sandfeld Jensen <allan.jensen@qt.io>    2020-10-13 09:35:20 +0000
commit    c30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
tree      e992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/tools/binary_size
parent    7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
download  qtwebengine-chromium-85-based.tar.gz

BASELINE: Update Chromium to 85.0.4183.140 (85-based)

Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/tools/binary_size')
-rw-r--r--   chromium/tools/binary_size/.style.yapf                          2
-rw-r--r--   chromium/tools/binary_size/README.md                            3
-rwxr-xr-x   chromium/tools/binary_size/diagnose_bloat.py                    6
-rwxr-xr-x   chromium/tools/binary_size/generate_commit_size_analysis.py  116
-rwxr-xr-x   chromium/tools/binary_size/generate_milestone_reports.py       5
-rw-r--r--   chromium/tools/binary_size/libsupersize/archive.py            461
-rw-r--r--   chromium/tools/binary_size/libsupersize/canned_queries.py       5
-rw-r--r--   chromium/tools/binary_size/libsupersize/caspian/lens.cc         2
-rw-r--r--   chromium/tools/binary_size/libsupersize/console.py             20
-rw-r--r--   chromium/tools/binary_size/libsupersize/describe.py           295
-rw-r--r--   chromium/tools/binary_size/libsupersize/diff.py                64
-rwxr-xr-x   chromium/tools/binary_size/libsupersize/diff_test.py           80
-rw-r--r--   chromium/tools/binary_size/libsupersize/file_format.py        148
-rw-r--r--   chromium/tools/binary_size/libsupersize/html_report.py         12
-rwxr-xr-x   chromium/tools/binary_size/libsupersize/integration_test.py   120
-rw-r--r--   chromium/tools/binary_size/libsupersize/models.py             307
-rw-r--r--   chromium/tools/binary_size/libsupersize/parallel.py             4
17 files changed, 1202 insertions, 448 deletions
diff --git a/chromium/tools/binary_size/.style.yapf b/chromium/tools/binary_size/.style.yapf
index 5e055c89722..b4ebbe24670 100644
--- a/chromium/tools/binary_size/.style.yapf
+++ b/chromium/tools/binary_size/.style.yapf
@@ -1,4 +1,6 @@
[style]
based_on_style = pep8
+
+# New directories should use a .style.yapf that does not include the following:
column_limit = 80
indent_width = 2
diff --git a/chromium/tools/binary_size/README.md b/chromium/tools/binary_size/README.md
index dda94ba0979..1866ed19257 100644
--- a/chromium/tools/binary_size/README.md
+++ b/chromium/tools/binary_size/README.md
@@ -342,6 +342,9 @@ tools/binary_size/diagnose_bloat.py HEAD --gn-args="is_official_build=false" -v
# Build and diff all contiguous revs in range BEFORE_REV..AFTER_REV for src/v8.
tools/binary_size/diagnose_bloat.py AFTER_REV --reference-rev BEFORE_REV --subrepo v8 --all -v
+# Build and diff system_webview_apk between HEAD^ and HEAD with arsc obfuscation disabled.
+tools/binary_size/diagnose_bloat.py HEAD --target system_webview_apk --gn-args enable_arsc_obfuscation=false
+
# Display detailed usage info (there are many options).
tools/binary_size/diagnose_bloat.py -h
```
diff --git a/chromium/tools/binary_size/diagnose_bloat.py b/chromium/tools/binary_size/diagnose_bloat.py
index 69dce4fae98..134f99c78a4 100755
--- a/chromium/tools/binary_size/diagnose_bloat.py
+++ b/chromium/tools/binary_size/diagnose_bloat.py
@@ -500,9 +500,9 @@ class _DiffArchiveManager(object):
logging.info('Creating .sizediff')
_RunCmd(supersize_cmd)
- logging.info('View using a local server via: %s start_server %s',
- os.path.relpath(supersize_path),
- os.path.relpath(report_path))
+ logging.info('Report created: %s', os.path.relpath(report_path))
+ logging.info('View it here: '
+ 'https://chrome-supersize.firebaseapp.com/viewer.html')
def Summarize(self):
path = os.path.join(self.archive_dir, 'last_diff_summary.txt')
diff --git a/chromium/tools/binary_size/generate_commit_size_analysis.py b/chromium/tools/binary_size/generate_commit_size_analysis.py
new file mode 100755
index 00000000000..c32a246a605
--- /dev/null
+++ b/chromium/tools/binary_size/generate_commit_size_analysis.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Lint as: python3
+"""Creates files required to feed into trybot_commit_size_checker"""
+
+import argparse
+import os
+import logging
+import shutil
+import subprocess
+
+_SRC_ROOT = os.path.normpath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+_RESOURCE_SIZES_PATH = os.path.join(_SRC_ROOT, 'build', 'android',
+ 'resource_sizes.py')
+_BINARY_SIZE_DIR = os.path.join(_SRC_ROOT, 'tools', 'binary_size')
+_CLANG_UPDATE_PATH = os.path.join(_SRC_ROOT, 'tools', 'clang', 'scripts',
+ 'update.py')
+
+
+def extract_proguard_mapping(apk_name, mapping_name, staging_dir,
+ chromium_output_directory):
+ """Copies proguard mapping file to staging_dir"""
+ mapping_path = os.path.join(chromium_output_directory, 'apks', mapping_name)
+
+ shutil.copy(mapping_path, os.path.join(staging_dir, apk_name + '.mapping'))
+
+
+def generate_resource_sizes(apk_name, staging_dir, chromium_output_directory):
+ """Creates results-chart.json file in staging_dir"""
+ apk_path = os.path.join(chromium_output_directory, 'apks', apk_name)
+
+ subprocess.run(
+ [
+ _RESOURCE_SIZES_PATH,
+ apk_path,
+ '--output-format=chartjson',
+ '--output-dir',
+ staging_dir,
+ '--chromium-output-directory',
+ chromium_output_directory,
+ ],
+ check=True,
+ )
+
+
+def generate_supersize_archive(apk_name, staging_dir,
+ chromium_output_directory):
+ """Creates a .size file for the given .apk or .minimal.apks"""
+ subprocess.run([_CLANG_UPDATE_PATH, '--package=objdump'], check=True)
+ apk_path = os.path.join(chromium_output_directory, 'apks', apk_name)
+ size_path = os.path.join(staging_dir, apk_name + '.size')
+
+ supersize_script_path = os.path.join(_BINARY_SIZE_DIR, 'supersize')
+
+ subprocess.run(
+ [
+ supersize_script_path,
+ 'archive',
+ size_path,
+ '-f',
+ apk_path,
+ '-v',
+ ],
+ check=True,
+ )
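+  # The subprocess call above is roughly equivalent to running (paths
+  # illustrative):
+  #   tools/binary_size/supersize archive Name.apk.size -f apks/Name.apk -v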
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--apk-name',
+ required=True,
+ help='Name of the apk (ex. Name.apk)',
+ )
+ parser.add_argument(
+ '--chromium-output-directory',
+ required=True,
+ help='Location of the build artifacts.',
+ )
+ parser.add_argument(
+ '--mapping-name',
+ required=True,
+ help='Filename of the proguard mapping file.',
+ )
+ parser.add_argument(
+ '--staging-dir',
+ required=True,
+ help='Directory to write generated files to.',
+ )
+
+ args = parser.parse_args()
+
+ extract_proguard_mapping(
+ args.apk_name,
+ args.mapping_name,
+ args.staging_dir,
+ args.chromium_output_directory,
+ )
+ generate_resource_sizes(
+ args.apk_name,
+ args.staging_dir,
+ args.chromium_output_directory,
+ )
+ generate_supersize_archive(
+ args.apk_name,
+ args.staging_dir,
+ args.chromium_output_directory,
+ )
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/binary_size/generate_milestone_reports.py b/chromium/tools/binary_size/generate_milestone_reports.py
index 61c81da2135..be7d1b52466 100755
--- a/chromium/tools/binary_size/generate_milestone_reports.py
+++ b/chromium/tools/binary_size/generate_milestone_reports.py
@@ -71,8 +71,9 @@ _DESIRED_VERSIONS = [
'78.0.3904.62',
'79.0.3945.136',
'80.0.3987.99',
- '81.0.4044.96', # Beta
- '83.0.4103.5', # Canary
+ '81.0.4044.138',
+ '83.0.4103.60',
+ '84.0.4147.20', # Canary
]
diff --git a/chromium/tools/binary_size/libsupersize/archive.py b/chromium/tools/binary_size/libsupersize/archive.py
index 842a81138a7..f36f8b616c9 100644
--- a/chromium/tools/binary_size/libsupersize/archive.py
+++ b/chromium/tools/binary_size/libsupersize/archive.py
@@ -15,6 +15,7 @@ import logging
import os
import posixpath
import re
+import shlex
import string
import subprocess
import sys
@@ -65,7 +66,7 @@ _OutputDirectoryContext = collections.namedtuple('_OutputDirectoryContext', [
_SECTION_SIZE_BLACKLIST = ['.symtab', '.shstrtab', '.strtab']
-# Tunable constant "knobs" for CreateSectionSizesAndSymbols().
+# Tunable constant "knobs" for CreateContainerAndSymbols().
class SectionSizeKnobs(object):
def __init__(self):
# A limit on the number of symbols an address can have, before these symbols
@@ -725,9 +726,9 @@ def _ExtendSectionRange(section_range_by_name, section_name, delta_size):
section_range_by_name[section_name] = (prev_address, prev_size + delta_size)
-def CreateMetadata(map_path, elf_path, apk_path, minimal_apks_path,
- tool_prefix, output_directory, linker_name):
- """Creates metadata dict.
+def CreateMetadata(map_path, elf_path, apk_path, minimal_apks_path, tool_prefix,
+ output_directory, linker_name, build_config):
+ """Creates metadata dict while updating |build_config|.
Args:
map_path: Path to the linker .map(.gz) file to parse.
@@ -738,6 +739,8 @@ def CreateMetadata(map_path, elf_path, apk_path, minimal_apks_path,
tool_prefix: Prefix for c++filt & nm.
output_directory: Build output directory.
linker_name: A coded linker name (see linker_map_parser.py).
+      build_config: Common build configurations to update or to undergo
+ consistency checks.
Returns:
A dict mapping string constants to values, or None if empty. Performs
@@ -748,21 +751,31 @@ def CreateMetadata(map_path, elf_path, apk_path, minimal_apks_path,
"""
assert not (apk_path and minimal_apks_path)
logging.debug('Constructing metadata')
+
+ def update_build_config(key, value):
+ if key in build_config:
+ old_value = build_config[key]
+ if value != old_value:
+ raise ValueError('Inconsistent {}: {} (was {})'.format(
+ key, value, old_value))
+ else:
+ build_config[key] = value
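+  # Illustration (not from the source): a first call such as
+  # update_build_config('linker_name', 'lld') stores the value; a later call
+  # with a different value for the same key raises ValueError.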
+
metadata = {}
if output_directory:
shorten_path = lambda path: os.path.relpath(path, output_directory)
gn_args = _ParseGnArgs(os.path.join(output_directory, 'args.gn'))
- metadata[models.METADATA_GN_ARGS] = gn_args
+ update_build_config(models.BUILD_CONFIG_GN_ARGS, gn_args)
else:
shorten_path = os.path.basename
if tool_prefix:
relative_tool_prefix = path_util.ToToolsSrcRootRelative(tool_prefix)
- metadata[models.METADATA_TOOL_PREFIX] = relative_tool_prefix
+ update_build_config(models.BUILD_CONFIG_TOOL_PREFIX, relative_tool_prefix)
if linker_name:
- metadata[models.METADATA_LINKER_NAME] = linker_name
+ update_build_config(models.BUILD_CONFIG_LINKER_NAME, linker_name)
# Deduce GIT revision.
path_candidates = [elf_path, apk_path, minimal_apks_path]
@@ -774,8 +787,7 @@ def CreateMetadata(map_path, elf_path, apk_path, minimal_apks_path,
if dirname:
git_rev = _DetectGitRevision(dirname)
if git_rev:
- metadata[models.METADATA_GIT_REVISION] = git_rev
- break
+ update_build_config(models.BUILD_CONFIG_GIT_REVISION, git_rev)
if elf_path:
metadata[models.METADATA_ELF_FILENAME] = shorten_path(elf_path)
@@ -1120,6 +1132,8 @@ def _ParsePakSymbols(symbols_by_id, object_paths_by_pak_id):
full_name=symbol.full_name, object_path=path, aliases=aliases)
aliases.append(new_sym)
raw_symbols.append(new_sym)
+ # Sorting can ignore containers because symbols created here are all in the
+ # same container.
raw_symbols.sort(key=lambda s: (s.section_name, s.address, s.object_path))
raw_total = 0.0
int_total = 0
@@ -1160,8 +1174,9 @@ def _ParseApkElfSectionRanges(section_ranges, metadata, apk_elf_result):
# hasn't been used since switching from gold -> lld.
apk_section_ranges['%s (unpacked)' %
packed_section_name] = unpacked_range
- return apk_section_ranges, elf_overhead_size
- return section_ranges, 0
+ else:
+ _, apk_section_ranges, elf_overhead_size = apk_elf_result.get()
+ return apk_section_ranges, elf_overhead_size
class _ResourcePathDeobfuscator(object):
@@ -1357,10 +1372,9 @@ def _OverwriteSymbolSizesWithRelocationCount(raw_symbols, tool_prefix,
raw_symbols[:] = [sym for sym in raw_symbols if sym.size or sym.IsNative()]
-def _AddUnattributedSectionSymbols(raw_symbols, section_ranges, elf_result):
+def _AddUnattributedSectionSymbols(raw_symbols, section_ranges):
# Create symbols for ELF sections not covered by existing symbols.
logging.info('Searching for symbol gaps...')
- _, section_ranges, _ = elf_result.get()
last_symbol_ends = collections.defaultdict(int)
for sym in raw_symbols:
if sym.end_address > last_symbol_ends[sym.section_name]:
@@ -1383,11 +1397,15 @@ def _AddUnattributedSectionSymbols(raw_symbols, section_ranges, elf_result):
logging.info('Last symbol in %s does not reach end of section, gap=%d',
section_name, overhead)
+  # Sections that should not be bundled into ".other".
+ unsummed_sections, summed_sections = models.ClassifySections(
+ section_ranges.keys())
# Sort keys to ensure consistent order (> 1 sections may have address = 0).
for section_name in sorted(section_ranges.keys()):
# Handle sections that don't appear in |raw_symbols|.
- if section_name not in last_symbol_ends:
- address, section_size = section_ranges[section_name]
+ address, section_size = section_ranges[section_name]
+ if (section_name not in unsummed_sections
+ and section_name not in summed_sections):
logging.info('All bytes in %s are unattributed, gap=%d', section_name,
overhead)
raw_symbols.append(
@@ -1399,27 +1417,31 @@ def _AddUnattributedSectionSymbols(raw_symbols, section_ranges, elf_result):
_ExtendSectionRange(section_ranges, models.SECTION_OTHER, section_size)
-def CreateSectionSizesAndSymbols(knobs=None,
- opts=None,
- map_path=None,
- tool_prefix=None,
- output_directory=None,
- elf_path=None,
- apk_path=None,
- mapping_path=None,
- resources_pathmap_path=None,
- track_string_literals=True,
- metadata=None,
- apk_so_path=None,
- pak_files=None,
- pak_info_file=None,
- linker_name=None,
- size_info_prefix=None):
- """Creates sections sizes and symbols for a SizeInfo.
+def CreateContainerAndSymbols(knobs=None,
+ opts=None,
+ container_name=None,
+ metadata=None,
+ map_path=None,
+ tool_prefix=None,
+ output_directory=None,
+ elf_path=None,
+ apk_path=None,
+ mapping_path=None,
+ resources_pathmap_path=None,
+ track_string_literals=True,
+ apk_so_path=None,
+ pak_files=None,
+ pak_info_file=None,
+ linker_name=None,
+ size_info_prefix=None):
+ """Creates a Container (with sections sizes) and symbols for a SizeInfo.
Args:
knobs: Instance of SectionSizeKnobs.
opts: Instance of ContainerArchiveOptions.
+ container_name: Name for the created Container. May be '' if only one
+ Container exists.
+ metadata: Metadata dict from CreateMetadata().
map_path: Path to the linker .map(.gz) file to parse.
tool_prefix: Prefix for c++filt & nm (required).
output_directory: Build output directory. If None, source_paths and symbol
@@ -1432,7 +1454,6 @@ def CreateSectionSizesAndSymbols(knobs=None,
resource paths to shortened resource paths.
track_string_literals: Whether to break down "** merge string" sections into
smaller symbols (requires output_directory).
- metadata: Metadata dict from CreateMetadata().
apk_so_path: Path to an .so file within an APK file.
pak_files: List of paths to .pak files.
pak_info_file: Path to a .pak.info file.
@@ -1440,8 +1461,9 @@ def CreateSectionSizesAndSymbols(knobs=None,
size_info_prefix: Path to $out/size-info/$ApkName.
Returns:
- A tuple of (section_sizes, raw_symbols).
- section_ranges is a dict mapping section names to their (address, size).
+ A tuple of (container, raw_symbols).
+    container is a Container instance that stores metadata and section_sizes
+ (section_sizes maps section names to respective sizes).
raw_symbols is a list of Symbol objects.
"""
knobs = knobs or SectionSizeKnobs()
@@ -1449,6 +1471,8 @@ def CreateSectionSizesAndSymbols(knobs=None,
# Extraction takes around 1 second, so do it in parallel.
apk_elf_result = parallel.ForkAndCall(_ElfInfoFromApk,
(apk_path, apk_so_path, tool_prefix))
+ else:
+ apk_elf_result = None
outdir_context = None
source_mapper = None
@@ -1509,16 +1533,16 @@ def CreateSectionSizesAndSymbols(knobs=None,
else:
section_ranges, raw_symbols, object_paths_by_name = {}, [], None
- elf_overhead_size = _CalculateElfOverhead(section_ranges, elf_path)
+ if apk_elf_result:
+ section_ranges, elf_overhead_size = _ParseApkElfSectionRanges(
+ section_ranges, metadata, apk_elf_result)
+ else:
+ elf_overhead_size = _CalculateElfOverhead(section_ranges, elf_path)
+ if elf_path:
+ _AddUnattributedSectionSymbols(raw_symbols, section_ranges)
pak_symbols_by_id = None
if apk_path and size_info_prefix:
- if elf_path:
- section_ranges, elf_overhead_size = _ParseApkElfSectionRanges(
- section_ranges, metadata, apk_elf_result)
- _AddUnattributedSectionSymbols(raw_symbols, section_ranges,
- apk_elf_result)
-
# Can modify |section_ranges|.
pak_symbols_by_id = _FindPakSymbolsFromApk(opts, section_ranges, apk_path,
size_info_prefix)
@@ -1591,14 +1615,22 @@ def CreateSectionSizesAndSymbols(knobs=None,
_OverwriteSymbolSizesWithRelocationCount(raw_symbols, tool_prefix, elf_path)
section_sizes = {k: size for k, (address, size) in section_ranges.items()}
- return section_sizes, raw_symbols
+ container = models.Container(name=container_name,
+ metadata=metadata,
+ section_sizes=section_sizes)
+ for symbol in raw_symbols:
+ symbol.container = container
+ return container, raw_symbols
-def CreateSizeInfo(section_sizes_list,
+def CreateSizeInfo(build_config,
+ container_list,
raw_symbols_list,
- metadata_list,
normalize_names=True):
"""Performs operations on all symbols and creates a SizeInfo object."""
+ assert len(container_list) == len(raw_symbols_list)
+
+ all_raw_symbols = []
for raw_symbols in raw_symbols_list:
file_format.SortSymbols(raw_symbols)
file_format.CalculatePadding(raw_symbols)
@@ -1609,13 +1641,9 @@ def CreateSizeInfo(section_sizes_list,
if normalize_names:
_NormalizeNames(raw_symbols)
- # TODO(huangs): Implement data fusing to compute the following for real.
- assert len(section_sizes_list) == 1
- section_sizes = section_sizes_list[0]
- raw_symbols = raw_symbols_list[0]
- metadata = metadata_list[0]
+ all_raw_symbols += raw_symbols
- return models.SizeInfo(section_sizes, raw_symbols, metadata=metadata)
+ return models.SizeInfo(build_config, container_list, all_raw_symbols)
def _DetectGitRevision(directory):
@@ -1709,63 +1737,47 @@ def _ElfInfoFromApk(apk_path, apk_so_path, tool_prefix):
return build_id, section_ranges, elf_overhead_size
-def _AutoIdentifyInputFile(args):
- if args.f.endswith('.minimal.apks'):
- args.minimal_apks_file = args.f
- logging.info('Auto-identified --minimal-apks-file.')
- elif args.f.endswith('.apk'):
- args.apk_file = args.f
- logging.info('Auto-identified --apk-file.')
- elif args.f.endswith('.so') or '.' not in os.path.basename(args.f):
- logging.info('Auto-identified --elf-file.')
- args.elf_file = args.f
- elif args.f.endswith('.map') or args.f.endswith('.map.gz'):
- logging.info('Auto-identified --map-file.')
- args.map_file = args.f
- else:
- return False
- return True
-
+def _AddContainerArguments(parser):
+ """Add arguments applicable to a single container."""
-def AddMainPathsArguments(parser):
- """Add arguments for _DeduceMainPaths()."""
+ # Special: Use _IdentifyInputFile() to detect main file argument.
parser.add_argument('-f', metavar='FILE',
help='Auto-identify input file type.')
+
+ # Main file argument: Exactly one should be specified (perhaps via -f), with
+ # the exception that --map-file can be specified in addition.
+ # _IdentifyInputFile() and _GetMainFiles() should be kept updated.
parser.add_argument('--apk-file',
help='.apk file to measure. Other flags can generally be '
- 'derived when this is used.')
- parser.add_argument(
- '--resources-pathmap-file',
- help='.pathmap.txt file that contains a maping from '
- 'original resource paths to shortened resource paths.')
+ 'derived when this is used.')
parser.add_argument('--minimal-apks-file',
help='.minimal.apks file to measure. Other flags can '
- 'generally be derived when this is used.')
- parser.add_argument('--mapping-file',
- help='Proguard .mapping file for deobfuscation.')
- parser.add_argument('--elf-file',
- help='Path to input ELF file. Currently used for '
- 'capturing metadata.')
+ 'generally be derived when this is used.')
+ parser.add_argument('--elf-file', help='Path to input ELF file.')
parser.add_argument('--map-file',
help='Path to input .map(.gz) file. Defaults to '
'{{elf_file}}.map(.gz)?. If given without '
'--elf-file, no size metadata will be recorded.')
- parser.add_argument('--no-source-paths', action='store_true',
- help='Do not use .ninja files to map '
- 'object_path -> source_path')
- parser.add_argument('--output-directory',
- help='Path to the root build directory.')
- parser.add_argument('--tool-prefix',
- help='Path prefix for c++filt, nm, readelf.')
-
+ parser.add_argument('--ssargs-file',
+ help='Path to SuperSize multi-container arguments '
+ 'file.')
-def AddArguments(parser):
- parser.add_argument('size_file', help='Path to output .size file.')
+ # Auxiliary file arguments.
+ parser.add_argument('--mapping-file',
+ help='Proguard .mapping file for deobfuscation.')
+ parser.add_argument('--resources-pathmap-file',
+                      help='.pathmap.txt file that contains a mapping from '
+ 'original resource paths to shortened resource paths.')
parser.add_argument('--pak-file', action='append',
help='Paths to pak files.')
parser.add_argument('--pak-info-file',
help='This file should contain all ids found in the pak '
'files that have been passed in.')
+ parser.add_argument('--aux-elf-file',
+ help='Path to auxiliary ELF if the main file is APK, '
+ 'useful for capturing metadata.')
+
+ # Non-file argument.
parser.add_argument('--no-string-literals', dest='track_string_literals',
default=True, action='store_false',
help='Disable breaking down "** merge strings" into more '
@@ -1775,8 +1787,10 @@ def AddArguments(parser):
action='store_true',
help='Instead of counting binary size, count number of relative'
'relocation instructions in ELF code.')
- parser.add_argument('--source-directory',
- help='Custom path to the root source directory.')
+ parser.add_argument('--no-source-paths',
+ action='store_true',
+ help='Do not use .ninja files to map '
+ 'object_path -> source_path')
parser.add_argument(
'--java-only', action='store_true', help='Run on only Java symbols')
parser.add_argument(
@@ -1790,7 +1804,134 @@ def AddArguments(parser):
action='store_true',
help='Include a padding field for each symbol, instead of rederiving '
'from consecutive symbols on file load.')
- AddMainPathsArguments(parser)
+
+
+def AddArguments(parser):
+ parser.add_argument('size_file', help='Path to output .size file.')
+ parser.add_argument('--source-directory',
+ help='Custom path to the root source directory.')
+ parser.add_argument('--output-directory',
+ help='Path to the root build directory.')
+ parser.add_argument('--tool-prefix',
+ help='Path prefix for c++filt, nm, readelf.')
+ _AddContainerArguments(parser)
+
+
+def _IdentifyInputFile(args):
+ """Identifies main input file type from |args.f|, and updates |args|.
+
+ Identification is performed on filename alone, i.e., the file need not exist.
+ The result is written to a field in |args|. If the field exists then it
+ simply gets overwritten.
+
+ If '.' is missing from |args.f| then --elf-file is assumed.
+
+ Returns:
+ True if identification was successful, else False.
+"""
+ if args.f.endswith('.minimal.apks'):
+ args.minimal_apks_file = args.f
+ logging.info('Auto-identified --minimal-apks-file.')
+ elif args.f.endswith('.apk'):
+ args.apk_file = args.f
+ logging.info('Auto-identified --apk-file.')
+ elif args.f.endswith('.so') or '.' not in os.path.basename(args.f):
+ args.elf_file = args.f
+ logging.info('Auto-identified --elf-file.')
+ elif args.f.endswith('.map') or args.f.endswith('.map.gz'):
+ args.map_file = args.f
+ logging.info('Auto-identified --map-file.')
+ elif args.f.endswith('.ssargs'):
+ args.ssargs_file = args.f
+ logging.info('Auto-identified --ssargs-file.')
+ else:
+ return False
+ return True
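+# Examples (file names hypothetical): 'Chrome.minimal.apks' sets
+# --minimal-apks-file; 'libfoo.so' or an extensionless name sets --elf-file;
+# 'foo.map.gz' sets --map-file; 'foo.ssargs' sets --ssargs-file.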
+
+
+def _GetMainFiles(args):
+ ret = [args.apk_file, args.elf_file, args.minimal_apks_file, args.ssargs_file]
+ ret = [v for v in ret if v]
+ # --map-file can be a main file or used with another main file. So only add it
+  # if no main file is found yet.
+ if not ret and args.map_file:
+ ret.append(args.map_file)
+  # |ret| should have only one element; the caller should check and handle errors.
+ return ret
+
+
+def _DeduceDerivedArgsAndCheckMainInput(args, is_top_level_args,
+ on_config_error):
+ """Stores values derived from |args|, and ensures one main input exists.
+
+ Args:
+ args: Parsed command-line arguments, or .ssargs input.
+ is_top_level_args: Whether this is processing SuperSize command line
+ (instead of .ssargs input).
+ on_config_error: Error callback.
+ """
+ setattr(args, 'is_bundle', args.minimal_apks_file is not None)
+ main_files = _GetMainFiles(args)
+ if not main_files:
+ on_config_error(
+ 'Must pass at least one of --apk-file, --minimal-apks-file, '
+ '--elf-file, --map-file, --ssargs-file')
+ # --map-file can be a main file, or used with another main file.
+ if len(main_files) > 1:
+ on_config_error(
+        'Found colliding --apk-file, --minimal-apks-file, --elf-file, '
+ '--ssargs-file')
+ if is_top_level_args:
+ setattr(args, 'any_path_within_output_directory', main_files[0])
+
+
+def ParseSsargs(lines):
+ """Parses .ssargs data.
+
+ An .ssargs file is a text file to specify multiple containers as input to
+  SuperSize-archive. After '#'-based comments, leading / trailing whitespace,
+  and empty lines are stripped, each remaining line specifies a distinct
+  container. Format:
+ * Positional argument: |name| for the container.
+ * Main input file specified by -f, --apk-file, --elf-file, etc.:
+ * Can be an absolute path.
+ * Can be a relative path. In this case, it's up to the caller to supply the
+ base directory.
+ * -f switch must not specify another .ssargs file.
+ * For supported switches: See _AddContainerArguments().
+
+ Args:
+ lines: An iterator containing lines of .ssargs data.
+ Returns:
+ A list of arguments, one for each container.
+ Raises:
+ ValueError: Parse error, including input line number.
+ """
+ sub_args_list = []
+ parser = argparse.ArgumentParser(add_help=False)
+ parser.error = lambda msg: (_ for _ in ()).throw(ValueError(msg))
+ parser.add_argument('name')
+ _AddContainerArguments(parser)
+ try:
+ for lineno, line in enumerate(lines, 1):
+ toks = shlex.split(line, comments=True)
+ if not toks: # Skip if line is empty after stripping comments.
+ continue
+ sub_args = parser.parse_args(toks)
+ if set(sub_args.name) & set('<>'):
+ parser.error('container name cannot have characters in "<>"')
+ if sub_args.f:
+ if not _IdentifyInputFile(sub_args):
+ parser.error('cannot identify file type: {}'.format(sub_args.f))
+ if sub_args.ssargs_file: # May be added by the -f flag.
+ parser.error('cannot nest .ssargs files')
+ _DeduceDerivedArgsAndCheckMainInput(sub_args,
+ is_top_level_args=False,
+ on_config_error=parser.error)
+ sub_args_list.append(sub_args)
+ except ValueError as e:
+ e.args = ('Line %d: %s' % (lineno, e.args[0]), )
+ raise e
+ return sub_args_list
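+# A minimal illustrative .ssargs file (container names and paths invented):
+#   # Two containers, one per line.
+#   Browser --apk-file ChromePublic.apk
+#   WebView -f SystemWebView.apk --pak-file webview.pak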
def _DeduceNativeInfo(tentative_output_dir, apk_path, elf_path, map_path,
@@ -1858,6 +1999,40 @@ def _DeduceAuxPaths(args, apk_prefix):
return mapping_path, resources_pathmap_path
+def _ReadMultipleArgsFromStream(lines, base_dir, err_prefix, args,
+ on_config_error):
+ try:
+ container_args_list = ParseSsargs(lines)
+ except ValueError as e:
+ on_config_error('%s: %s' % (err_prefix, e.args[0]))
+ sub_args_list = []
+ for container_args in container_args_list:
+ # Clone |args| keys but assign empty values.
+ sub_args = argparse.Namespace(**{k: None for k in vars(args)})
+ # Copy parsed values to |sub_args|.
+ for k, v in container_args.__dict__.items():
+      # Translate file arguments to be relative to |base_dir|.
+ if (k.endswith('_file') or k == 'f') and v is not None:
+ v = os.path.join(base_dir, v)
+ sub_args.__dict__[k] = v
+ logging.info('Container: %r' %
+ {k: v
+ for k, v in sub_args.__dict__.items() if v is not None})
+ sub_args_list.append(sub_args)
+ return sub_args_list
+
+
+def _ReadMultipleArgsFromFile(args, on_config_error):
+ with open(args.ssargs_file, 'r') as fh:
+ lines = list(fh)
+ err_prefix = 'In file ' + args.ssargs_file
+ # Supply |base_dir| as the directory containing the .ssargs file, to ensure
+ # consistent behavior wherever SuperSize-archive runs.
+ base_dir = os.path.dirname(os.path.abspath(args.ssargs_file))
+ return _ReadMultipleArgsFromStream(lines, base_dir, err_prefix, args,
+ on_config_error)
+
+
def _DeduceMainPaths(args, on_config_error):
"""Generates main paths (may be deduced) for each containers given by input.
@@ -1869,10 +2044,11 @@ def _DeduceMainPaths(args, on_config_error):
value=args.output_directory,
any_path_within_output_directory=args.any_path_within_output_directory)
- def _Inner(sub_args, apk_prefix, apk_path):
+ def _Inner(idx, sub_args, apk_prefix, apk_path):
"""Inner helper for _DeduceMainPaths(), for one container.
Params:
+ idx: Numeric index of the container.
sub_args: Arguments specific to a container.
apk_prefix: Prefix used to search for auxiliary .apk related files.
apk_path: Path to .apk file that can be opened for processing, but whose
@@ -1880,7 +2056,10 @@ def _DeduceMainPaths(args, on_config_error):
"""
output_directory = output_directory_finder.Tentative()
opts = ContainerArchiveOptions(sub_args, output_directory=output_directory)
+ container_name = sub_args.name if hasattr(sub_args, 'name') else None
if apk_prefix:
+ if not container_name:
+ container_name = apk_prefix
# Allow either .minimal.apks or just .apks.
apk_prefix = apk_prefix.replace('.minimal.apks', '.aab')
apk_prefix = apk_prefix.replace('.apks', '.aab')
@@ -1890,8 +2069,8 @@ def _DeduceMainPaths(args, on_config_error):
tool_prefix = None
if opts.analyze_native:
elf_path, map_path, apk_so_path = _DeduceNativeInfo(
- output_directory, apk_path, sub_args.elf_file, sub_args.map_file,
- on_config_error)
+ output_directory, apk_path, sub_args.elf_file
+ or sub_args.aux_elf_file, sub_args.map_file, on_config_error)
if map_path:
linker_name = _DetectLinkerName(map_path)
logging.info('Linker name: %s' % linker_name)
@@ -1901,6 +2080,8 @@ def _DeduceMainPaths(args, on_config_error):
output_directory_finder=output_directory_finder,
linker_name=linker_name)
tool_prefix = tool_prefix_finder.Finalized()
+ if not container_name and elf_path:
+ container_name = elf_path
else:
# Trust that these values will not be used, and set to None.
elf_path = None
@@ -1917,17 +2098,27 @@ def _DeduceMainPaths(args, on_config_error):
size_info_prefix = os.path.join(output_directory, 'size-info',
os.path.basename(apk_prefix))
- return (opts, output_directory, tool_prefix, apk_path, mapping_path,
- apk_so_path, elf_path, map_path, resources_pathmap_path,
- linker_name, size_info_prefix)
+ if not container_name:
+ container_name = 'Container %d' % idx
+
+ return (opts, output_directory, tool_prefix, container_name, apk_path,
+ mapping_path, apk_so_path, elf_path, map_path,
+ resources_pathmap_path, linker_name, size_info_prefix)
- # Process each container.
- # If needed, extract .apk file to a temp file and process that instead.
- if args.minimal_apks_file:
- with zip_util.UnzipToTemp(args.minimal_apks_file, _APKS_MAIN_APK) as temp:
- yield _Inner(args, args.minimal_apks_file, temp)
+ if args.ssargs_file:
+ sub_args_list = _ReadMultipleArgsFromFile(args, on_config_error)
else:
- yield _Inner(args, args.apk_file, args.apk_file)
+ sub_args_list = [args]
+
+ # Each element in |sub_args_list| specifies a container.
+ for idx, sub_args in enumerate(sub_args_list):
+ # If needed, extract .apk file to a temp file and process that instead.
+ if sub_args.minimal_apks_file:
+ with zip_util.UnzipToTemp(sub_args.minimal_apks_file,
+ _APKS_MAIN_APK) as temp:
+ yield _Inner(idx, sub_args, sub_args.minimal_apks_file, temp)
+ else:
+ yield _Inner(idx, sub_args, sub_args.apk_file, sub_args.apk_file)
def Run(args, on_config_error):
@@ -1935,38 +2126,36 @@ def Run(args, on_config_error):
on_config_error('size_file must end with .size')
if args.f is not None:
- if not _AutoIdentifyInputFile(args):
+ if not _IdentifyInputFile(args):
on_config_error('Cannot identify file %s' % args.f)
- if args.apk_file and args.minimal_apks_file:
- on_config_error('Cannot use both --apk-file and --minimal-apks-file.')
-
- # Deduce arguments.
- setattr(args, 'is_bundle', args.minimal_apks_file is not None)
- any_path = (args.apk_file or args.minimal_apks_file or args.elf_file
- or args.map_file)
- if any_path is None:
- on_config_error(
- 'Must pass at least one of --apk-file, --minimal-apks-file, '
- '--elf-file, --map-file')
- setattr(args, 'any_path_within_output_directory', any_path)
-
+ _DeduceDerivedArgsAndCheckMainInput(args,
+ is_top_level_args=True,
+ on_config_error=on_config_error)
knobs = SectionSizeKnobs()
- metadata_list = []
- section_sizes_list = []
+ build_config = {}
+ seen_container_names = set()
+ container_list = []
raw_symbols_list = []
- # Generate one size info for each container.
- for (opts, output_directory, tool_prefix, apk_path, mapping_path, apk_so_path,
- elf_path, map_path, resources_pathmap_path, linker_name,
+ # Iterate over each container.
+ for (opts, output_directory, tool_prefix, container_name, apk_path,
+ mapping_path, apk_so_path, elf_path, map_path, resources_pathmap_path,
+ linker_name,
size_info_prefix) in _DeduceMainPaths(args, on_config_error):
+ if container_name in seen_container_names:
+ raise ValueError('Duplicate container name: {}'.format(container_name))
+ seen_container_names.add(container_name)
+
# Note that |args.apk_file| is used instead of |apk_path|, since the latter
# may be an extracted temporary file.
metadata = CreateMetadata(map_path, elf_path, args.apk_file,
args.minimal_apks_file, tool_prefix,
- output_directory, linker_name)
- section_sizes, raw_symbols = CreateSectionSizesAndSymbols(
+ output_directory, linker_name, build_config)
+ container, raw_symbols = CreateContainerAndSymbols(
knobs=knobs,
opts=opts,
+ container_name=container_name,
+ metadata=metadata,
map_path=map_path,
tool_prefix=tool_prefix,
elf_path=elf_path,
@@ -1975,29 +2164,27 @@ def Run(args, on_config_error):
output_directory=output_directory,
resources_pathmap_path=resources_pathmap_path,
track_string_literals=args.track_string_literals,
- metadata=metadata,
apk_so_path=apk_so_path,
pak_files=args.pak_file,
pak_info_file=args.pak_info_file,
linker_name=linker_name,
size_info_prefix=size_info_prefix)
- metadata_list.append(metadata)
- section_sizes_list.append(section_sizes)
+ container_list.append(container)
raw_symbols_list.append(raw_symbols)
- size_info = CreateSizeInfo(
- section_sizes_list,
- raw_symbols_list,
- metadata_list,
- normalize_names=False)
+ size_info = CreateSizeInfo(build_config,
+ container_list,
+ raw_symbols_list,
+ normalize_names=False)
if logging.getLogger().isEnabledFor(logging.DEBUG):
for line in describe.DescribeSizeInfoCoverage(size_info):
logging.debug(line)
logging.info('Recorded info for %d symbols', len(size_info.raw_symbols))
- logging.info('Recording metadata: \n %s', '\n '.join(
- describe.DescribeMetadata(size_info.metadata)))
+ for container in size_info.containers:
+ logging.info('Recording metadata: \n %s',
+ '\n '.join(describe.DescribeDict(container.metadata)))
logging.info('Saving result to %s', args.size_file)
file_format.SaveSizeInfo(
diff --git a/chromium/tools/binary_size/libsupersize/canned_queries.py b/chromium/tools/binary_size/libsupersize/canned_queries.py
index e534a1e036e..2bd3fe7d96f 100644
--- a/chromium/tools/binary_size/libsupersize/canned_queries.py
+++ b/chromium/tools/binary_size/libsupersize/canned_queries.py
@@ -125,8 +125,9 @@ def _CategorizeGenerated(symbols):
symbols.WherePathMatches('gl_bindings_autogen'))
symbols = symbols.WhereSourceIsGenerated()
- symbols = g.Add('Java Protocol Buffers', symbols.Filter(lambda s: (
- s.source_path.endswith('Proto.java'))))
+ symbols = g.Add(
+ 'Java Protocol Buffers',
+ symbols.Filter(lambda s: '__protoc_java.srcjar' in s.source_path))
symbols = g.Add('C++ Protocol Buffers', symbols.Filter(lambda s: (
'/protobuf/' in s.object_path or
s.object_path.endswith('.pbzero.o') or
diff --git a/chromium/tools/binary_size/libsupersize/caspian/lens.cc b/chromium/tools/binary_size/libsupersize/caspian/lens.cc
index 7011e4f0157..1c9ca5bcc83 100644
--- a/chromium/tools/binary_size/libsupersize/caspian/lens.cc
+++ b/chromium/tools/binary_size/libsupersize/caspian/lens.cc
@@ -61,7 +61,7 @@ std::string_view GeneratedLens::ParentName(const BaseSymbol& symbol) {
return "Not generated";
}
- static LazyRE2 java_protobuf_regex = {R"(Proto\.java$)"};
+ static LazyRE2 java_protobuf_regex = {R"(__protoc_java\.srcjar)"};
if (PartialMatch(symbol.SourcePath(), *java_protobuf_regex)) {
return "Java Protocol Buffers";
}
diff --git a/chromium/tools/binary_size/libsupersize/console.py b/chromium/tools/binary_size/libsupersize/console.py
index f8ef12e0b62..76e54201801 100644
--- a/chromium/tools/binary_size/libsupersize/console.py
+++ b/chromium/tools/binary_size/libsupersize/console.py
@@ -125,9 +125,10 @@ class _Session(object):
if not first_sym:
return []
size_info = self._SizeInfoForSymbol(first_sym)
+ container = first_sym.container
tool_prefix = self._ToolPrefixForSymbol(size_info)
- elf_path = self._ElfPathForSymbol(
- size_info, tool_prefix, elf_path)
+ elf_path = self._ElfPathForSymbol(size_info, container, tool_prefix,
+ elf_path)
return string_extract.ReadStringLiterals(
thing, elf_path, tool_prefix, all_rodata=all_rodata)
@@ -243,7 +244,8 @@ class _Session(object):
def _ToolPrefixForSymbol(self, size_info):
tool_prefix = self._tool_prefix_finder.Tentative()
- orig_tool_prefix = size_info.metadata.get(models.METADATA_TOOL_PREFIX)
+ orig_tool_prefix = size_info.build_config.get(
+ models.BUILD_CONFIG_TOOL_PREFIX)
if orig_tool_prefix:
orig_tool_prefix = path_util.FromToolsSrcRootRelative(orig_tool_prefix)
if os.path.exists(path_util.GetObjDumpPath(orig_tool_prefix)):
@@ -256,13 +258,13 @@ class _Session(object):
'--tool-prefix, or setting --output-directory')
return tool_prefix
- def _ElfPathForSymbol(self, size_info, tool_prefix, elf_path):
+ def _ElfPathForSymbol(self, size_info, container, tool_prefix, elf_path):
def build_id_matches(elf_path):
found_build_id = archive.BuildIdFromElf(elf_path, tool_prefix)
- expected_build_id = size_info.metadata.get(models.METADATA_ELF_BUILD_ID)
+ expected_build_id = container.metadata.get(models.METADATA_ELF_BUILD_ID)
return found_build_id == expected_build_id
- filename = size_info.metadata.get(models.METADATA_ELF_FILENAME)
+ filename = container.metadata.get(models.METADATA_ELF_FILENAME)
paths_to_try = []
if elf_path:
paths_to_try.append(elf_path)
@@ -317,8 +319,10 @@ class _Session(object):
    assert not symbol.IsDelta(), ('Cannot disassemble a Diff\'ed symbol. Try '
'passing .before_symbol or .after_symbol.')
size_info = self._SizeInfoForSymbol(symbol)
+ container = symbol.container
tool_prefix = self._ToolPrefixForSymbol(size_info)
- elf_path = self._ElfPathForSymbol(size_info, tool_prefix, elf_path)
+ elf_path = self._ElfPathForSymbol(size_info, container, tool_prefix,
+ elf_path)
# Always use Android NDK's objdump because llvm-objdump does not seem to
# correctly disassemble.
output_directory_finder = self._output_directory_finder
@@ -489,7 +493,7 @@ def Run(args, on_config_error):
output_directory_finder = path_util.OutputDirectoryFinder(
value=args.output_directory,
any_path_within_output_directory=args.inputs[0])
- linker_name = size_infos[-1].metadata.get(models.METADATA_LINKER_NAME)
+ linker_name = size_infos[-1].build_config.get(models.BUILD_CONFIG_LINKER_NAME)
tool_prefix_finder = path_util.ToolPrefixFinder(
value=args.tool_prefix,
output_directory_finder=output_directory_finder,
diff --git a/chromium/tools/binary_size/libsupersize/describe.py b/chromium/tools/binary_size/libsupersize/describe.py
index 1efeadccd6c..319e7f6781c 100644
--- a/chromium/tools/binary_size/libsupersize/describe.py
+++ b/chromium/tools/binary_size/libsupersize/describe.py
@@ -51,23 +51,21 @@ def _Divide(a, b):
return float(a) / b if b else 0
-def _IncludeInTotals(section_name):
- return section_name not in models.BSS_SECTIONS and '(' not in section_name
+def _GetSectionSizeInfo(unsummed_sections, summed_sections, section_sizes):
+ sizes = [v for k, v in section_sizes.items() if k in summed_sections]
+ total_bytes = sum(sizes)
+ max_bytes = max(sizes)
-
-def _GetSectionSizeInfo(section_sizes):
- total_bytes = sum(v for k, v in section_sizes.items() if _IncludeInTotals(k))
- max_bytes = max(
- abs(v) for k, v in section_sizes.items() if _IncludeInTotals(k))
+ maybe_significant_sections = unsummed_sections | summed_sections
def is_significant_section(name, size):
# Show all sections containing symbols, plus relocations.
# As a catch-all, also include any section that comprises > 4% of the
# largest section. Use largest section rather than total so that it still
# works out when showing a diff containing +100, -100 (total=0).
- return (name in list(models.SECTION_TO_SECTION_NAME.values())
- or name in ('.rela.dyn', '.rel.dyn')
- or _IncludeInTotals(name) and abs(_Divide(size, max_bytes)) > .04)
+ return (name in maybe_significant_sections
+ or name in ['.rela.dyn', '.rel.dyn']
+ or abs(_Divide(size, max_bytes)) > .04)
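+  # Worked example (numbers invented): with max_bytes=1000, a 50-byte section
+  # is significant (5% > 4%), while a 30-byte one is shown only if it appears
+  # in the section sets above.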
section_names = sorted(
k for k, v in section_sizes.items() if is_significant_section(k, v))
@@ -179,35 +177,50 @@ class DescriberText(Describer):
self.recursive = recursive
self.summarize = summarize
- def _DescribeSectionSizes(self, section_sizes):
- total_bytes, section_names = _GetSectionSizeInfo(section_sizes)
+ def _DescribeSectionSizes(self,
+ unsummed_sections,
+ summed_sections,
+ section_sizes,
+ indent=''):
+ total_bytes, section_names = _GetSectionSizeInfo(unsummed_sections,
+ summed_sections,
+ section_sizes)
yield ''
- yield 'Section Sizes (Total={} ({} bytes)):'.format(
- _PrettySize(total_bytes), total_bytes)
+ yield '{}Section Sizes (Total={} ({} bytes)):'.format(
+ indent, _PrettySize(total_bytes), total_bytes)
for name in section_names:
size = section_sizes[name]
- if not _IncludeInTotals(name):
- yield ' {}: {} ({} bytes) (not included in totals)'.format(
- name, _PrettySize(size), size)
+ if name in unsummed_sections:
+ yield '{} {}: {} ({} bytes) (not included in totals)'.format(
+ indent, name, _PrettySize(size), size)
else:
+ notes = ''
+ if name not in summed_sections:
+ notes = ' (counted in .other)'
percent = _Divide(size, total_bytes)
- yield ' {}: {} ({} bytes) ({:.1%})'.format(
- name, _PrettySize(size), size, percent)
+ yield '{} {}: {} ({} bytes) ({:.1%}){}'.format(
+ indent, name, _PrettySize(size), size, percent, notes)
if self.verbose:
yield ''
- yield 'Other section sizes:'
+ yield '{}Other section sizes:'.format(indent)
section_names = sorted(
k for k in section_sizes.keys() if k not in section_names)
for name in section_names:
- not_included_part = ''
- if not _IncludeInTotals(name):
- not_included_part = ' (not included in totals)'
- yield ' {}: {} ({} bytes){}'.format(
- name, _PrettySize(section_sizes[name]), section_sizes[name],
- not_included_part)
+ notes = ''
+ if name in unsummed_sections:
+ notes = ' (not included in totals)'
+ elif name not in summed_sections:
+ notes = ' (counted in .other)'
+ yield '{} {}: {} ({} bytes){}'.format(
+ indent, name, _PrettySize(section_sizes[name]), section_sizes[name],
+ notes)
def _DescribeSymbol(self, sym, single_line=False):
+ container_str = sym.container_short_name
+ if container_str:
+ container_str = '<{}>'.format(container_str)
+
address = 'Group' if sym.IsGroup() else hex(sym.address)
last_field = ''
@@ -235,11 +248,13 @@ class DescriberText(Describer):
if last_field:
last_field = ' ' + last_field
if sym.IsDelta():
- yield '{}@{:<9s} {}{}'.format(
- sym.section, address, pss_field, last_field)
+ yield '{}{}@{:<9s} {}{}'.format(container_str, sym.section, address,
+ pss_field, last_field)
else:
- l = '{}@{:<9s} pss={} padding={}{}'.format(
- sym.section, address, pss_field, sym.padding, last_field)
+ l = '{}{}@{:<9s} pss={} padding={}{}'.format(container_str,
+ sym.section, address,
+ pss_field, sym.padding,
+ last_field)
yield l
yield ' source_path={} \tobject_path={}'.format(
sym.source_path, sym.object_path)
@@ -258,16 +273,17 @@ class DescriberText(Describer):
else:
pss_field = '{:<14}'.format(pss_field)
if single_line:
- yield '{}@{:<9s} {} {}{}'.format(
- sym.section, address, pss_field, sym.name, last_field)
+ yield '{}{}@{:<9s} {} {}{}'.format(container_str, sym.section,
+ address, pss_field, sym.name,
+ last_field)
else:
path = sym.source_path or sym.object_path
if path and sym.generated_source:
path = '$root_gen_dir/' + path
path = path or '{no path}'
- yield '{}@{:<9s} {} {}'.format(
- sym.section, address, pss_field, path)
+ yield '{}{}@{:<9s} {} {}'.format(container_str, sym.section, address,
+ pss_field, path)
if sym.name:
yield ' {}{}'.format(sym.name, last_field)
@@ -387,13 +403,20 @@ class DescriberText(Describer):
else:
summary_desc = ()
+ title_parts = ['Index', 'Running Total']
+ if group.container_name == '':
+ title_parts.append('Section@Address')
+ else:
+ title_parts.append('<Container>Section@Address')
if self.verbose:
- titles = 'Index | Running Total | Section@Address | ...'
- elif group.IsDelta():
- titles = (u'Index | Running Total | Section@Address | \u0394 PSS '
- u'(\u0394 size_without_padding) | Path')
+ title_parts.append('...')
else:
- titles = ('Index | Running Total | Section@Address | PSS | Path')
+ if group.IsDelta():
+ title_parts.append(u'\u0394 PSS (\u0394 size_without_padding)')
+ else:
+ title_parts.append('PSS')
+ title_parts.append('Path')
+ titles = ' | '.join(title_parts)
header_desc = (titles, '-' * 60)
@@ -483,50 +506,105 @@ class DescriberText(Describer):
group_desc = self._DescribeSymbolGroup(delta_group)
return itertools.chain(diff_summary_desc, path_delta_desc, group_desc)
- def _DescribeDeltaSizeInfo(self, diff):
- common_metadata = {
+ def _DescribeDeltaDict(self, data_name, before_dict, after_dict, indent=''):
+ common_items = {
k: v
- for k, v in diff.before.metadata.items()
- if diff.after.metadata.get(k) == v
+ for k, v in before_dict.items() if after_dict.get(k) == v
}
- before_metadata = {
+ before_items = {
k: v
- for k, v in diff.before.metadata.items() if k not in common_metadata
+ for k, v in before_dict.items() if k not in common_items
}
- after_metadata = {
- k: v
- for k, v in diff.after.metadata.items() if k not in common_metadata
- }
- metadata_desc = itertools.chain(
- ('Common Metadata:',),
- (' %s' % line for line in DescribeMetadata(common_metadata)),
- ('Old Metadata:',),
- (' %s' % line for line in DescribeMetadata(before_metadata)),
- ('New Metadata:',),
- (' %s' % line for line in DescribeMetadata(after_metadata)))
- section_desc = self._DescribeSectionSizes(diff.section_sizes)
- group_desc = self.GenerateLines(diff.symbols)
- return itertools.chain(metadata_desc, section_desc, ('',), group_desc)
+ after_items = {k: v for k, v in after_dict.items() if k not in common_items}
+ return itertools.chain(
+ (indent + 'Common %s:' % data_name, ),
+ (indent + ' %s' % line for line in DescribeDict(common_items)),
+ (indent + 'Old %s:' % data_name, ),
+ (indent + ' %s' % line for line in DescribeDict(before_items)),
+ (indent + 'New %s:' % data_name, ),
+ (indent + ' %s' % line for line in DescribeDict(after_items)))
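+    # Illustration (hypothetical dicts): before={'a': 1, 'b': 2} and
+    # after={'a': 1, 'b': 3} report Common: a=1, Old: b=2, New: b=3.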
+
+ def _DescribeDeltaSizeInfo(self, diff):
+ desc_list = []
+ # Describe |build_config| and each container. If there is only one container
+ # then support legacy output by reporting |build_config| as part of the
+ # first container's metadata.
+ if len(diff.containers) > 1:
+ desc_list.append(
+ self._DescribeDeltaDict('Build config', diff.before.build_config,
+ diff.after.build_config))
+ for c in diff.containers:
+ name = c.name
+ desc_list.append(('', ))
+ desc_list.append(('Container: <%s>' % name, ))
+ c_before = diff.before.ContainerForName(
+ name, default=models.Container.Empty())
+ c_after = diff.after.ContainerForName(name,
+ default=models.Container.Empty())
+ desc_list.append(
+ self._DescribeDeltaDict('Metadata',
+ c_before.metadata,
+ c_after.metadata,
+ indent=' '))
+ unsummed_sections, summed_sections = c.ClassifySections()
+ desc_list.append(
+ self._DescribeSectionSizes(unsummed_sections,
+ summed_sections,
+ c.section_sizes,
+ indent=' '))
+ else: # Legacy output for single Container case.
+ desc_list.append(
+ self._DescribeDeltaDict('Metadata', diff.before.metadata_legacy,
+ diff.after.metadata_legacy))
+ c = diff.containers[0]
+ unsummed_sections, summed_sections = c.ClassifySections()
+ desc_list.append(
+ self._DescribeSectionSizes(unsummed_sections, summed_sections,
+ c.section_sizes))
+ desc_list.append(('', ))
+ desc_list.append(self.GenerateLines(diff.symbols))
+ return itertools.chain.from_iterable(desc_list)
def _DescribeSizeInfo(self, size_info):
- metadata_desc = itertools.chain(
- ('Metadata:',),
- (' %s' % line for line in DescribeMetadata(size_info.metadata)))
- section_desc = self._DescribeSectionSizes(size_info.section_sizes)
- coverage_desc = ()
+ desc_list = []
+ # Describe |build_config| and each container. If there is only one container
+ # then support legacy output by reporting |build_config| as part of the
+ # first container's metadata.
+ if len(size_info.containers) > 1:
+ desc_list.append(('Build Configs:', ))
+ desc_list.append(' %s' % line
+ for line in DescribeDict(size_info.build_config))
+ containers = size_info.containers
+ else:
+ containers = [
+ models.Container(name='',
+ metadata=size_info.metadata_legacy,
+ section_sizes=size_info.containers[0].section_sizes)
+ ]
+ for c in containers:
+ if c.name:
+ desc_list.append(('', ))
+ desc_list.append(('Container <%s>' % c.name, ))
+ desc_list.append(('Metadata:', ))
+ desc_list.append(' %s' % line for line in DescribeDict(c.metadata))
+ unsummed_sections, summed_sections = c.ClassifySections()
+ desc_list.append(
+ self._DescribeSectionSizes(unsummed_sections, summed_sections,
+ c.section_sizes))
+
if self.verbose:
- coverage_desc = itertools.chain(
- ('',), DescribeSizeInfoCoverage(size_info))
- group_desc = self.GenerateLines(size_info.symbols)
- return itertools.chain(metadata_desc, section_desc, coverage_desc, ('',),
- group_desc)
+ desc_list.append(('', ))
+ desc_list.append(DescribeSizeInfoCoverage(size_info))
+ desc_list.append(('', ))
+ desc_list.append(self.GenerateLines(size_info.symbols))
+ return itertools.chain.from_iterable(desc_list)
-def DescribeSizeInfoCoverage(size_info):
+def _DescribeSizeInfoContainerCoverage(raw_symbols, container):
"""Yields lines describing how accurate |size_info| is."""
for section, section_name in models.SECTION_TO_SECTION_NAME.items():
- expected_size = size_info.section_sizes.get(section_name)
- in_section = size_info.raw_symbols.WhereInSection(section_name)
+ expected_size = container.section_sizes.get(section_name)
+ in_section = raw_symbols.WhereInSection(section_name, container=container)
actual_size = in_section.size
if expected_size is None:
@@ -615,6 +693,18 @@ def DescribeSizeInfoCoverage(size_info):
yield ' B) ' + repr(sym)
+def DescribeSizeInfoCoverage(size_info):
+ for i, container in enumerate(size_info.containers):
+ if i > 0:
+ yield ''
+ if container.name:
+ yield 'Container <%s>' % container.name
+ # TODO(huangs): Change to use "yield from" once linters allow this.
+ for line in _DescribeSizeInfoContainerCoverage(size_info.raw_symbols,
+ container):
+ yield line
+
+
class DescriberCsv(Describer):
def __init__(self, verbose=False):
super(DescriberCsv, self).__init__()
@@ -628,9 +718,11 @@ class DescriberCsv(Describer):
self.csv_writer.writerow(data)
return self.stringio.getvalue().rstrip()
- def _DescribeSectionSizes(self, section_sizes):
- significant_section_names = _GetSectionSizeInfo(section_sizes)[1]
-
+  def _DescribeSectionSizes(self, unsummed_sections, summed_sections,
+                            section_sizes):
+    _, significant_section_names = _GetSectionSizeInfo(unsummed_sections,
+                                                       summed_sections,
+                                                       section_sizes)
if self.verbose:
significant_set = set(significant_section_names)
section_names = sorted(section_sizes.keys())
@@ -645,14 +737,26 @@ class DescriberCsv(Describer):
yield self._RenderCsv([name, size])
def _DescribeDeltaSizeInfo(self, diff):
- section_desc = self._DescribeSectionSizes(diff.section_sizes)
- group_desc = self.GenerateLines(diff.symbols)
- return itertools.chain(section_desc, ('',), group_desc)
+ desc_list = []
+ for c in diff.containers:
+ unsummed_sections, summed_sections = c.ClassifySections()
+ desc_list.append(
+ self._DescribeSectionSizes(unsummed_sections, summed_sections,
+ c.section_sizes))
+ desc_list.append(('', ))
+ desc_list.append(self.GenerateLines(diff.symbols))
+ return itertools.chain.from_iterable(desc_list)
def _DescribeSizeInfo(self, size_info):
- section_desc = self._DescribeSectionSizes(size_info.section_sizes)
- group_desc = self.GenerateLines(size_info.symbols)
- return itertools.chain(section_desc, ('',), group_desc)
+ desc_list = []
+ for c in size_info.containers:
+ unsummed_sections, summed_sections = c.ClassifySections()
+ desc_list.append(
+ self._DescribeSectionSizes(unsummed_sections, summed_sections,
+ c.section_sizes))
+ desc_list.append(('', ))
+ desc_list.append(self.GenerateLines(size_info.symbols))
+ return itertools.chain.from_iterable(desc_list)
def _DescribeDeltaSymbolGroup(self, delta_group):
yield self._RenderSymbolHeader(True);
@@ -724,16 +828,25 @@ def _UtcToLocal(utc):
return utc + offset
-def DescribeMetadata(metadata):
- display_dict = metadata.copy()
- timestamp = display_dict.get(models.METADATA_ELF_MTIME)
- if timestamp:
- timestamp_obj = datetime.datetime.utcfromtimestamp(timestamp)
- display_dict[models.METADATA_ELF_MTIME] = (
- _UtcToLocal(timestamp_obj).strftime('%Y-%m-%d %H:%M:%S'))
- gn_args = display_dict.get(models.METADATA_GN_ARGS)
- if gn_args:
- display_dict[models.METADATA_GN_ARGS] = ' '.join(gn_args)
+def DescribeDict(input_dict):
+ display_dict = {}
+ for k, v in input_dict.items():
+ if k == models.METADATA_ELF_MTIME:
+ timestamp_obj = datetime.datetime.utcfromtimestamp(v)
+ display_dict[k] = (
+ _UtcToLocal(timestamp_obj).strftime('%Y-%m-%d %H:%M:%S'))
+ elif isinstance(v, str):
+ display_dict[k] = v
+ elif isinstance(v, list):
+ if v:
+ if isinstance(v[0], str):
+ display_dict[k] = ' '.join(str(t) for t in v)
+ else:
+ display_dict[k] = repr(v)
+ else:
+ display_dict[k] = ''
+ else:
+ display_dict[k] = repr(v)
return sorted('%s=%s' % t for t in display_dict.items())
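# E.g. (illustrative): DescribeDict({'gn_args': ['a=1', 'b=2'], 'rev': 5})
# returns ['gn_args=a=1 b=2', 'rev=5'].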
diff --git a/chromium/tools/binary_size/libsupersize/diff.py b/chromium/tools/binary_size/libsupersize/diff.py
index 09aafe43e1a..b7035b6474c 100644
--- a/chromium/tools/binary_size/libsupersize/diff.py
+++ b/chromium/tools/binary_size/libsupersize/diff.py
@@ -4,6 +4,7 @@
"""Logic for diffing two SizeInfo objects."""
import collections
+import itertools
import logging
import re
@@ -91,6 +92,8 @@ def _MatchSymbols(before, after, key_func, padding_by_section_name):
def _DiffSymbolGroups(before, after):
# For changed symbols, padding is zeroed out. In order to not lose the
# information entirely, store it in aggregate.
+ # Ignoring Containers, i.e., paddings from sections across Containers are
+ # combined.
padding_by_section_name = collections.defaultdict(int)
# Usually >90% of symbols are exact matches, so all of the time is spent in
@@ -121,20 +124,63 @@ def _DiffSymbolGroups(before, after):
return models.DeltaSymbolGroup(all_deltas)
+def _DiffObj(before_obj, after_obj):
+ """Computes recursive diff of nested plain Python objects.
+
+ Assumes no cyclical links exist.
+ """
+ if before_obj is None:
+ if after_obj is None:
+ return None
+ before_obj = type(after_obj)()
+ elif after_obj is None:
+ after_obj = type(before_obj)()
+ if not isinstance(before_obj, type(after_obj)):
+ return '(type mismatch)'
+ if isinstance(before_obj, dict):
+ keys = set(before_obj.keys()) | set(after_obj.keys())
+ return {k: _DiffObj(before_obj.get(k), after_obj.get(k)) for k in keys}
+ elif isinstance(before_obj, list):
+ return [
+ _DiffObj(b, a) for b, a in itertools.zip_longest(before_obj, after_obj)
+ ]
+ elif isinstance(before_obj, (bool, str)):
+ return '%r -> %r' % (before_obj, after_obj)
+ elif isinstance(before_obj, (int, float, complex)):
+ return after_obj - before_obj
+ return '(unknown type)'
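+# Illustrative behavior (values invented): numeric leaves diff arithmetically,
+# str/bool leaves render as "old -> new", and dicts/lists recurse:
+#   _DiffObj({'a': 1, 'b': 'x'}, {'a': 3, 'b': 'y'})
+#   => {'a': 2, 'b': "'x' -> 'y'"}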
+
+
+def _DiffContainerLists(before_containers, after_containers):
+ """Computes diff of Containers lists, matching names."""
+  # Find ordered unique names, preferring the order of |after_containers|.
+ pairs = collections.OrderedDict()
+ for c in after_containers:
+ pairs[c.name] = [models.Container.Empty(), c]
+ for c in before_containers:
+ if c.name in pairs:
+ pairs[c.name][0] = c
+ else:
+ pairs[c.name] = [c, models.Container.Empty()]
+ ret = []
+ for name, [before_c, after_c] in pairs.items():
+ ret.append(
+ models.Container(name=name,
+ metadata=_DiffObj(before_c.metadata, after_c.metadata),
+ section_sizes=_DiffObj(before_c.section_sizes,
+ after_c.section_sizes)))
+  # This updates newly created diff Containers, not existing ones or EMPTY.
+ models.Container.AssignShortNames(ret)
+ return ret
+
+
def Diff(before, after, sort=False):
"""Diffs two SizeInfo objects. Returns a DeltaSizeInfo."""
assert isinstance(before, models.SizeInfo)
assert isinstance(after, models.SizeInfo)
- section_sizes = {
- k: after.section_sizes.get(k, 0) - v
- for k, v in before.section_sizes.items()
- }
- for k, v in after.section_sizes.items():
- if k not in section_sizes:
- section_sizes[k] = v
-
+ containers_diff = _DiffContainerLists(before.containers, after.containers)
symbol_diff = _DiffSymbolGroups(before.raw_symbols, after.raw_symbols)
- ret = models.DeltaSizeInfo(before, after, section_sizes, symbol_diff)
+ ret = models.DeltaSizeInfo(before, after, containers_diff, symbol_diff)
if sort:
syms = ret.symbols # Triggers clustering.
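
To make the recursion in _DiffObj() concrete, a couple of invented inputs and the values it would return (dict keys are unioned, lists are zipped with itertools.zip_longest, numbers subtract, strings render as '%r -> %r'):

# Assumes _DiffObj from above; inputs are invented.
_DiffObj({'apk_size': 100, 'id': 'x'}, {'apk_size': 140, 'id': 'y'})
# -> {'apk_size': 40, 'id': "'x' -> 'y'"}
_DiffObj(['a', 'b'], ['a'])
# -> ["'a' -> 'a'", "'b' -> ''"]  (a missing side is replaced by type()'s default)
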
diff --git a/chromium/tools/binary_size/libsupersize/diff_test.py b/chromium/tools/binary_size/libsupersize/diff_test.py
index 80a4d8996e6..33ec9d851bc 100755
--- a/chromium/tools/binary_size/libsupersize/diff_test.py
+++ b/chromium/tools/binary_size/libsupersize/diff_test.py
@@ -31,7 +31,13 @@ def _SetName(symbol, full_name, name=None):
def _CreateSizeInfo(aliases=None):
+ build_config = {}
+ metadata = {}
section_sizes = {'.text': 100, '.bss': 40}
+ containers = [
+ models.Container(name='', metadata=metadata, section_sizes=section_sizes)
+ ]
+ models.Container.AssignShortNames(containers)
TEXT = models.SECTION_TEXT
symbols = [
_MakeSym(models.SECTION_DEX_METHOD, 10, 'a', 'com.Foo#bar()'),
@@ -41,12 +47,13 @@ def _CreateSizeInfo(aliases=None):
_MakeSym(TEXT, 50, 'b'),
_MakeSym(TEXT, 60, ''),
]
+ # For simplicity, not associating |symbols| with |containers|.
if aliases:
for tup in aliases:
syms = symbols[tup[0]:tup[1]]
for sym in syms:
sym.aliases = syms
- return models.SizeInfo(section_sizes, symbols)
+ return models.SizeInfo(build_config, containers, symbols)
class DiffTest(unittest.TestCase):
@@ -55,27 +62,27 @@ class DiffTest(unittest.TestCase):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
- self.assertEquals(0, d.raw_symbols.padding)
+ self.assertEqual((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
+ self.assertEqual(0, d.raw_symbols.padding)
def testSimple_Add(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info1.raw_symbols -= [size_info1.raw_symbols[0]]
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(10, d.raw_symbols.size)
- self.assertEquals(0, d.raw_symbols.padding)
+ self.assertEqual((0, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(10, d.raw_symbols.size)
+ self.assertEqual(0, d.raw_symbols.padding)
def testSimple_Delete(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols -= [size_info2.raw_symbols[0]]
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 1), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(-10, d.raw_symbols.size)
- self.assertEquals(0, d.raw_symbols.padding)
+ self.assertEqual((0, 0, 1), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(-10, d.raw_symbols.size)
+ self.assertEqual(0, d.raw_symbols.padding)
def testSimple_Change(self):
size_info1 = _CreateSizeInfo()
@@ -84,9 +91,9 @@ class DiffTest(unittest.TestCase):
size_info2.raw_symbols[0].padding += 20
size_info2.raw_symbols[-1].size += 11
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((2, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(22, d.raw_symbols.size)
- self.assertEquals(20, d.raw_symbols.padding)
+ self.assertEqual((2, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(22, d.raw_symbols.size)
+ self.assertEqual(20, d.raw_symbols.padding)
def testDontMatchAcrossSections(self):
size_info1 = _CreateSizeInfo()
@@ -97,33 +104,34 @@ class DiffTest(unittest.TestCase):
size_info2.raw_symbols += [
_MakeSym(models.SECTION_RODATA, 11, 'asdf', name='Hello'),
]
+ # For simplicity, not associating |symbols| with |containers|.
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testAliases_Remove(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 3)])
size_info2 = _CreateSizeInfo(aliases=[(0, 2)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
- self.assertEquals((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testAliases_Add(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 2)])
size_info2 = _CreateSizeInfo(aliases=[(0, 3)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
- self.assertEquals((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testAliases_ChangeGroup(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 2), (2, 5)])
size_info2 = _CreateSizeInfo(aliases=[(0, 3), (3, 5)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
- self.assertEquals((4, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((4, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testStarSymbolNormalization(self):
size_info1 = _CreateSizeInfo()
@@ -131,8 +139,8 @@ class DiffTest(unittest.TestCase):
size_info2 = _CreateSizeInfo()
_SetName(size_info2.raw_symbols[0], '* symbol gap 2 (end of section)')
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testNumberNormalization(self):
TEXT = models.SECTION_TEXT
@@ -150,9 +158,10 @@ class DiffTest(unittest.TestCase):
_MakeSym(TEXT, 33, 'a', name='SingleCategoryPreferences$9#this$009'),
_MakeSym(TEXT, 44, 'a', name='.L.ref.tmp.137'),
]
+ # For simplicity, not associating |symbols| with |containers|.
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testChangedParams(self):
# Ensure that params changes match up so long as path doesn't change.
@@ -163,8 +172,8 @@ class DiffTest(unittest.TestCase):
size_info2.raw_symbols[0].full_name = 'Foo(bool)'
size_info2.raw_symbols[0].name = 'Foo'
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testChangedPaths_Native(self):
# Ensure that non-globally-unique symbols are not matched when path changes.
@@ -172,8 +181,8 @@ class DiffTest(unittest.TestCase):
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[1].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testChangedPaths_StringLiterals(self):
# Ensure that string literals are not matched up.
@@ -183,8 +192,8 @@ class DiffTest(unittest.TestCase):
size_info2.raw_symbols[0].full_name = models.STRING_LITERAL_NAME
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testChangedPaths_Java(self):
# Ensure that Java symbols are matched up.
@@ -192,8 +201,8 @@ class DiffTest(unittest.TestCase):
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
+ self.assertEqual((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
def testChangedPaths_ChangedParams(self):
# Ensure that path changes are not matched when params also change.
@@ -205,9 +214,8 @@ class DiffTest(unittest.TestCase):
size_info2.raw_symbols[0].name = 'Foo'
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
- self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
- self.assertEquals(0, d.raw_symbols.size)
-
+ self.assertEqual((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
+ self.assertEqual(0, d.raw_symbols.size)
if __name__ == '__main__':
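
A note on the assertions above: CountsByDiffStatus() returns one bucket per DIFF_STATUS_* value, and the [1:] slice drops the 'unchanged' bucket. Assuming the (unchanged, changed, added, removed) ordering implied by DIFF_COUNT_DELTA in models.py:

counts = (7, 2, 1, 0)  # invented CountsByDiffStatus() return value
unchanged, changed, added, removed = counts
assert counts[1:] == (2, 1, 0)  # the (changed, added, removed) triple asserted above
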
diff --git a/chromium/tools/binary_size/libsupersize/file_format.py b/chromium/tools/binary_size/libsupersize/file_format.py
index 20b5ec11383..a65ac0f2f7b 100644
--- a/chromium/tools/binary_size/libsupersize/file_format.py
+++ b/chromium/tools/binary_size/libsupersize/file_format.py
@@ -5,9 +5,9 @@
"""Deals with loading & saving .size and .sizediff files.
The .size file is written in the following format. There are no section
-delimiters, instead the end of a section is usually determined by a row count
-on the first line of a section, followed by that number of rows. In other
-cases, the sections have a known size.
+delimiters; instead, the end of a section is usually determined by a row count
+on the first line of a section, followed by that number of rows. In other
+cases, the sections have a known size.
Header
------
@@ -116,7 +116,8 @@ import parallel
# File format version for .size files.
-_SERIALIZATION_VERSION = 'Size File Format v1'
+_SERIALIZATION_VERSION_SINGLE_CONTAINER = 'Size File Format v1'
+_SERIALIZATION_VERSION_MULTI_CONTAINER = 'Size File Format v1.1'
# Header for .sizediff files
_SIZEDIFF_HEADER = '# Created by //tools/binary_size\nDIFF\n'
@@ -171,16 +172,18 @@ def CalculatePadding(raw_symbols):
# Padding not really required, but it is useful to check for large padding and
# log a warning.
- seen_sections = set()
+ seen_container_and_sections = set()
for i, symbol in enumerate(raw_symbols[1:]):
prev_symbol = raw_symbols[i]
if symbol.IsOverhead():
# Overhead symbols are not actionable so should be padding-only.
symbol.padding = symbol.size
- if prev_symbol.section_name != symbol.section_name:
- assert symbol.section_name not in seen_sections, (
- 'Input symbols must be sorted by section, then address.')
- seen_sections.add(symbol.section_name)
+ if (prev_symbol.container.name != symbol.container.name
+ or prev_symbol.section_name != symbol.section_name):
+ container_and_section = (symbol.container.name, symbol.section_name)
+ assert container_and_section not in seen_container_and_sections, (
+ 'Input symbols must be sorted by container, section, then address.')
+ seen_container_and_sections.add(container_and_section)
continue
if (symbol.address <= 0 or prev_symbol.address <= 0
or not symbol.IsNative() or not prev_symbol.IsNative()):
@@ -246,24 +249,45 @@ def _SaveSizeInfoToFile(size_info,
else:
raw_symbols = size_info.raw_symbols
+ num_containers = len(size_info.containers)
+ has_multi_containers = (num_containers > 1)
+
w = _Writer(file_obj)
- # Created by supersize header
+ # "Created by SuperSize" header
w.WriteLine('# Created by //tools/binary_size')
- w.WriteLine(_SERIALIZATION_VERSION)
+ if has_multi_containers:
+ w.WriteLine(_SERIALIZATION_VERSION_MULTI_CONTAINER)
+ else:
+ w.WriteLine(_SERIALIZATION_VERSION_SINGLE_CONTAINER)
+
# JSON header fields
fields = {
- 'metadata': size_info.metadata,
- 'section_sizes': size_info.section_sizes,
'has_components': True,
'has_padding': include_padding,
}
+
+ if has_multi_containers:
+ # Write using new format.
+ assert len(set(c.name for c in size_info.containers)) == num_containers, (
+ 'Container names must be distinct.')
+ fields['build_config'] = size_info.build_config
+ fields['containers'] = [{
+ 'name': c.name,
+ 'metadata': c.metadata,
+ 'section_sizes': c.section_sizes,
+ } for c in size_info.containers]
+ else:
+ # Write using old format.
+ fields['metadata'] = size_info.metadata_legacy
+ fields['section_sizes'] = size_info.containers[0].section_sizes
+
fields_str = json.dumps(fields, indent=2, sort_keys=True)
w.WriteLine(str(len(fields_str)))
w.WriteLine(fields_str)
w.LogSize('header') # For libchrome: 570 bytes.
- # Store a single copy of all paths and have them referenced by index.
+ # Store a single copy of all paths and reference them by index.
unique_path_tuples = sorted(
set((s.object_path, s.source_path) for s in raw_symbols))
path_tuples = {tup: i for i, tup in enumerate(unique_path_tuples)}
@@ -280,9 +304,18 @@ def _SaveSizeInfoToFile(size_info,
w.WriteLine(comp)
w.LogSize('components')
- # Symbol counts by section.
- symbol_group_by_section = raw_symbols.GroupedBySectionName()
- w.WriteLine('\t'.join(g.name for g in symbol_group_by_section))
+ # Symbol counts by container and section.
+ symbol_group_by_section = raw_symbols.GroupedByContainerAndSectionName()
+ if has_multi_containers:
+ container_name_to_index = {
+ c.name: i
+ for i, c in enumerate(size_info.containers)
+ }
+ w.WriteLine('\t'.join('<%d>%s' %
+ (container_name_to_index[g.name[0]], g.name[1])
+ for g in symbol_group_by_section))
+ else:
+ w.WriteLine('\t'.join(g.name[1] for g in symbol_group_by_section))
w.WriteLine('\t'.join(str(len(g)) for g in symbol_group_by_section))
def gen_delta(gen, prev_value=0):
@@ -371,17 +404,49 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
"""
# Split lines on '\n', since '\r' can appear in some lines!
lines = io.TextIOWrapper(file_obj, newline='\n')
- _ReadLine(lines) # Line 0: Created by supersize header
+ _ReadLine(lines) # Line 0: "Created by SuperSize" header
actual_version = _ReadLine(lines)
- assert actual_version == _SERIALIZATION_VERSION, (
- 'Version mismatch. Need to write some upgrade code.')
+ if actual_version == _SERIALIZATION_VERSION_SINGLE_CONTAINER:
+ has_multi_containers = False
+ elif actual_version == _SERIALIZATION_VERSION_MULTI_CONTAINER:
+ has_multi_containers = True
+ else:
+ raise ValueError('Version mismatch. Need to write some upgrade code.')
+
# JSON header fields
json_len = int(_ReadLine(lines))
json_str = lines.read(json_len)
fields = json.loads(json_str)
- section_sizes = fields['section_sizes']
- metadata = fields.get('metadata')
+  assert ('containers' in fields) == has_multi_containers
+  assert ('build_config' in fields) == has_multi_containers
+  assert ('metadata' not in fields) == has_multi_containers
+  assert ('section_sizes' not in fields) == has_multi_containers
+
+ containers = []
+ if has_multi_containers: # New format.
+ build_config = fields['build_config']
+ for cfield in fields['containers']:
+ c = models.Container(name=cfield['name'],
+ metadata=cfield['metadata'],
+ section_sizes=cfield['section_sizes'])
+ containers.append(c)
+ else: # Old format.
+ build_config = {}
+ metadata = fields.get('metadata')
+ if metadata:
+ for key in models.BUILD_CONFIG_KEYS:
+ if key in metadata:
+ build_config[key] = metadata[key]
+ del metadata[key]
+ section_sizes = fields['section_sizes']
+ containers.append(
+ models.Container(name='',
+ metadata=metadata,
+ section_sizes=section_sizes))
+ models.Container.AssignShortNames(containers)
+
has_components = fields.get('has_components', False)
has_padding = fields.get('has_padding', False)
@@ -389,7 +454,7 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
_ReadLine(lines)
# Path list
- num_path_tuples = int(_ReadLine(lines)) # Line 4 - number of paths in list
+ num_path_tuples = int(_ReadLine(lines)) # Number of paths in list
# Read the path list values and store for later
path_tuples = [
_ReadValuesFromLine(lines, split='\t') for _ in range(num_path_tuples)
@@ -401,8 +466,8 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
components = [_ReadLine(lines) for _ in range(num_components)]
# Symbol counts by section.
- section_names = _ReadValuesFromLine(lines, split='\t')
- section_counts = [int(c) for c in _ReadValuesFromLine(lines, split='\t')]
+ container_and_section_names = _ReadValuesFromLine(lines, split='\t')
+ symbol_counts = [int(c) for c in _ReadValuesFromLine(lines, split='\t')]
# Addresses, sizes, paddings, path indices, component indices
def read_numeric(delta=False):
@@ -414,7 +479,7 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
"""
ret = []
delta_multiplier = int(delta)
- for _ in section_counts:
+ for _ in symbol_counts:
value = 0
fields = []
for f in _ReadValuesFromLine(lines, split=' '):
@@ -428,21 +493,31 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
if has_padding:
paddings = read_numeric(delta=False)
else:
- paddings = [None] * len(section_names)
+ paddings = [None] * len(container_and_section_names)
path_indices = read_numeric(delta=True)
if has_components:
component_indices = read_numeric(delta=True)
else:
- component_indices = [None] * len(section_names)
+ component_indices = [None] * len(container_and_section_names)
- raw_symbols = [None] * sum(section_counts)
+ raw_symbols = [None] * sum(symbol_counts)
symbol_idx = 0
- for (cur_section_name, cur_section_count, cur_addresses, cur_sizes,
- cur_paddings, cur_path_indices, cur_component_indices) in zip(
- section_names, section_counts, addresses, sizes, paddings,
- path_indices, component_indices):
+ for (cur_container_and_section_name, cur_symbol_count, cur_addresses,
+ cur_sizes, cur_paddings, cur_path_indices,
+ cur_component_indices) in zip(container_and_section_names, symbol_counts,
+ addresses, sizes, paddings, path_indices,
+ component_indices):
+ if has_multi_containers:
+ # Extract '<cur_container_idx_str>cur_section_name'.
+ assert cur_container_and_section_name.startswith('<')
+ cur_container_idx_str, cur_section_name = (
+ cur_container_and_section_name[1:].split('>', 1))
+ cur_container = containers[int(cur_container_idx_str)]
+ else:
+ cur_section_name = cur_container_and_section_name
+ cur_container = containers[0]
alias_counter = 0
- for i in range(cur_section_count):
+ for i in range(cur_symbol_count):
parts = _ReadValuesFromLine(lines, split='\t')
full_name = parts[0]
flags_part = None
@@ -469,6 +544,7 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
# Skip the constructor to avoid default value checks
new_sym = models.Symbol.__new__(models.Symbol)
+ new_sym.container = cur_container
new_sym.section_name = cur_section_name
new_sym.full_name = full_name
new_sym.address = cur_addresses[i]
@@ -505,7 +581,9 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
if not has_padding:
CalculatePadding(raw_symbols)
- return models.SizeInfo(section_sizes, raw_symbols, metadata=metadata,
+ return models.SizeInfo(build_config,
+ containers,
+ raw_symbols,
size_path=size_path)
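
In the v1.1 format, each column of the symbol-counts row carries a container-index tag, e.g. '<0>.text'; v1 columns are bare section names. A standalone sketch of the decode step used in _LoadSizeInfoFromFile() (function name invented):

def parse_counts_column(tag, containers):
  # '<1>.rodata' -> (containers[1], '.rodata'); untagged v1 -> containers[0].
  if tag.startswith('<'):
    idx_str, section_name = tag[1:].split('>', 1)
    return containers[int(idx_str)], section_name
  return containers[0], tag

assert parse_counts_column('<1>.rodata', ['c0', 'c1']) == ('c1', '.rodata')
assert parse_counts_column('.text', ['c0']) == ('c0', '.text')
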
diff --git a/chromium/tools/binary_size/libsupersize/html_report.py b/chromium/tools/binary_size/libsupersize/html_report.py
index 72d9e27a119..bad1c8367cd 100644
--- a/chromium/tools/binary_size/libsupersize/html_report.py
+++ b/chromium/tools/binary_size/libsupersize/html_report.py
@@ -212,18 +212,20 @@ def BuildReportFromSizeInfo(out_path, size_info, all_symbols=False):
symbols = symbols.WhereDiffStatusIs(models.DIFF_STATUS_UNCHANGED).Inverted()
meta, tree_nodes = _MakeTreeViewList(symbols, all_symbols)
+ assert len(size_info.containers) == 1
+ c = size_info.containers[0]
logging.info('Created %d tree nodes', len(tree_nodes))
meta.update({
- 'diff_mode': is_diff,
- 'section_sizes': size_info.section_sizes,
+ 'diff_mode': is_diff,
+ 'section_sizes': c.section_sizes,
})
if is_diff:
meta.update({
- 'before_metadata': size_info.before.metadata,
- 'after_metadata': size_info.after.metadata,
+ 'before_metadata': size_info.before.metadata_legacy,
+ 'after_metadata': size_info.after.metadata_legacy,
})
else:
- meta['metadata'] = size_info.metadata
+ meta['metadata'] = size_info.metadata_legacy
# Write newline-delimited JSON file
logging.info('Serializing JSON')
diff --git a/chromium/tools/binary_size/libsupersize/integration_test.py b/chromium/tools/binary_size/libsupersize/integration_test.py
index 805ea14945a..bd47604cf37 100755
--- a/chromium/tools/binary_size/libsupersize/integration_test.py
+++ b/chromium/tools/binary_size/libsupersize/integration_test.py
@@ -170,14 +170,19 @@ class IntegrationTest(unittest.TestCase):
'source_directory': _TEST_SOURCE_DIR,
})
- def _CloneSizeInfo(self, use_output_directory=True, use_elf=True,
- use_apk=False, use_minimal_apks=False, use_pak=False):
+ def _CloneSizeInfo(self,
+ use_output_directory=True,
+ use_elf=False,
+ use_apk=False,
+ use_minimal_apks=False,
+ use_pak=False,
+ use_aux_elf=False):
assert not use_elf or use_output_directory
assert not (use_apk and use_pak)
- cache_key = (
- use_output_directory, use_elf, use_apk, use_minimal_apks, use_pak)
+ cache_key = (use_output_directory, use_elf, use_apk, use_minimal_apks,
+ use_pak, use_aux_elf)
if cache_key not in IntegrationTest.cached_size_info:
- elf_path = _TEST_ELF_PATH if use_elf else None
+ elf_path = _TEST_ELF_PATH if use_elf or use_aux_elf else None
output_directory = _TEST_OUTPUT_DIR if use_output_directory else None
knobs = archive.SectionSizeKnobs()
opts = archive.ContainerArchiveOptions(self._CreateTestArgs())
@@ -210,34 +215,38 @@ class IntegrationTest(unittest.TestCase):
pak_info_file = _TEST_PAK_INFO_PATH
linker_name = 'gold'
with _AddMocksToPath():
+ build_config = {}
metadata = archive.CreateMetadata(_TEST_MAP_PATH, elf_path, apk_path,
minimal_apks_path, _TEST_TOOL_PREFIX,
- output_directory, linker_name)
- section_sizes, raw_symbols = archive.CreateSectionSizesAndSymbols(
+ output_directory, linker_name,
+ build_config)
+ container, raw_symbols = archive.CreateContainerAndSymbols(
knobs=knobs,
opts=opts,
+ container_name='',
+ metadata=metadata,
map_path=_TEST_MAP_PATH,
tool_prefix=_TEST_TOOL_PREFIX,
- elf_path=elf_path,
output_directory=output_directory,
+ elf_path=elf_path,
apk_path=apk_path or extracted_minimal_apk_path,
apk_so_path=apk_so_path,
- metadata=metadata,
pak_files=pak_files,
pak_info_file=pak_info_file,
linker_name=linker_name,
size_info_prefix=size_info_prefix)
IntegrationTest.cached_size_info[cache_key] = archive.CreateSizeInfo(
- [section_sizes], [raw_symbols], [metadata])
+ build_config, [container], [raw_symbols])
return copy.deepcopy(IntegrationTest.cached_size_info[cache_key])
def _DoArchive(self,
archive_path,
use_output_directory=True,
- use_elf=True,
+ use_elf=False,
use_apk=False,
use_minimal_apks=False,
use_pak=False,
+ use_aux_elf=None,
debug_measures=False,
include_padding=False):
args = [
@@ -255,42 +264,46 @@ class IntegrationTest(unittest.TestCase):
args += ['-f', _TEST_APK_PATH]
elif use_minimal_apks:
args += ['-f', _TEST_MINIMAL_APKS_PATH]
- if use_elf:
- if use_apk or use_minimal_apks:
- args += ['--elf-file', _TEST_ELF_PATH]
- else:
- args += ['-f', _TEST_ELF_PATH]
+ elif use_elf:
+ args += ['-f', _TEST_ELF_PATH]
if use_pak:
args += ['--pak-file', _TEST_APK_LOCALE_PAK_PATH,
'--pak-file', _TEST_APK_PAK_PATH,
'--pak-info-file', _TEST_PAK_INFO_PATH]
+ if use_aux_elf:
+ args += ['--aux-elf-file', _TEST_ELF_PATH]
if include_padding:
args += ['--include-padding']
_RunApp('archive', args, debug_measures=debug_measures)
def _DoArchiveTest(self,
use_output_directory=True,
- use_elf=True,
+ use_elf=False,
use_apk=False,
use_minimal_apks=False,
use_pak=False,
+ use_aux_elf=False,
debug_measures=False,
include_padding=False):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
- self._DoArchive(
- temp_file.name,
- use_output_directory=use_output_directory,
- use_elf=use_elf,
- use_apk=use_apk,
- use_minimal_apks=use_minimal_apks,
- use_pak=use_pak,
- debug_measures=debug_measures,
- include_padding=include_padding)
+ self._DoArchive(temp_file.name,
+ use_output_directory=use_output_directory,
+ use_elf=use_elf,
+ use_apk=use_apk,
+ use_minimal_apks=use_minimal_apks,
+ use_pak=use_pak,
+ use_aux_elf=use_aux_elf,
+ debug_measures=debug_measures,
+ include_padding=include_padding)
size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
# Check that saving & loading is the same as directly parsing.
expected_size_info = self._CloneSizeInfo(
- use_output_directory=use_output_directory, use_elf=use_elf,
- use_apk=use_apk, use_minimal_apks=use_minimal_apks, use_pak=use_pak)
+ use_output_directory=use_output_directory,
+ use_elf=use_elf,
+ use_apk=use_apk,
+ use_minimal_apks=use_minimal_apks,
+ use_pak=use_pak,
+ use_aux_elf=use_aux_elf)
self.assertEqual(expected_size_info.metadata, size_info.metadata)
# Don't cluster.
expected_size_info.symbols = expected_size_info.raw_symbols
@@ -301,11 +314,15 @@ class IntegrationTest(unittest.TestCase):
sym_strs = (repr(sym) for sym in size_info.symbols)
stats = describe.DescribeSizeInfoCoverage(size_info)
- if size_info.metadata:
- metadata = describe.DescribeMetadata(size_info.metadata)
+ if len(size_info.containers) == 1:
+    # If there's only one container, merge build_config into its metadata.
+ merged_data_desc = describe.DescribeDict(size_info.metadata_legacy)
+ return itertools.chain(merged_data_desc, stats, sym_strs)
else:
- metadata = []
- return itertools.chain(metadata, stats, sym_strs)
+ build_config = describe.DescribeDict(size_info.build_config)
+ metadata = itertools.chain.from_iterable(
+ describe.DescribeDict(c.metadata) for c in size_info.containers)
+ return itertools.chain(build_config, metadata, stats, sym_strs)
@_CompareWithGolden()
def test_Archive(self):
@@ -313,38 +330,38 @@ class IntegrationTest(unittest.TestCase):
@_CompareWithGolden()
def test_Archive_OutputDirectory(self):
- return self._DoArchiveTest(use_elf=False)
+ return self._DoArchiveTest()
@_CompareWithGolden()
def test_Archive_Elf(self):
- return self._DoArchiveTest()
+ return self._DoArchiveTest(use_elf=True)
@_CompareWithGolden()
def test_Archive_Apk(self):
- return self._DoArchiveTest(use_apk=True)
+ return self._DoArchiveTest(use_apk=True, use_aux_elf=True)
@_CompareWithGolden()
def test_Archive_MinimalApks(self):
- return self._DoArchiveTest(use_minimal_apks=True)
+ return self._DoArchiveTest(use_minimal_apks=True, use_aux_elf=True)
@_CompareWithGolden()
def test_Archive_Pak_Files(self):
- return self._DoArchiveTest(use_pak=True)
+ return self._DoArchiveTest(use_pak=True, use_aux_elf=True)
@_CompareWithGolden(name='Archive_Elf')
def test_Archive_Elf_DebugMeasures(self):
- return self._DoArchiveTest(debug_measures=True)
+ return self._DoArchiveTest(use_elf=True, debug_measures=True)
@_CompareWithGolden(name='Archive')
def test_ArchiveSparse(self):
- return self._DoArchiveTest(
- use_output_directory=False, use_elf=False, include_padding=True)
+ return self._DoArchiveTest(use_output_directory=False, include_padding=True)
@_CompareWithGolden()
def test_Console(self):
with tempfile.NamedTemporaryFile(suffix='.size') as size_file, \
tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
- file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
+ file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
+ size_file.name)
query = [
'ShowExamples()',
'ExpandRegex("_foo_")',
@@ -366,7 +383,8 @@ class IntegrationTest(unittest.TestCase):
def test_Csv(self):
with tempfile.NamedTemporaryFile(suffix='.size') as size_file, \
tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
- file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
+ file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
+ size_file.name)
query = [
'Csv(size_info, to_file=%r)' % output_file.name,
]
@@ -378,7 +396,8 @@ class IntegrationTest(unittest.TestCase):
@_CompareWithGolden()
def test_Diff_NullDiff(self):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
- file_format.SaveSizeInfo(self._CloneSizeInfo(), temp_file.name)
+ file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
+ temp_file.name)
return _RunApp('diff', [temp_file.name, temp_file.name])
# Runs archive 3 times, and asserts the contents are the same each time.
@@ -393,10 +412,13 @@ class IntegrationTest(unittest.TestCase):
@_CompareWithGolden()
def test_Diff_Basic(self):
- size_info1 = self._CloneSizeInfo(use_elf=False, use_pak=True)
- size_info2 = self._CloneSizeInfo(use_elf=False, use_pak=True)
- size_info1.metadata = {"foo": 1, "bar": [1,2,3], "baz": "yes"}
- size_info2.metadata = {"foo": 1, "bar": [1,3], "baz": "yes"}
+ size_info1 = self._CloneSizeInfo(use_pak=True)
+ size_info2 = self._CloneSizeInfo(use_pak=True)
+ size_info2.build_config['git_revision'] = 'xyz789'
+ container1 = size_info1.containers[0]
+ container2 = size_info2.containers[0]
+ container1.metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
+ container2.metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}
size_info1.raw_symbols -= size_info1.raw_symbols[:2]
size_info2.raw_symbols -= size_info2.raw_symbols[-3:]
@@ -434,7 +456,7 @@ class IntegrationTest(unittest.TestCase):
@_CompareWithGolden()
def test_FullDescription(self):
- size_info = self._CloneSizeInfo()
+ size_info = self._CloneSizeInfo(use_elf=True)
# Show both clustered and non-clustered so that they can be compared.
size_info.symbols = size_info.raw_symbols
return itertools.chain(
@@ -445,7 +467,7 @@ class IntegrationTest(unittest.TestCase):
@_CompareWithGolden()
def test_SymbolGroupMethods(self):
- all_syms = self._CloneSizeInfo().symbols
+ all_syms = self._CloneSizeInfo(use_elf=True).symbols
global_syms = all_syms.WhereNameMatches('GLOBAL')
# Tests Filter(), Inverted(), and __sub__().
non_global_syms = global_syms.Inverted()
diff --git a/chromium/tools/binary_size/libsupersize/models.py b/chromium/tools/binary_size/libsupersize/models.py
index 1edbb39f311..dae0f808b83 100644
--- a/chromium/tools/binary_size/libsupersize/models.py
+++ b/chromium/tools/binary_size/libsupersize/models.py
@@ -22,6 +22,7 @@ Description of common properties:
Never None, but will be '' for anonymous symbols.
* is_anonymous: True when the symbol exists in an anonymous namespace (which
are removed from both full_name and name during normalization).
+ * container: A (shared) Container instance.
* section_name: E.g. ".text", ".rodata", ".data.rel.local"
* section: The second character of |section_name|. E.g. "t", "r", "d".
* component: The team that owns this feature.
@@ -37,7 +38,18 @@ import re
import match_util
-METADATA_GIT_REVISION = 'git_revision'
+BUILD_CONFIG_GIT_REVISION = 'git_revision'
+BUILD_CONFIG_GN_ARGS = 'gn_args'
+BUILD_CONFIG_LINKER_NAME = 'linker_name'
+BUILD_CONFIG_TOOL_PREFIX = 'tool_prefix' # Path relative to SRC_ROOT.
+
+BUILD_CONFIG_KEYS = (
+ BUILD_CONFIG_GIT_REVISION,
+ BUILD_CONFIG_GN_ARGS,
+ BUILD_CONFIG_LINKER_NAME,
+ BUILD_CONFIG_TOOL_PREFIX,
+)
+
METADATA_APK_FILENAME = 'apk_file_name' # Path relative to output_directory.
METADATA_APK_SIZE = 'apk_size' # File size of apk in bytes.
METADATA_MAP_FILENAME = 'map_file_name' # Path relative to output_directory.
@@ -46,9 +58,6 @@ METADATA_ELF_FILENAME = 'elf_file_name' # Path relative to output_directory.
METADATA_ELF_MTIME = 'elf_mtime' # int timestamp in utc.
METADATA_ELF_BUILD_ID = 'elf_build_id'
METADATA_ELF_RELOCATIONS_COUNT = 'elf_relocations_count'
-METADATA_GN_ARGS = 'gn_args'
-METADATA_LINKER_NAME = 'linker_name'
-METADATA_TOOL_PREFIX = 'tool_prefix' # Path relative to SRC_ROOT.
# New sections should also be added to the SuperSize UI.
SECTION_BSS = '.bss'
@@ -93,6 +102,8 @@ PAK_SECTIONS = (
SECTION_PAK_TRANSLATIONS,
)
+CONTAINER_MULTIPLE = '*'
+
SECTION_NAME_TO_SECTION = {
SECTION_BSS: 'b',
SECTION_BSS_REL_RO: 'b',
@@ -170,35 +181,108 @@ DIFF_COUNT_DELTA = [0, 0, 1, -1]
STRING_LITERAL_NAME = 'string literal'
+
+def ClassifySections(section_names):
+  """Returns section name subsets classified by contribution to binary size.
+
+ Args:
+ section_names: A list of existing sections names.
+
+ Returns:
+ Tuple (unsummed_sections, summed_sections). |unsummed_sections| are sections
+ that don't contribute to binary size. |summed_sections| are sections that
+ *explicitly* contribute to binary size. What's excluded are sections that
+ *implicitly* contribute to binary size -- these get lumped into the .other
+ section.
+ """
+ unsummed_sections = set(name for name in section_names
+ if name in BSS_SECTIONS or '(' in name)
+ summed_sections = (set(section_names)
+ & set(SECTION_NAME_TO_SECTION.keys()) - unsummed_sections)
+ return frozenset(unsummed_sections), frozenset(summed_sections)
+
+
+class Container(object):
+ """Info for a single SuperSize input file (e.g., APK file).
+
+ Fields:
+ name: Container name. Must be unique among containers, and can be ''.
+    short_name: Short container name for compact display. This also needs to
+        be unique among containers in the same SizeInfo, and can be ''.
+ metadata: A dict.
+ section_sizes: A dict of section_name -> size.
+ classified_sections: Cache for ClassifySections().
+ """
+ __slots__ = (
+ 'name',
+ 'short_name',
+ 'metadata',
+ 'section_sizes',
+ '_classified_sections',
+ )
+
+ def __init__(self, name, metadata, section_sizes):
+ # name == '' hints that only one container exists, and there's no need to
+ # distinguish them. This can affect console output.
+ self.name = name
+ self.short_name = None # Assigned by AssignShortNames().
+ self.metadata = metadata or {}
+ self.section_sizes = section_sizes # E.g. {SECTION_TEXT: 0}
+ self._classified_sections = None
+
+ @staticmethod
+ def AssignShortNames(containers):
+ for i, c in enumerate(containers):
+ c.short_name = str(i) if c.name else ''
+
+ def ClassifySections(self):
+ if not self._classified_sections:
+ self._classified_sections = ClassifySections(self.section_sizes.keys())
+ return self._classified_sections
+
+ @staticmethod
+ def Empty():
+ """Returns a placeholder Container that should be read-only.
+
+ For simplicity, we're not enforcing read-only checks (frozenmap does not
+ exist, unfortunately). Creating a new instance instead of using a global
+ singleton for robustness.
+ """
+ return Container(name='(empty)', metadata={}, section_sizes={})
+
+
class BaseSizeInfo(object):
"""Base class for SizeInfo and DeltaSizeInfo.
Fields:
- section_sizes: A dict of section_name -> size.
+ build_config: A dict of build configurations.
+ containers: A list of Containers.
raw_symbols: A SymbolGroup containing all top-level symbols (no groups).
symbols: A SymbolGroup of all symbols, where symbols have been
grouped by full_name (where applicable). May be re-assigned when it is
- desirable to show custom groupings while still printing metadata and
- section_sizes.
+ desirable to show custom groupings while still printing containers.
native_symbols: Subset of |symbols| that are from native code.
pak_symbols: Subset of |symbols| that are from pak files.
"""
__slots__ = (
- 'section_sizes',
+ 'build_config',
+ 'containers',
'raw_symbols',
'_symbols',
'_native_symbols',
'_pak_symbols',
)
- def __init__(self, section_sizes, raw_symbols, symbols=None):
+ def __init__(self, build_config, containers, raw_symbols, symbols=None):
if isinstance(raw_symbols, list):
raw_symbols = SymbolGroup(raw_symbols)
- self.section_sizes = section_sizes # E.g. {SECTION_TEXT: 0}
+ self.build_config = build_config
+ self.containers = containers
self.raw_symbols = raw_symbols
self._symbols = symbols
self._native_symbols = None
self._pak_symbols = None
+ Container.AssignShortNames(self.containers)
@property
def symbols(self):
@@ -226,25 +310,53 @@ class BaseSizeInfo(object):
self._pak_symbols = self.raw_symbols.WhereIsPak()
return self._pak_symbols
+ @property
+ def all_section_sizes(self):
+ return [c.section_sizes for c in self.containers]
+
+ @property
+ def metadata(self):
+ return [c.metadata for c in self.containers]
+
+ def ContainerForName(self, name, default=None):
+ return next((c for c in self.containers if c.name == name), default)
+
class SizeInfo(BaseSizeInfo):
"""Represents all size information for a single binary.
Fields:
- metadata: A dict.
size_path: Path to .size file this was loaded from (or None).
"""
__slots__ = (
- 'metadata',
'size_path',
)
- def __init__(self, section_sizes, raw_symbols, metadata=None, symbols=None,
+ def __init__(self,
+ build_config,
+ containers,
+ raw_symbols,
+ symbols=None,
size_path=None):
- super(SizeInfo, self).__init__(section_sizes, raw_symbols, symbols=symbols)
- self.metadata = metadata or {}
+ super(SizeInfo, self).__init__(build_config,
+ containers,
+ raw_symbols,
+ symbols=symbols)
self.size_path = size_path
+ @property
+ def metadata_legacy(self):
+    """Returns |containers[0].metadata| fused with |build_config|.
+
+ Supported only if there is one Container.
+ """
+ assert len(self.containers) == 1
+ metadata = self.containers[0].metadata.copy()
+ for k, v in self.build_config.items():
+ assert k not in metadata
+ metadata[k] = v
+ return metadata
+
class DeltaSizeInfo(BaseSizeInfo):
"""What you get when you Diff() two SizeInfo objects.
@@ -258,8 +370,8 @@ class DeltaSizeInfo(BaseSizeInfo):
'after',
)
- def __init__(self, before, after, section_sizes, raw_symbols):
- super(DeltaSizeInfo, self).__init__(section_sizes, raw_symbols)
+ def __init__(self, before, after, containers, raw_symbols):
+ super(DeltaSizeInfo, self).__init__(None, containers, raw_symbols)
self.before = before
self.after = after
@@ -272,6 +384,14 @@ class BaseSymbol(object):
__slots__ = ()
@property
+ def container_name(self):
+ return self.container.name if self.container else ''
+
+ @property
+ def container_short_name(self):
+ return self.container.short_name if self.container else ''
+
+ @property
def section(self):
"""Returns the one-letter section."""
return SECTION_NAME_TO_SECTION[self.section_name]
@@ -392,6 +512,7 @@ class Symbol(BaseSymbol):
'object_path',
'aliases',
'padding',
+ 'container',
'section_name',
'source_path',
'size',
@@ -420,16 +541,21 @@ class Symbol(BaseSymbol):
self.flags = flags
self.aliases = aliases
self.padding = 0
+ self.container = None
self.component = ''
def __repr__(self):
- template = ('{}@{:x}(size_without_padding={},padding={},full_name={},'
+ if self.container and self.container.name:
+ container_str = '<{}>'.format(self.container.name)
+ else:
+ container_str = ''
+ template = ('{}{}@{:x}(size_without_padding={},padding={},full_name={},'
'object_path={},source_path={},flags={},num_aliases={},'
'component={})')
- return template.format(
- self.section_name, self.address, self.size_without_padding,
- self.padding, self.full_name, self.object_path, self.source_path,
- self.FlagsString(), self.num_aliases, self.component)
+ return template.format(container_str, self.section_name, self.address,
+ self.size_without_padding, self.padding,
+ self.full_name, self.object_path, self.source_path,
+ self.FlagsString(), self.num_aliases, self.component)
def SetName(self, full_name, template_name=None, name=None):
# Note that _NormalizeNames() will clobber these values.
@@ -468,13 +594,24 @@ class DeltaSymbol(BaseSymbol):
self.after_symbol = after_symbol
def __repr__(self):
- template = ('{}{}@{:x}(size_without_padding={},padding={},full_name={},'
+ before_container_name = (self.before_symbol.container_name
+ if self.before_symbol else None)
+ after_container_name = (self.after_symbol.container_name
+ if self.after_symbol else None)
+ if after_container_name:
+ if before_container_name != after_container_name:
+ container_str = '<~{}>'.format(after_container_name)
+ else:
+ container_str = '<{}>'.format(after_container_name)
+ else: # None or ''.
+ container_str = ''
+ template = ('{}{}{}@{:x}(size_without_padding={},padding={},full_name={},'
'object_path={},source_path={},flags={})')
- return template.format(
- DIFF_PREFIX_BY_STATUS[self.diff_status], self.section_name,
- self.address, self.size_without_padding, self.padding,
- self.full_name, self.object_path, self.source_path,
- self.FlagsString())
+ return template.format(DIFF_PREFIX_BY_STATUS[self.diff_status],
+ container_str, self.section_name, self.address,
+ self.size_without_padding, self.padding,
+ self.full_name, self.object_path, self.source_path,
+ self.FlagsString())
def IsDelta(self):
return True
@@ -531,6 +668,10 @@ class DeltaSymbol(BaseSymbol):
return None
@property
+ def container(self):
+ return (self.after_symbol or self.before_symbol).container
+
+ @property
def section_name(self):
return (self.after_symbol or self.before_symbol).section_name
@@ -617,6 +758,7 @@ class SymbolGroup(BaseSymbol):
'is_default_sorted', # True for groups created by Sorted()
)
+
# template_name and full_name are useful when clustering symbol clones.
def __init__(self,
symbols,
@@ -626,6 +768,7 @@ class SymbolGroup(BaseSymbol):
name='',
section_name=None,
is_default_sorted=False):
+ assert isinstance(symbols, list) # Rejects non-reusable generators.
self._padding = None
self._size = None
self._pss = None
@@ -681,6 +824,20 @@ class SymbolGroup(BaseSymbol):
return self._symbols.index(item)
@property
+ def container_name(self):
+ ret = set(s.container_name for s in self._symbols)
+ if ret:
+ return CONTAINER_MULTIPLE if len(ret) > 1 else (ret.pop() or '')
+ return ''
+
+ @property
+ def container_short_name(self):
+ ret = set(s.container_short_name for s in self._symbols)
+ if ret:
+ return CONTAINER_MULTIPLE if len(ret) > 1 else (ret.pop() or '')
+ return ''
+
+ @property
def address(self):
first = self._symbols[0].address if self else 0
return first if all(s.address == first for s in self._symbols) else 0
@@ -744,16 +901,20 @@ class SymbolGroup(BaseSymbol):
self.template_name = full_name if template_name is None else template_name
self.name = full_name if name is None else name
- def IterUniqueSymbols(self):
- """Yields all symbols, but only one from each alias group."""
+ @staticmethod
+ def _IterUnique(symbol_iter):
seen_aliases_lists = set()
- for s in self:
+ for s in symbol_iter:
if not s.aliases:
yield s
elif id(s.aliases) not in seen_aliases_lists:
seen_aliases_lists.add(id(s.aliases))
yield s
+ def IterUniqueSymbols(self):
+ """Yields all symbols, but only one from each alias group."""
+ return SymbolGroup._IterUnique(self)
+
def IterLeafSymbols(self):
"""Yields all symbols, recursing into subgroups."""
for s in self:
@@ -775,14 +936,13 @@ class SymbolGroup(BaseSymbol):
is_default_sorted = self.is_default_sorted
if section_name is None:
section_name = self.section_name
- return self.__class__(
- symbols,
- filtered_symbols=filtered_symbols,
- full_name=full_name,
- template_name=template_name,
- name=name,
- section_name=section_name,
- is_default_sorted=is_default_sorted)
+ return self.__class__(symbols,
+ filtered_symbols=filtered_symbols,
+ full_name=full_name,
+ template_name=template_name,
+ name=name,
+ section_name=section_name,
+ is_default_sorted=is_default_sorted)
def Sorted(self, cmp_func=None, key=None, reverse=False):
"""Sorts by abs(PSS)."""
@@ -832,13 +992,23 @@ class SymbolGroup(BaseSymbol):
def WherePssBiggerThan(self, min_pss):
return self.Filter(lambda s: s.pss >= min_pss)
- def WhereInSection(self, section):
+ def WhereInSection(self, section, container=None):
"""|section| can be section_name ('.bss'), or section chars ('bdr')."""
if section.startswith('.'):
- ret = self.Filter(lambda s: s.section_name == section)
+ if container:
+ short_name = container.short_name
+        ret = self.Filter(lambda s: (s.container.short_name == short_name
+                                     and s.section_name == section))
+ else:
+ ret = self.Filter(lambda s: s.section_name == section)
ret.section_name = section
else:
- ret = self.Filter(lambda s: s.section in section)
+ if container:
+ short_name = container.short_name
+        ret = self.Filter(lambda s: (s.container.short_name == short_name
+                                     and s.section in section))
+ else:
+ ret = self.Filter(lambda s: s.section in section)
if section in SECTION_TO_SECTION_NAME:
ret.section_name = SECTION_TO_SECTION_NAME[section]
return ret
@@ -937,10 +1107,9 @@ class SymbolGroup(BaseSymbol):
# Symbols within third_party that do not contain the string "foo".
symbols.WherePathMatches(r'third_party').WhereMatches('foo').Inverted()
"""
- return self._CreateTransformed(
- self._filtered_symbols,
- filtered_symbols=self._symbols,
- section_name=SECTION_MULTIPLE)
+ return self._CreateTransformed(self._filtered_symbols,
+ filtered_symbols=self._symbols,
+ section_name=SECTION_MULTIPLE)
def GroupedBy(self, func, min_count=0, group_factory=None):
"""Returns a SymbolGroup of SymbolGroups, indexed by |func|.
@@ -1042,18 +1211,16 @@ class SymbolGroup(BaseSymbol):
full_name = token[1]
sym = symbols[0]
if token[1].startswith('*'):
- return self._CreateTransformed(
- symbols,
- full_name=full_name,
- template_name=full_name,
- name=full_name,
- section_name=sym.section_name)
- return self._CreateTransformed(
- symbols,
- full_name=full_name,
- template_name=sym.template_name,
- name=sym.name,
- section_name=sym.section_name)
+ return self._CreateTransformed(symbols,
+ full_name=full_name,
+ template_name=full_name,
+ name=full_name,
+ section_name=sym.section_name)
+ return self._CreateTransformed(symbols,
+ full_name=full_name,
+ template_name=sym.template_name,
+ name=sym.name,
+ section_name=sym.section_name)
  # A full second faster to cluster per-section. Plus, don't need to create
# (section_name, name) tuples in cluster_func.
@@ -1080,17 +1247,19 @@ class SymbolGroup(BaseSymbol):
"""
def group_factory(_, symbols):
sym = symbols[0]
- return self._CreateTransformed(
- symbols,
- full_name=sym.full_name,
- template_name=sym.template_name,
- name=sym.name,
- section_name=sym.section_name)
+ return self._CreateTransformed(symbols,
+ full_name=sym.full_name,
+ template_name=sym.template_name,
+ name=sym.name,
+ section_name=sym.section_name)
return self.GroupedBy(
lambda s: (same_name_only and s.full_name, id(s.aliases or s)),
min_count=min_count, group_factory=group_factory)
+ def GroupedByContainerAndSectionName(self):
+ return self.GroupedBy(lambda s: (s.container_name, s.section_name))
+
def GroupedBySectionName(self):
return self.GroupedBy(lambda s: s.section_name)
@@ -1195,10 +1364,12 @@ class DeltaSymbolGroup(SymbolGroup):
def CountUniqueSymbols(self):
"""Returns (num_unique_before_symbols, num_unique_after_symbols)."""
- syms = (s.before_symbol for s in self.IterLeafSymbols() if s.before_symbol)
- before_count = SymbolGroup(syms).CountUniqueSymbols()
- syms = (s.after_symbol for s in self.IterLeafSymbols() if s.after_symbol)
- after_count = SymbolGroup(syms).CountUniqueSymbols()
+ syms_iter = (s.before_symbol for s in self.IterLeafSymbols()
+ if s.before_symbol)
+ before_count = sum(1 for _ in SymbolGroup._IterUnique(syms_iter))
+ syms_iter = (s.after_symbol for s in self.IterLeafSymbols()
+ if s.after_symbol)
+ after_count = sum(1 for _ in SymbolGroup._IterUnique(syms_iter))
return before_count, after_count
@property
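
Putting the new model pieces together, a small usage sketch (assumes libsupersize is on sys.path; names and values are invented):

import models

containers = [
    models.Container(name='base.apk',
                     metadata={'apk_size': 123},
                     section_sizes={models.SECTION_TEXT: 100}),
]
info = models.SizeInfo(build_config={'git_revision': 'abcdef0'},
                       containers=containers,
                       raw_symbols=[])
print(info.containers[0].short_name)  # '0', assigned by AssignShortNames().
print(info.metadata_legacy)  # {'apk_size': 123, 'git_revision': 'abcdef0'}
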
diff --git a/chromium/tools/binary_size/libsupersize/parallel.py b/chromium/tools/binary_size/libsupersize/parallel.py
index 19cef2d6c21..0467864c62f 100644
--- a/chromium/tools/binary_size/libsupersize/parallel.py
+++ b/chromium/tools/binary_size/libsupersize/parallel.py
@@ -192,12 +192,12 @@ def BulkForkAndCall(func, arg_tuples, **kwargs):
"""Calls |func| in a fork'ed process for each set of args within |arg_tuples|.
Args:
- kwargs: Common key word arguments to be passed to |func|.
+ kwargs: Common keyword arguments to be passed to |func|.
Yields the return values as they come in.
"""
arg_tuples = list(arg_tuples)
- if not len(arg_tuples):
+ if not arg_tuples:
return
if DISABLE_ASYNC: