author     Jeremy Bettis <jbettis@google.com>    2022-04-12 14:17:45 -0600
committer  Chromeos LUCI <chromeos-scoped@luci-project-accounts.iam.gserviceaccount.com>    2022-04-12 22:10:00 +0000
commit     205bf7ab0a862acff1ba964483c651b86e8406e5 (patch)
tree       12ebdcb5ed5fe2ad6aa5620b60dcd6e9d5f9503b
parent     36e6bbfe34443a3100e07db8479195cd9baa6d39 (diff)
download   chrome-ec-205bf7ab0a862acff1ba964483c651b86e8406e5.tar.gz
zmake: Cleanup all the pylint warnings in zmake
Added type hints in several places, either to correct a warning or to
cause one. For example, if pylint can't tell the type of an inherited
method, it will report unused params, but if the params exactly match
the superclass, it will not. Some cases were because I tried to find all
references to a class or function, and the IDE couldn't find them.

Moved loose logging functions in multiproc into class methods of
LogWriter.

Refactored pack_firmware and _get_max_image_bytes to avoid problems with
the various packers taking different numbers of params and subclasses
not implementing abstract methods.

Stopped disabling no-self-use and fixed those warnings instead. Removed
JobClient.run() because it's never called. Removed JobServer, because it
does nothing and is not used as a marker. Fixed many small warnings as
well. Disabled most of the too-many-* warnings in specific functions.

Fixed the imports in .pylintrc, as it was finding the emerged zmake, not
the local one. Disabled wrong-import-order since isort and pylint don't
agree.

BRANCH=None
BUG=b:217969201
TEST=zmake -j8 test -a
zephyr/zmake/run_tests.sh
find zephyr/zmake -name '*.py' -print0 | xargs -0 cros lint

Signed-off-by: Jeremy Bettis <jbettis@google.com>
Change-Id: I4f1b3a9fa5881fe7bcc4a6065d326dd36701b5ef
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3583754
Commit-Queue: Jeremy Bettis <jbettis@chromium.org>
Tested-by: Jeremy Bettis <jbettis@chromium.org>
Auto-Submit: Jeremy Bettis <jbettis@chromium.org>
Reviewed-by: Jack Rosenthal <jrosenth@chromium.org>
Commit-Queue: Jack Rosenthal <jrosenth@chromium.org>
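To make the pack_firmware refactor concrete: every packer now receives its
build directories as one dir_map dict instead of per-config keyword
arguments. A minimal, self-contained sketch of the new calling convention
(the class name and paths below are illustrative stand-ins, not zmake's
actual code):

    import pathlib
    from typing import Dict

    class FakeRawBinPacker:
        """Stand-in mirroring the refactored packer interface."""

        def pack_firmware(self, work_dir, jobclient,
                          dir_map: Dict[str, pathlib.Path], version_string=""):
            del work_dir, jobclient, version_string  # unused in this sketch
            # One signature for all packers; build dirs come from dir_map.
            yield dir_map["singleimage"] / "zephyr" / "zephyr.bin", "zephyr.bin"

    dirs = {"singleimage": pathlib.Path("/tmp/build/singleimage")}  # hypothetical
    for src, name in FakeRawBinPacker().pack_firmware(None, None, dirs):
        print(src, "->", name)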
-rw-r--r--  zephyr/zmake/.pylintrc                        |   4
-rw-r--r--  zephyr/zmake/README.md                        |   4
-rw-r--r--  zephyr/zmake/tests/test_build_config.py       |   6
-rw-r--r--  zephyr/zmake/tests/test_multiproc_logging.py  |  12
-rw-r--r--  zephyr/zmake/tests/test_packers.py            |  27
-rw-r--r--  zephyr/zmake/tests/test_zmake.py              |  29
-rw-r--r--  zephyr/zmake/zmake/__main__.py                |   3
-rw-r--r--  zephyr/zmake/zmake/build_config.py            |  18
-rw-r--r--  zephyr/zmake/zmake/configlib.py               |   5
-rw-r--r--  zephyr/zmake/zmake/generate_readme.py         |  11
-rw-r--r--  zephyr/zmake/zmake/jobserver.py               |  37
-rw-r--r--  zephyr/zmake/zmake/modules.py                 |   3
-rw-r--r--  zephyr/zmake/zmake/multiproc.py               | 319
-rw-r--r--  zephyr/zmake/zmake/output_packers.py          | 100
-rw-r--r--  zephyr/zmake/zmake/project.py                 |  51
-rw-r--r--  zephyr/zmake/zmake/toolchains.py              |   3
-rw-r--r--  zephyr/zmake/zmake/util.py                    |  25
-rw-r--r--  zephyr/zmake/zmake/version.py                 |   2
-rw-r--r--  zephyr/zmake/zmake/zmake.py                   | 121
19 files changed, 409 insertions(+), 371 deletions(-)
diff --git a/zephyr/zmake/.pylintrc b/zephyr/zmake/.pylintrc
index 341cb4d687..fd798870dd 100644
--- a/zephyr/zmake/.pylintrc
+++ b/zephyr/zmake/.pylintrc
@@ -1,9 +1,9 @@
[MASTER]
-init-hook='import sys; sys.path.append("/usr/lib64/python3.6/site-packages")'
+init-hook='import sys; sys.path.extend(["zephyr/zmake", "/usr/lib64/python3.6/site-packages"])'
[MESSAGES CONTROL]
-disable=bad-continuation,bad-whitespace,format,fixme
+disable=bad-continuation,bad-whitespace,format,fixme,wrong-import-order
[format]
diff --git a/zephyr/zmake/README.md b/zephyr/zmake/README.md
index 12125d8213..6e2690959b 100644
--- a/zephyr/zmake/README.md
+++ b/zephyr/zmake/README.md
@@ -85,7 +85,7 @@ Chromium OS's meta-build tool for Zephyr
### zmake list-projects
-**Usage:** `zmake list-projects [-h] [--format FORMAT] [search_dir]`
+**Usage:** `zmake list-projects [-h] [--format FMT] [search_dir]`
#### Positional Arguments
@@ -98,7 +98,7 @@ Chromium OS's meta-build tool for Zephyr
| | |
|---|---|
| `-h`, `--help` | show this help message and exit |
-| `--format FORMAT` | Output format to print projects (str.format(config=project.config) is called on this for each project). |
+| `--format FMT` | Output format to print projects (str.format(config=project.config) is called on this for each project). |
### zmake test
diff --git a/zephyr/zmake/tests/test_build_config.py b/zephyr/zmake/tests/test_build_config.py
index 8fb1d2d3d0..76cc0a2028 100644
--- a/zephyr/zmake/tests/test_build_config.py
+++ b/zephyr/zmake/tests/test_build_config.py
@@ -150,7 +150,7 @@ def parse_cmake_args(argv):
@hypothesis.given(build_configs_no_kconfig, paths, paths)
@hypothesis.settings(deadline=60000)
-def test_popen_cmake_no_kconfig(conf, project_dir, build_dir):
+def test_popen_cmake_no_kconfig(conf: BuildConfig, project_dir, build_dir):
"""Test popen_cmake for a config with no kconfig definitions."""
job_client = FakeJobClient()
conf.popen_cmake(job_client, project_dir, build_dir)
@@ -163,7 +163,7 @@ def test_popen_cmake_no_kconfig(conf, project_dir, build_dir):
@hypothesis.given(build_configs_with_at_least_one_kconfig, paths, paths)
@hypothesis.settings(deadline=60000)
-def test_popen_cmake_kconfig_but_no_file(conf, project_dir, build_dir):
+def test_popen_cmake_kconfig_but_no_file(conf: BuildConfig, project_dir, build_dir):
"""Test that running popen_cmake with Kconfig definitions to write
out, but no path to do so, should raise an error.
"""
@@ -175,7 +175,7 @@ def test_popen_cmake_kconfig_but_no_file(conf, project_dir, build_dir):
@hypothesis.given(build_configs, paths, paths)
@hypothesis.settings(deadline=60000)
-def test_popen_cmake_kconfig(conf, project_dir, build_dir):
+def test_popen_cmake_kconfig(conf: BuildConfig, project_dir, build_dir):
"""Test calling popen_cmake and verifying the kconfig_files."""
job_client = FakeJobClient()
diff --git a/zephyr/zmake/tests/test_multiproc_logging.py b/zephyr/zmake/tests/test_multiproc_logging.py
index a64501f5a8..2694b6451e 100644
--- a/zephyr/zmake/tests/test_multiproc_logging.py
+++ b/zephyr/zmake/tests/test_multiproc_logging.py
@@ -20,7 +20,7 @@ def test_read_output_from_pipe():
file_desc = io.TextIOWrapper(os.fdopen(pipe[0], "rb"), encoding="utf-8")
logger = mock.Mock(spec=logging.Logger)
logger.log.side_effect = lambda log_lvl, line: semaphore.release()
- zmake.multiproc.log_output(logger, logging.DEBUG, file_desc, job_id="")
+ zmake.multiproc.LogWriter.log_output(logger, logging.DEBUG, file_desc, job_id="")
os.write(pipe[1], "Hello\n".encode("utf-8"))
semaphore.acquire()
logger.log.assert_called_with(logging.DEBUG, "Hello")
@@ -36,7 +36,7 @@ def test_read_output_change_log_level():
# This call will log output from fd (the file descriptor) to DEBUG, though
# when the line starts with 'World', the logging level will be switched to
# CRITICAL (see the content of the log_lvl_override_func).
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
logger=logger,
log_level=logging.DEBUG,
file_descriptor=file_desc,
@@ -77,8 +77,8 @@ def test_read_output_from_second_pipe():
logger = mock.Mock(spec=logging.Logger)
logger.log.side_effect = lambda log_lvl, fmt, id, line: semaphore.release()
- zmake.multiproc.log_output(logger, logging.DEBUG, fds[0], job_id="0")
- zmake.multiproc.log_output(logger, logging.ERROR, fds[1], job_id="1")
+ zmake.multiproc.LogWriter.log_output(logger, logging.DEBUG, fds[0], job_id="0")
+ zmake.multiproc.LogWriter.log_output(logger, logging.ERROR, fds[1], job_id="1")
os.write(pipes[1][1], "Hello\n".encode("utf-8"))
semaphore.acquire()
@@ -102,8 +102,8 @@ def test_read_output_after_another_pipe_closed():
logger = mock.Mock(spec=logging.Logger)
logger.log.side_effect = lambda log_lvl, fmt, id, line: semaphore.release()
- zmake.multiproc.log_output(logger, logging.DEBUG, fds[0], job_id="0")
- zmake.multiproc.log_output(logger, logging.ERROR, fds[1], job_id="1")
+ zmake.multiproc.LogWriter.log_output(logger, logging.DEBUG, fds[0], job_id="0")
+ zmake.multiproc.LogWriter.log_output(logger, logging.ERROR, fds[1], job_id="1")
fds[0].close()
os.write(pipes[1][1], "Hello\n".encode("utf-8"))
diff --git a/zephyr/zmake/tests/test_packers.py b/zephyr/zmake/tests/test_packers.py
index f7b91ac7be..43b63a908f 100644
--- a/zephyr/zmake/tests/test_packers.py
+++ b/zephyr/zmake/tests/test_packers.py
@@ -20,14 +20,19 @@ absolute_path = st.from_regex(regex=r"\A/[\w/]*\Z")
class FakePacker(packers.BasePacker):
"""Fake packer to expose protected methods."""
- def check_packed_file_size(self, file, dirs):
+ def __init__(self, max_size):
+ super().__init__(project=None)
+ self.max_size = max_size
+
+ def check_packed_file_size(self, file, dir_map):
"""Expose the _check_packed_file_size method."""
- return self._check_packed_file_size(file, dirs)
+ return self._check_packed_file_size(file, dir_map)
- def _get_max_image_bytes(self):
- return 100
+ def _get_max_image_bytes(self, dir_map):
+ return self.max_size
- def pack_firmware(self, work_dir, jobclient, version_string=""):
+ def pack_firmware(self, work_dir, jobclient, dir_map, version_string=""):
+ del version_string
assert False
@@ -35,34 +40,34 @@ class FakePacker(packers.BasePacker):
@hypothesis.settings(deadline=60000)
def test_file_size_unbounded(data):
"""Test with file size unbounded."""
- packer = FakePacker(project=None)
+ packer = FakePacker(None)
with tempfile.TemporaryDirectory() as temp_dir_name:
file = pathlib.Path(temp_dir_name) / "zephyr.elf"
with open(file, "wb") as outfile:
outfile.write(data)
- assert packer.check_packed_file_size(file=file, dirs={}) == file
+ assert packer.check_packed_file_size(file=file, dir_map={}) == file
@hypothesis.given(st.binary(min_size=5, max_size=100))
@hypothesis.settings(deadline=60000)
def test_file_size_in_bounds(data):
"""Test with file size limited."""
- packer = FakePacker(project=None)
+ packer = FakePacker(100)
with tempfile.TemporaryDirectory() as temp_dir_name:
file = pathlib.Path(temp_dir_name) / "zephyr.bin"
with open(file, "wb") as outfile:
outfile.write(data)
- assert packer.check_packed_file_size(file=file, dirs={}) == file
+ assert packer.check_packed_file_size(file=file, dir_map={}) == file
@hypothesis.given(st.binary(min_size=101, max_size=200))
@hypothesis.settings(deadline=60000)
def test_file_size_out_of_bounds(data):
"""Test with file size limited, and file exceeds limit."""
- packer = FakePacker(project=None)
+ packer = FakePacker(100)
with tempfile.TemporaryDirectory() as temp_dir_name:
file = pathlib.Path(temp_dir_name) / "zephyr.bin"
with open(file, "wb") as outfile:
outfile.write(data)
with pytest.raises(RuntimeError):
- packer.check_packed_file_size(file=file, dirs={})
+ packer.check_packed_file_size(file=file, dir_map={})
diff --git a/zephyr/zmake/tests/test_zmake.py b/zephyr/zmake/tests/test_zmake.py
index ee00330c27..4ca1d7f077 100644
--- a/zephyr/zmake/tests/test_zmake.py
+++ b/zephyr/zmake/tests/test_zmake.py
@@ -26,7 +26,7 @@ OUR_PATH = os.path.dirname(os.path.realpath(__file__))
class FakeProject:
"""A fake project which requests two builds and does no packing"""
- # pylint: disable=too-few-public-methods,no-self-use
+ # pylint: disable=too-few-public-methods
def __init__(self):
self.packer = unittest.mock.Mock()
@@ -46,15 +46,18 @@ class FakeProject:
yield "ro", zmake.build_config.BuildConfig()
yield "rw", zmake.build_config.BuildConfig()
- def prune_modules(self, _):
+ @staticmethod
+ def prune_modules(_):
"""Fake implementation of prune_modules."""
return {} # pathlib.Path('path')]
- def find_dts_overlays(self, _):
+ @staticmethod
+ def find_dts_overlays(_):
"""Fake implementation of find_dts_overlays."""
return zmake.build_config.BuildConfig()
- def get_toolchain(self, module_paths, override=None):
+ @staticmethod
+ def get_toolchain(module_paths, override=None):
"""Fake implementation of get_toolchain."""
return zmake.toolchains.GenericToolchain(
override or "foo",
@@ -141,7 +144,7 @@ def do_test_with_log_level(zmake_factory_from_dir, log_level, fnames=None):
["fakeproject"],
clobber=True,
)
- multiproc.wait_for_log_end()
+ multiproc.LogWriter.wait_for_log_end()
recs = [rec.getMessage() for rec in cap.records]
return recs
@@ -150,14 +153,14 @@ def do_test_with_log_level(zmake_factory_from_dir, log_level, fnames=None):
class TestFilters:
"""Test filtering of stdout and stderr"""
- # pylint: disable=no-self-use
-
- def test_filter_normal(self, zmake_factory_from_dir):
+ @staticmethod
+ def test_filter_normal(zmake_factory_from_dir):
"""Test filtering of a normal build (with no errors)"""
recs = do_test_with_log_level(zmake_factory_from_dir, logging.ERROR)
assert not recs
- def test_filter_info(self, zmake_factory_from_dir, tmp_path):
+ @staticmethod
+ def test_filter_info(zmake_factory_from_dir, tmp_path):
"""Test what appears on the INFO level"""
recs = do_test_with_log_level(zmake_factory_from_dir, logging.INFO)
# TODO: Remove sets and figure out how to check the lines are in the
@@ -180,7 +183,8 @@ class TestFilters:
# This produces an easy-to-read diff if there is a difference
assert expected == set(recs)
- def test_filter_debug(self, zmake_factory_from_dir, tmp_path):
+ @staticmethod
+ def test_filter_debug(zmake_factory_from_dir, tmp_path):
"""Test what appears on the DEBUG level"""
recs = do_test_with_log_level(zmake_factory_from_dir, logging.DEBUG)
# TODO: Remove sets and figure out how to check the lines are in the
@@ -205,7 +209,8 @@ class TestFilters:
# This produces an easy-to-read diff if there is a difference
assert expected == set(recs)
- def test_filter_devicetree_error(self, zmake_factory_from_dir):
+ @staticmethod
+ def test_filter_devicetree_error(zmake_factory_from_dir):
"""Test that devicetree errors appear"""
recs = do_test_with_log_level(
zmake_factory_from_dir,
@@ -278,7 +283,7 @@ def test_list_projects(
autospec=True,
return_value=fake_projects,
):
- zmake_from_dir.list_projects(format=fmt, search_dir=search_dir)
+ zmake_from_dir.list_projects(fmt=fmt, search_dir=search_dir)
captured = capsys.readouterr()
assert captured.out == expected_output
diff --git a/zephyr/zmake/zmake/__main__.py b/zephyr/zmake/zmake/__main__.py
index 9ad509dfc5..fd1dfec13a 100644
--- a/zephyr/zmake/zmake/__main__.py
+++ b/zephyr/zmake/zmake/__main__.py
@@ -195,6 +195,7 @@ def get_argparser():
list_projects.add_argument(
"--format",
default="{config.project_name}\n",
+ dest="fmt",
help=(
"Output format to print projects (str.format(config=project.config) is "
"called on this for each project)."
@@ -351,7 +352,7 @@ def main(argv=None):
wait_rv = zmake.executor.wait()
return result or wait_rv
finally:
- multiproc.wait_for_log_end()
+ multiproc.LogWriter.wait_for_log_end()
if __name__ == "__main__":
diff --git a/zephyr/zmake/zmake/build_config.py b/zephyr/zmake/zmake/build_config.py
index ced2c9085c..0d03e22a45 100644
--- a/zephyr/zmake/zmake/build_config.py
+++ b/zephyr/zmake/zmake/build_config.py
@@ -7,6 +7,7 @@ import hashlib
import json
import pathlib
+import zmake.jobserver
import zmake.util as util
@@ -18,11 +19,11 @@ class BuildConfig:
"""
def __init__(
- self, environ_defs={}, cmake_defs={}, kconfig_defs={}, kconfig_files=[]
+ self, environ_defs=None, cmake_defs=None, kconfig_defs=None, kconfig_files=None
):
- self.environ_defs = dict(environ_defs)
- self.cmake_defs = dict(cmake_defs)
- self.kconfig_defs = dict(kconfig_defs)
+ self.environ_defs = dict(environ_defs or {})
+ self.cmake_defs = dict(cmake_defs or {})
+ self.kconfig_defs = dict(kconfig_defs or {})
def _remove_duplicate_paths(files):
# Remove multiple of the same kconfig file in a row.
@@ -32,10 +33,15 @@ class BuildConfig:
result.append(path)
return result
- self.kconfig_files = _remove_duplicate_paths(kconfig_files)
+ self.kconfig_files = _remove_duplicate_paths(kconfig_files or [])
def popen_cmake(
- self, jobclient, project_dir, build_dir, kconfig_path=None, **kwargs
+ self,
+ jobclient: zmake.jobserver.JobClient,
+ project_dir,
+ build_dir,
+ kconfig_path=None,
+ **kwargs
):
"""Run Cmake with this config using a jobclient.
diff --git a/zephyr/zmake/zmake/configlib.py b/zephyr/zmake/zmake/configlib.py
index a0463f1a86..a5c78ad22d 100644
--- a/zephyr/zmake/zmake/configlib.py
+++ b/zephyr/zmake/zmake/configlib.py
@@ -15,6 +15,7 @@ def _register_project(**kwargs):
def register_host_project(**kwargs):
+ """Register a project that runs on a posix host."""
kwargs.setdefault("zephyr_board", "native_posix")
kwargs.setdefault("supported_toolchains", ["llvm", "host"])
kwargs.setdefault("output_packer", zmake.output_packers.ElfPacker)
@@ -22,21 +23,25 @@ def register_host_project(**kwargs):
def register_host_test(test_name, **kwargs):
+ """Register a test project that runs on the host."""
kwargs.setdefault("is_test", True)
return register_host_project(project_name="test-{}".format(test_name), **kwargs)
def register_raw_project(**kwargs):
+ """Register a project that uses RawBinPacker."""
kwargs.setdefault("supported_toolchains", ["coreboot-sdk", "zephyr"])
kwargs.setdefault("output_packer", zmake.output_packers.RawBinPacker)
return _register_project(**kwargs)
def register_binman_project(**kwargs):
+ """Register a project that uses BinmanPacker."""
kwargs.setdefault("output_packer", zmake.output_packers.BinmanPacker)
return register_raw_project(**kwargs)
def register_npcx_project(**kwargs):
+ """Register a project that uses NpcxPacker."""
kwargs.setdefault("output_packer", zmake.output_packers.NpcxPacker)
return register_binman_project(**kwargs)
diff --git a/zephyr/zmake/zmake/generate_readme.py b/zephyr/zmake/zmake/generate_readme.py
index d21fd19f7c..9008f0f45d 100644
--- a/zephyr/zmake/zmake/generate_readme.py
+++ b/zephyr/zmake/zmake/generate_readme.py
@@ -25,8 +25,8 @@ class MarkdownHelpFormatter(argparse.HelpFormatter):
lst = self._section_contents
lst.append(text)
- def start_section(self, title):
- self._section_title = title.title()
+ def start_section(self, heading):
+ self._section_title = heading.title()
self._section_contents = []
def end_section(self):
@@ -35,7 +35,7 @@ class MarkdownHelpFormatter(argparse.HelpFormatter):
self._paragraphs.extend(self._section_contents)
self._section_title = None
- def add_usage(self, usage, actions, groups):
+ def add_usage(self, usage, actions, groups, prefix=None):
if not usage:
usage = self._prog
self.add_text(
@@ -55,8 +55,7 @@ class MarkdownHelpFormatter(argparse.HelpFormatter):
else:
parts.append(f"{option_string} {_get_metavar(action).upper()}")
return ", ".join(f"`{part}`" for part in parts)
- else:
- return f"`{_get_metavar(action)}`"
+ return f"`{_get_metavar(action)}`"
def _get_table_line(action):
return f"| {_format_invocation(action)} | {action.help} |"
@@ -89,7 +88,7 @@ def generate_readme():
# Normally, this would not be required, since we don't use from
# imports. But runpy's import machinery essentially does the
# equivalent of a from import on __main__.py.
- import zmake.__main__
+ import zmake.__main__ # pylint: disable=import-outside-toplevel
output = io.StringIO()
parser, sub_action = zmake.__main__.get_argparser()
diff --git a/zephyr/zmake/zmake/jobserver.py b/zephyr/zmake/zmake/jobserver.py
index 0d02a2c8d6..16f856e607 100644
--- a/zephyr/zmake/zmake/jobserver.py
+++ b/zephyr/zmake/zmake/jobserver.py
@@ -35,7 +35,8 @@ class JobClient:
"""Claim a job."""
raise NotImplementedError("Abstract method not implemented")
- def env(self):
+ @staticmethod
+ def env():
"""Get the environment variables necessary to share the job server."""
return {}
@@ -54,35 +55,10 @@ class JobClient:
logger.debug("Running %s", zmake.util.repr_command(argv))
return subprocess.Popen(argv, **kwargs)
- def run(self, *args, claim_job=True, **kwargs):
- """Run a process using subprocess.run, optionally claiming a job.
-
- Args:
- claim_job: True if a job should be claimed.
-
- All other arguments are passed to subprocess.run.
-
- Returns:
- A CompletedProcess object.
- """
- if claim_job:
- with self.get_job():
- return self.run(*args, claim_job=False, **kwargs)
-
- kwargs.setdefault("env", os.environ)
- kwargs["env"].update(self.env())
-
- return subprocess.run(*args, **kwargs)
-
-
-class JobServer(JobClient):
- """Abstract Job Server."""
-
- def __init__(self, jobs=0):
- raise NotImplementedError("Abstract method not implemented")
-
class GNUMakeJobClient(JobClient):
+ """A job client for GNU make."""
+
def __init__(self, read_fd, write_fd):
self._pipe = [read_fd, write_fd]
@@ -127,7 +103,7 @@ class GNUMakeJobClient(JobClient):
return {"MAKEFLAGS": "--jobserver-auth={},{}".format(*self._pipe)}
-class GNUMakeJobServer(JobServer, GNUMakeJobClient):
+class GNUMakeJobServer(GNUMakeJobClient):
"""Implements a GNU Make POSIX Job Server.
See https://www.gnu.org/software/make/manual/html_node/POSIX-Jobserver.html
@@ -135,10 +111,11 @@ class GNUMakeJobServer(JobServer, GNUMakeJobClient):
"""
def __init__(self, jobs=0):
+ [read_fd, write_fd] = os.pipe()
+ super().__init__(read_fd, write_fd)
if not jobs:
jobs = multiprocessing.cpu_count()
elif jobs > select.PIPE_BUF:
jobs = select.PIPE_BUF
- self._pipe = os.pipe()
os.write(self._pipe[1], b"+" * jobs)
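For reference, GNUMakeJobServer above implements the GNU make POSIX jobserver
protocol: a pipe pre-loaded with one token byte per job slot, where a client
claims a slot by reading a byte and releases it by writing the byte back, and
MAKEFLAGS advertises the pipe to child processes. A standalone sketch of the
protocol (not the zmake class itself):

    import multiprocessing
    import os

    read_fd, write_fd = os.pipe()
    jobs = multiprocessing.cpu_count()
    os.write(write_fd, b"+" * jobs)    # pre-load one token per job slot

    token = os.read(read_fd, 1)        # claim a slot (blocks when none free)
    env = {"MAKEFLAGS": "--jobserver-auth={},{}".format(read_fd, write_fd)}
    os.write(write_fd, token)          # release the slot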
diff --git a/zephyr/zmake/zmake/modules.py b/zephyr/zmake/zmake/modules.py
index 5ba0ef73f8..91f0dd50b9 100644
--- a/zephyr/zmake/zmake/modules.py
+++ b/zephyr/zmake/zmake/modules.py
@@ -95,5 +95,4 @@ def setup_module_symlinks(output_dir, modules):
return build_config.BuildConfig(
cmake_defs={"ZEPHYR_MODULES": ";".join(map(str, module_links))}
)
- else:
- return build_config.BuildConfig()
+ return build_config.BuildConfig()
diff --git a/zephyr/zmake/zmake/multiproc.py b/zephyr/zmake/zmake/multiproc.py
index 94f5f5b69d..7d9a88de5a 100644
--- a/zephyr/zmake/zmake/multiproc.py
+++ b/zephyr/zmake/zmake/multiproc.py
@@ -1,11 +1,6 @@
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import collections
-import logging
-import os
-import select
-import threading
"""Zmake multiprocessing utility module.
@@ -15,22 +10,16 @@ process does not need to finish before the output is available to the developer
on the screen.
"""
-# A local pipe use to signal the look that a new file descriptor was added and
-# should be included in the select statement.
-_logging_interrupt_pipe = os.pipe()
-# A condition variable used to synchronize logging operations.
-_logging_cv = threading.Condition()
-# A map of file descriptors to their LogWriter
-_logging_map = {}
-# Should we log job names or not
-log_job_names = True
-
-
-def reset():
- """Reset this module to its starting state (useful for tests)"""
- global _logging_map
+import collections
+import io
+import logging
+import os
+import select
+import threading
+from typing import Any, ClassVar, Dict, List
- _logging_map = {}
+# Should we log job names or not
+LOG_JOB_NAMES = True
class LogWriter:
@@ -52,7 +41,22 @@ class LogWriter:
_file_descriptor: The file descriptor being logged.
"""
- def __init__(
+    # A local pipe used to signal the loop that a new file descriptor was added and
+ # should be included in the select statement.
+ _logging_interrupt_pipe = os.pipe()
+ # A condition variable used to synchronize logging operations.
+ _logging_cv = threading.Condition()
+ # A map of file descriptors to their LogWriter
+ _logging_map: ClassVar[Dict[io.TextIOBase, "LogWriter"]] = {}
+ # The thread that handles the reading from pipes and writing to log.
+ _logging_thread = None
+
+ @classmethod
+ def reset(cls):
+ """Reset this module to its starting state (useful for tests)"""
+ LogWriter._logging_map.clear()
+
+ def __init__( # pylint: disable=too-many-arguments
self,
logger,
log_level,
@@ -89,7 +93,7 @@ class LogWriter:
# greatly simplifies the logic that is needed to update the log
# level.
self._log_level = self._override_func(line, self._log_level)
- if self._job_id and log_job_names:
+ if self._job_id and LOG_JOB_NAMES:
self._logger.log(self._log_level, "[%s]%s", self._job_id, line)
else:
self._logger.log(self._log_level, line)
@@ -111,141 +115,150 @@ class LogWriter:
This method will block execution until all the logs have been flushed out.
"""
- with _logging_cv:
- _logging_cv.wait_for(lambda: self._file_descriptor not in _logging_map)
+ with LogWriter._logging_cv:
+ LogWriter._logging_cv.wait_for(
+ lambda: self._file_descriptor not in LogWriter._logging_map
+ )
if self._tee_output:
self._tee_output.close()
self._tee_output = None
+ @classmethod
+ def _log_fd(cls, file_descriptor: io.TextIOBase):
+ """Log information from a single file descriptor.
-def _log_fd(fd):
- """Log information from a single file descriptor.
+ This function is BLOCKING. It will read from the given file descriptor until
+ either the end of line is read or EOF. Once EOF is read it will remove the
+ file descriptor from _logging_map so it will no longer be used.
+ Additionally, in some cases, the file descriptor will be closed (caused by
+ a call to Popen.wait()). In these cases, the file descriptor will also be
+ removed from the map as it is no longer valid.
+ """
+ with LogWriter._logging_cv:
+ writer = LogWriter._logging_map[file_descriptor]
+ if file_descriptor.closed:
+ del LogWriter._logging_map[file_descriptor]
+ LogWriter._logging_cv.notify_all()
+ return
+ line = file_descriptor.readline()
+ if not line:
+ # EOF
+ del LogWriter._logging_map[file_descriptor]
+ LogWriter._logging_cv.notify_all()
+ return
+ line = line.rstrip("\n")
+ if line:
+ writer.log_line(line)
+
+ @classmethod
+ def _prune_logging_fds(cls):
+ """Prune the current file descriptors under _logging_map.
+
+ This function will iterate over the logging map and check for closed file
+ descriptors. Every closed file descriptor will be removed.
+ """
+ with LogWriter._logging_cv:
+ remove = [
+ file_descriptor
+ for file_descriptor in LogWriter._logging_map
+ if file_descriptor.closed
+ ]
+ for file_descriptor in remove:
+ del LogWriter._logging_map[file_descriptor]
+ if remove:
+ LogWriter._logging_cv.notify_all()
+
+ @classmethod
+ def _logging_loop(cls):
+ """The primary logging thread loop.
+
+ This is the entry point of the logging thread. It will listen for (1) any
+ new data on the output file descriptors that were added via log_output() and
+ (2) any new file descriptors being added by log_output(). Once a file
+ descriptor is ready to be read, this function will call _log_fd to perform
+ the actual read and logging.
+ """
+ while True:
+ with LogWriter._logging_cv:
+ LogWriter._logging_cv.wait_for(lambda: LogWriter._logging_map)
+ keys: List[Any] = list(LogWriter._logging_map.keys())
+ keys.append(LogWriter._logging_interrupt_pipe[0])
+ try:
+ fds, _, _ = select.select(keys, [], [])
+ except ValueError:
+ # One of the file descriptors must be closed, prune them and try
+ # again.
+ LogWriter._prune_logging_fds()
+ continue
+ if LogWriter._logging_interrupt_pipe[0] in fds:
+ # We got a sentinel byte sent by log_output(), this is a signal used to
+ # break out of the blocking select.select call to tell us that the
+ # file descriptor set has changed. We just need to read the byte and
+ # remove this descriptor from the list. If we actually have data
+ # that should be read it will be read in the for loop below.
+ os.read(LogWriter._logging_interrupt_pipe[0], 1)
+ fds.remove(LogWriter._logging_interrupt_pipe[0])
+ for file in fds:
+ LogWriter._log_fd(file)
+
+ @classmethod
+ def log_output( # pylint: disable=too-many-arguments
+ cls,
+ logger,
+ log_level,
+ file_descriptor,
+ log_level_override_func=None,
+ job_id=None,
+ tee_output=None,
+ ):
+ """Log the output from the given file descriptor.
- This function is BLOCKING. It will read from the given file descriptor until
- either the end of line is read or EOF. Once EOF is read it will remove the
- file descriptor from _logging_map so it will no longer be used.
- Additionally, in some cases, the file descriptor will be closed (caused by
- a call to Popen.wait()). In these cases, the file descriptor will also be
- removed from the map as it is no longer valid.
- """
- with _logging_cv:
- writer = _logging_map[fd]
- if fd.closed:
- del _logging_map[fd]
- _logging_cv.notify_all()
- return
- line = fd.readline()
- if not line:
- # EOF
- del _logging_map[fd]
- _logging_cv.notify_all()
- return
- line = line.rstrip("\n")
- if line:
- writer.log_line(line)
-
-
-def _prune_logging_fds():
- """Prune the current file descriptors under _logging_map.
-
- This function will iterate over the logging map and check for closed file
- descriptors. Every closed file descriptor will be removed.
- """
- with _logging_cv:
- remove = [fd for fd in _logging_map.keys() if fd.closed]
- for fd in remove:
- del _logging_map[fd]
- if remove:
- _logging_cv.notify_all()
-
-
-def _logging_loop():
- """The primary logging thread loop.
-
- This is the entry point of the logging thread. It will listen for (1) any
- new data on the output file descriptors that were added via log_output() and
- (2) any new file descriptors being added by log_output(). Once a file
- descriptor is ready to be read, this function will call _log_fd to perform
- the actual read and logging.
- """
- while True:
- with _logging_cv:
- _logging_cv.wait_for(lambda: _logging_map)
- keys = list(_logging_map.keys()) + [_logging_interrupt_pipe[0]]
- try:
- fds, _, _ = select.select(keys, [], [])
- except ValueError:
- # One of the file descriptors must be closed, prune them and try
- # again.
- _prune_logging_fds()
- continue
- if _logging_interrupt_pipe[0] in fds:
- # We got a dummy byte sent by log_output(), this is a signal used to
- # break out of the blocking select.select call to tell us that the
- # file descriptor set has changed. We just need to read the byte and
- # remove this descriptor from the list. If we actually have data
- # that should be read it will be read in the for loop below.
- os.read(_logging_interrupt_pipe[0], 1)
- fds.remove(_logging_interrupt_pipe[0])
- for fd in fds:
- _log_fd(fd)
-
-
-_logging_thread = None
-
-
-def log_output(
- logger,
- log_level,
- file_descriptor,
- log_level_override_func=None,
- job_id=None,
- tee_output=None,
-):
- """Log the output from the given file descriptor.
-
- Args:
- logger: The logger object to use.
- log_level: The logging level to use.
- file_descriptor: The file descriptor to read from.
- log_level_override_func: A function used to override the log level. The
- function will be called once per line prior to logging and will be
- passed the arguments of the line and the default log level.
-
- Returns:
- LogWriter object for the resulting output
- """
- with _logging_cv:
- global _logging_thread
- if _logging_thread is None or not _logging_thread.is_alive():
- # First pass or thread must have died, create a new one.
- _logging_thread = threading.Thread(target=_logging_loop, daemon=True)
- _logging_thread.start()
-
- writer = LogWriter(
- logger,
- log_level,
- log_level_override_func,
- job_id,
- file_descriptor,
- tee_output=tee_output,
- )
- _logging_map[file_descriptor] = writer
- # Write a dummy byte to the pipe to break the select so we can add the
- # new fd.
- os.write(_logging_interrupt_pipe[1], b"x")
- # Notify the condition so we can run the select on the current fds.
- _logging_cv.notify_all()
- return writer
-
-
-def wait_for_log_end():
- """Wait for all the logs to be printed.
-
- This method will block execution until all the logs have been flushed out.
- """
- with _logging_cv:
- _logging_cv.wait_for(lambda: not _logging_map)
+ Args:
+ logger: The logger object to use.
+ log_level: The logging level to use.
+ file_descriptor: The file descriptor to read from.
+ log_level_override_func: A function used to override the log level. The
+ function will be called once per line prior to logging and will be
+ passed the arguments of the line and the default log level.
+
+ Returns:
+ LogWriter object for the resulting output
+ """
+ with LogWriter._logging_cv:
+ if (
+ LogWriter._logging_thread is None
+ or not LogWriter._logging_thread.is_alive()
+ ):
+ # First pass or thread must have died, create a new one.
+ LogWriter._logging_thread = threading.Thread(
+ target=LogWriter._logging_loop, daemon=True
+ )
+ LogWriter._logging_thread.start()
+
+ writer = LogWriter(
+ logger,
+ log_level,
+ log_level_override_func,
+ job_id,
+ file_descriptor,
+ tee_output=tee_output,
+ )
+ LogWriter._logging_map[file_descriptor] = writer
+ # Write a sentinel byte to the pipe to break the select so we can add the
+ # new fd.
+ os.write(LogWriter._logging_interrupt_pipe[1], b"x")
+ # Notify the condition so we can run the select on the current fds.
+ LogWriter._logging_cv.notify_all()
+ return writer
+
+ @classmethod
+ def wait_for_log_end(cls):
+ """Wait for all the logs to be printed.
+
+ This method will block execution until all the logs have been flushed out.
+ """
+ with LogWriter._logging_cv:
+ LogWriter._logging_cv.wait_for(lambda: not LogWriter._logging_map)
class Executor:
@@ -315,8 +328,8 @@ class Executor:
"""
try:
result = func()
- except Exception as ex:
- self.logger.exception(ex)
+ except Exception as e: # pylint: disable=broad-except
+ self.logger.exception(e)
result = -1
with self.lock:
self.results.append(result)
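One pattern in the LogWriter loop above deserves a note: because
select.select() blocks, registering a new descriptor writes a sentinel byte
to a local pipe to wake the loop so the watched set can be rebuilt. A
self-contained sketch of that wake-up pattern (illustrative, not the zmake
code):

    import os
    import select
    import threading

    interrupt = os.pipe()   # read end joins every select() call
    watched = []
    lock = threading.Lock()

    def add_fd(fd):
        with lock:
            watched.append(fd)
        os.write(interrupt[1], b"x")  # sentinel wakes the blocked select

    def wait_ready():
        with lock:
            fds = list(watched) + [interrupt[0]]
        ready, _, _ = select.select(fds, [], [])
        if interrupt[0] in ready:
            os.read(interrupt[0], 1)  # drain sentinel; fd set rebuilt next call
            ready.remove(interrupt[0])
        return ready                  # descriptors with real data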
diff --git a/zephyr/zmake/zmake/output_packers.py b/zephyr/zmake/zmake/output_packers.py
index eae7358157..24a72425d6 100644
--- a/zephyr/zmake/zmake/output_packers.py
+++ b/zephyr/zmake/zmake/output_packers.py
@@ -5,8 +5,11 @@
import logging
import shutil
import subprocess
+from pathlib import Path
+from typing import Dict, Optional
import zmake.build_config as build_config
+import zmake.jobserver
import zmake.multiproc
import zmake.util as util
@@ -17,7 +20,8 @@ class BasePacker:
def __init__(self, project):
self.project = project
- def configs(self):
+ @staticmethod
+ def configs():
"""Get all of the build configurations necessary.
Yields:
@@ -25,7 +29,13 @@ class BasePacker:
"""
yield "singleimage", build_config.BuildConfig()
- def pack_firmware(self, work_dir, jobclient, version_string=""):
+ def pack_firmware(
+ self,
+ work_dir,
+ jobclient: zmake.jobserver.JobClient,
+ dir_map: Dict[str, Path],
+ version_string="",
+ ):
"""Pack a firmware image.
Config names from the configs generator are passed as keyword
@@ -36,6 +46,7 @@ class BasePacker:
work_dir: A directory to write outputs and temporary files
into.
jobclient: A JobClient object to use.
+ dir_map: A dict of build dirs such as {'ro': path_to_ro_dir}.
version_string: The version string, which may end up in
certain parts of the outputs.
@@ -46,44 +57,38 @@ class BasePacker:
"""
raise NotImplementedError("Abstract method not implemented")
- def _get_max_image_bytes(self):
+ @staticmethod
+ def _get_max_image_bytes(dir_map) -> Optional[int]:
"""Get the maximum allowed image size (in bytes).
This value will generally be found in CONFIG_FLASH_SIZE but may vary
depending on the specific way things are being packed.
- Returns:
- The maximum allowed size of the image in bytes.
- """
- raise NotImplementedError("Abstract method not implemented")
-
- def _is_size_bound(self, path):
- """Check whether the given path should be constrained by size.
-
- Generally, .elf files will be unconstrained while .bin files will be
- constrained.
-
Args:
- path: A file's path to test.
+        dir_map: A dict of build dirs such as {'ro': path_to_ro_dir}.
Returns:
- True if the file size should be checked. False otherwise.
+ The maximum allowed size of the image in bytes, or None if the size
+ is not limited.
"""
- return path.suffix == ".bin"
+ del dir_map
- def _check_packed_file_size(self, file, dirs):
+ def _check_packed_file_size(self, file, dir_map):
"""Check that a packed file passes size constraints.
Args:
file: A file to test.
- dirs: A map of the arguments to pass to _get_max_image_bytes
+ dir_map: A dict of build dirs such as {'ro': path_to_ro_dir}.
+
Returns:
The file if it passes the test.
"""
- if not self._is_size_bound(
- file
- ) or file.stat().st_size <= self._get_max_image_bytes(**dirs):
+ max_size = self._get_max_image_bytes( # pylint: disable=assignment-from-none
+ dir_map
+ )
+ if max_size is None or file.stat().st_size <= max_size:
return file
raise RuntimeError("Output file ({}) too large".format(file))
@@ -91,15 +96,17 @@ class BasePacker:
class ElfPacker(BasePacker):
"""Raw proxy for ELF output of a single build."""
- def pack_firmware(self, work_dir, jobclient, singleimage, version_string=""):
- yield singleimage / "zephyr" / "zephyr.elf", "zephyr.elf"
+ def pack_firmware(self, work_dir, jobclient, dir_map, version_string=""):
+ del version_string
+ yield dir_map["singleimage"] / "zephyr" / "zephyr.elf", "zephyr.elf"
class RawBinPacker(BasePacker):
"""Raw proxy for zephyr.bin output of a single build."""
- def pack_firmware(self, work_dir, jobclient, singleimage, version_string=""):
- yield singleimage / "zephyr" / "zephyr.bin", "zephyr.bin"
+ def pack_firmware(self, work_dir, jobclient, dir_map, version_string=""):
+ del version_string
+ yield dir_map["singleimage"] / "zephyr" / "zephyr.bin", "zephyr.bin"
class BinmanPacker(BasePacker):
@@ -116,7 +123,9 @@ class BinmanPacker(BasePacker):
yield "ro", build_config.BuildConfig(kconfig_defs={"CONFIG_CROS_EC_RO": "y"})
yield "rw", build_config.BuildConfig(kconfig_defs={"CONFIG_CROS_EC_RW": "y"})
- def pack_firmware(self, work_dir, jobclient, ro, rw, version_string=""):
+ def pack_firmware(
+ self, work_dir, jobclient: zmake.jobserver.JobClient, dir_map, version_string=""
+ ):
"""Pack RO and RW sections using Binman.
Binman configuration is expected to be found in the RO build
@@ -125,8 +134,7 @@ class BinmanPacker(BasePacker):
Args:
work_dir: The directory used for packing.
jobclient: The client used to run subprocesses.
- ro: Directory containing the RO image build.
- rw: Directory containing the RW image build.
+ dir_map: A dict of build dirs such as {'ro': path_to_ro_dir}.
version_string: The version string to use in FRID/FWID.
Yields:
@@ -134,12 +142,14 @@ class BinmanPacker(BasePacker):
should be copied into the output directory, and the output
filename.
"""
- dts_file_path = ro / "zephyr" / "zephyr.dts"
+ ro_dir = dir_map["ro"]
+ rw_dir = dir_map["rw"]
+ dts_file_path = ro_dir / "zephyr" / "zephyr.dts"
# Copy the inputs into the work directory so that Binman can
# find them under a hard-coded name.
- shutil.copy2(ro / "zephyr" / self.ro_file, work_dir / "zephyr_ro.bin")
- shutil.copy2(rw / "zephyr" / self.rw_file, work_dir / "zephyr_rw.bin")
+ shutil.copy2(ro_dir / "zephyr" / self.ro_file, work_dir / "zephyr_ro.bin")
+ shutil.copy2(rw_dir / "zephyr" / self.rw_file, work_dir / "zephyr_rw.bin")
# Version in FRID/FWID can be at most 31 bytes long (32, minus
# one for null character).
@@ -166,14 +176,14 @@ class BinmanPacker(BasePacker):
encoding="utf-8",
)
- zmake.multiproc.log_output(self.logger, logging.DEBUG, proc.stdout)
- zmake.multiproc.log_output(self.logger, logging.ERROR, proc.stderr)
+ zmake.multiproc.LogWriter.log_output(self.logger, logging.DEBUG, proc.stdout)
+ zmake.multiproc.LogWriter.log_output(self.logger, logging.ERROR, proc.stderr)
if proc.wait(timeout=60):
raise OSError("Failed to run binman")
yield work_dir / "zephyr.bin", "zephyr.bin"
- yield ro / "zephyr" / "zephyr.elf", "zephyr.ro.elf"
- yield rw / "zephyr" / "zephyr.elf", "zephyr.rw.elf"
+ yield ro_dir / "zephyr" / "zephyr.elf", "zephyr.ro.elf"
+ yield rw_dir / "zephyr" / "zephyr.elf", "zephyr.rw.elf"
class NpcxPacker(BinmanPacker):
@@ -187,37 +197,39 @@ class NpcxPacker(BinmanPacker):
ro_file = "zephyr.npcx.bin"
npcx_monitor = "npcx_monitor.bin"
- def _get_max_image_bytes(self, ro, rw):
+ def _get_max_image_bytes(self, dir_map):
+ ro_dir = dir_map["ro"]
+ rw_dir = dir_map["rw"]
ro_size = util.read_kconfig_autoconf_value(
- ro / "zephyr" / "include" / "generated",
+ ro_dir / "zephyr" / "include" / "generated",
"CONFIG_PLATFORM_EC_FLASH_SIZE_BYTES",
)
rw_size = util.read_kconfig_autoconf_value(
- ro / "zephyr" / "include" / "generated",
+ rw_dir / "zephyr" / "include" / "generated",
"CONFIG_PLATFORM_EC_FLASH_SIZE_BYTES",
)
return max(int(ro_size, 0), int(rw_size, 0))
# This can probably be removed too and just rely on binman to
# check the sizes... see the comment above.
- def pack_firmware(self, work_dir, jobclient, ro, rw, version_string=""):
+ def pack_firmware(self, work_dir, jobclient, dir_map, version_string=""):
+ ro_dir = dir_map["ro"]
for path, output_file in super().pack_firmware(
work_dir,
jobclient,
- ro,
- rw,
+ dir_map,
version_string=version_string,
):
if output_file == "zephyr.bin":
yield (
- self._check_packed_file_size(path, {"ro": ro, "rw": rw}),
+ self._check_packed_file_size(path, dir_map),
"zephyr.bin",
)
else:
yield path, output_file
# Include the NPCX monitor file as an output artifact.
- yield ro / self.npcx_monitor, self.npcx_monitor
+ yield ro_dir / self.npcx_monitor, self.npcx_monitor
# A dictionary mapping packer config names to classes.
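The size-check refactor above replaces the _is_size_bound() heuristic with an
Optional contract: the base _get_max_image_bytes() returns None meaning
"unbounded", subclasses return a byte limit, and the call site needs an inline
assignment-from-none suppression because pylint only sees the base
implementation. Reduced to its shape (stand-in names, not the real classes):

    from typing import Optional

    class Packer:
        @staticmethod
        def _get_max_image_bytes(dir_map) -> Optional[int]:
            del dir_map
            return None                   # None means size-unbounded

        def _check_packed_file_size(self, file, dir_map):
            max_size = self._get_max_image_bytes(  # pylint: disable=assignment-from-none
                dir_map
            )
            if max_size is None or file.stat().st_size <= max_size:
                return file
            raise RuntimeError("Output file ({}) too large".format(file))

    class BoundedPacker(Packer):
        @staticmethod
        def _get_max_image_bytes(dir_map) -> Optional[int]:
            del dir_map
            return 100                    # subclasses supply the real limit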
diff --git a/zephyr/zmake/zmake/project.py b/zephyr/zmake/zmake/project.py
index b2232bb263..f52caa921d 100644
--- a/zephyr/zmake/zmake/project.py
+++ b/zephyr/zmake/zmake/project.py
@@ -6,11 +6,12 @@
import dataclasses
import logging
import pathlib
-from typing import Callable, Dict, List
+import typing
import zmake.build_config as build_config
import zmake.configlib as configlib
import zmake.modules
+import zmake.output_packers
import zmake.toolchains as toolchains
@@ -29,11 +30,14 @@ def module_dts_overlay_name(modpath, board_name):
@dataclasses.dataclass
class ProjectConfig:
+ """All the information needed to define a project."""
+
+ # pylint: disable=too-many-instance-attributes
project_name: str
zephyr_board: str
supported_toolchains: "list[str]"
output_packer: type
- modules: "list[str]" = dataclasses.field(
+ modules: "dict[str, typing.Any]" = dataclasses.field(
default_factory=lambda: zmake.modules.known_modules,
)
is_test: bool = dataclasses.field(default=False)
@@ -48,7 +52,7 @@ class Project:
def __init__(self, config: ProjectConfig):
self.config = config
- self.packer = self.config.output_packer(self)
+ self.packer: zmake.output_packers.BasePacker = self.config.output_packer(self)
def iter_builds(self):
"""Iterate thru the build combinations provided by the project's packer.
@@ -94,8 +98,7 @@ class Project:
return build_config.BuildConfig(
cmake_defs={"DTC_OVERLAY_FILE": ";".join(map(str, overlays))}
)
- else:
- return build_config.BuildConfig()
+ return build_config.BuildConfig()
def prune_modules(self, module_paths):
"""Reduce a modules dict to the ones required by this project.
@@ -129,6 +132,7 @@ class Project:
return result
def get_toolchain(self, module_paths, override=None):
+ """Get the first supported toolchain that is actually available."""
if override:
if override not in self.config.supported_toolchains:
logging.warning(
@@ -139,19 +143,18 @@ class Project:
override, toolchains.GenericToolchain
)
return support_class(name=override, modules=module_paths)
- else:
- for name in self.config.supported_toolchains:
- support_class = toolchains.support_classes[name]
- toolchain = support_class(name=name, modules=module_paths)
- if toolchain.probe():
- logging.info("Toolchain %r selected by probe function.", toolchain)
- return toolchain
- raise OSError(
- "No supported toolchains could be found on your system. If you see "
- "this message in the chroot, it indicates a bug. Otherwise, you'll "
- "either want to setup your system with a supported toolchain, or "
- "manually select an unsupported toolchain with the -t flag."
- )
+ for name in self.config.supported_toolchains:
+ support_class = toolchains.support_classes[name]
+ toolchain = support_class(name=name, modules=module_paths)
+ if toolchain.probe():
+ logging.info("Toolchain %r selected by probe function.", toolchain)
+ return toolchain
+ raise OSError(
+ "No supported toolchains could be found on your system. If you see "
+ "this message in the chroot, it indicates a bug. Otherwise, you'll "
+ "either want to setup your system with a supported toolchain, or "
+ "manually select an unsupported toolchain with the -t flag."
+ )
@dataclasses.dataclass
@@ -167,7 +170,7 @@ class ProjectRegistrationHandler:
"""
base_config: ProjectConfig
- register_func: Callable[[], "ProjectRegistrationHandler"]
+ register_func: typing.Callable[[], "ProjectRegistrationHandler"]
def variant(self, **kwargs) -> "ProjectRegistrationHandler":
"""Register a new variant based on the base config.
@@ -189,7 +192,7 @@ class ProjectRegistrationHandler:
return self.register_func(**new_config)
-def load_config_file(path) -> List[Project]:
+def load_config_file(path) -> typing.List[Project]:
"""Load a BUILD.py config file and create associated projects.
Args:
@@ -198,7 +201,7 @@ def load_config_file(path) -> List[Project]:
Returns:
A list of Project objects specified by the file.
"""
- projects: List[Project] = []
+ projects: typing.List[Project] = []
def register_project(**kwargs) -> ProjectRegistrationHandler:
config = ProjectConfig(**kwargs)
@@ -220,17 +223,17 @@ def load_config_file(path) -> List[Project]:
configlib.__file__,
"exec",
)
- exec(code, config_globals)
+ exec(code, config_globals) # pylint: disable=exec-used
# Next, load the BUILD.py
logging.debug("Loading config file %s", path)
code = compile(path.read_bytes(), str(path), "exec")
- exec(code, config_globals)
+ exec(code, config_globals) # pylint: disable=exec-used
logging.debug("Config file %s defines %s projects", path, len(projects))
return projects
-def find_projects(root_dir) -> Dict[str, Project]:
+def find_projects(root_dir) -> typing.Dict[str, Project]:
"""Finds all zmake projects in root_dir.
Args:
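The exec-used suppressions above reflect how BUILD.py files are loaded: each
config file is compiled and executed into a globals dict that exposes
register_project, which collects project definitions as a side effect. A
bare-bones sketch of that loading pattern (simplified; the real loader also
pre-loads configlib into the same namespace):

    import pathlib

    def load_config_file(path: pathlib.Path):
        projects = []

        def register_project(**kwargs):
            projects.append(kwargs)   # the real code builds ProjectConfig here

        config_globals = {"register_project": register_project}
        code = compile(path.read_bytes(), str(path), "exec")
        exec(code, config_globals)    # pylint: disable=exec-used
        return projects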
diff --git a/zephyr/zmake/zmake/toolchains.py b/zephyr/zmake/zmake/toolchains.py
index 13ee30de08..8ed1112e25 100644
--- a/zephyr/zmake/zmake/toolchains.py
+++ b/zephyr/zmake/zmake/toolchains.py
@@ -20,7 +20,8 @@ class GenericToolchain:
self.name = name
self.modules = modules or {}
- def probe(self): # pylint:disable=no-self-use
+ @staticmethod
+ def probe():
"""Probe if the toolchain is available on the system."""
# Since the toolchain is not known to zmake, we have no way to
# know if it's installed. Simply return False to indicate not
diff --git a/zephyr/zmake/zmake/util.py b/zephyr/zmake/zmake/util.py
index ee3b245b78..22d45d7deb 100644
--- a/zephyr/zmake/zmake/util.py
+++ b/zephyr/zmake/zmake/util.py
@@ -74,8 +74,8 @@ def read_kconfig_file(path):
A dictionary of kconfig items to their values.
"""
result = {}
- with open(path) as f:
- for line in f:
+ with open(path) as file:
+ for line in file:
line, _, _ = line.partition("#")
line = line.strip()
if line:
@@ -95,11 +95,12 @@ def read_kconfig_autoconf_value(path, key):
The value associated with the key or nothing if the key wasn't found.
"""
prog = re.compile(r"^#define\s{}\s(\S+)$".format(key))
- with open(path / "autoconf.h") as f:
- for line in f:
- m = prog.match(line)
- if m:
- return m.group(1)
+ with open(path / "autoconf.h") as file:
+ for line in file:
+ match = prog.match(line)
+ if match:
+ return match.group(1)
+ return None
def write_kconfig_file(path, config, only_if_changed=True):
@@ -114,9 +115,9 @@ def write_kconfig_file(path, config, only_if_changed=True):
if only_if_changed:
if path.exists() and read_kconfig_file(path) == config:
return
- with open(path, "w") as f:
+ with open(path, "w") as file:
for name, value in config.items():
- f.write("{}={}\n".format(name, value))
+ file.write("{}={}\n".format(name, value))
def read_zephyr_version(zephyr_base):
@@ -131,9 +132,9 @@ def read_zephyr_version(zephyr_base):
version_file = pathlib.Path(zephyr_base) / "VERSION"
file_vars = {}
- with open(version_file) as f:
- for line in f:
- key, sep, value = line.partition("=")
+ with open(version_file) as file:
+ for line in file:
+ key, _, value = line.partition("=")
file_vars[key.strip()] = value.strip()
return (
diff --git a/zephyr/zmake/zmake/version.py b/zephyr/zmake/zmake/version.py
index b2b897cf5b..2333ad46df 100644
--- a/zephyr/zmake/zmake/version.py
+++ b/zephyr/zmake/zmake/version.py
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+"""Code to generate the ec_version.h file."""
+
import datetime
import getpass
import io
diff --git a/zephyr/zmake/zmake/zmake.py b/zephyr/zmake/zmake/zmake.py
index 9b9a709aa9..53dd6575c8 100644
--- a/zephyr/zmake/zmake/zmake.py
+++ b/zephyr/zmake/zmake/zmake.py
@@ -11,7 +11,7 @@ import pathlib
import re
import shutil
import subprocess
-from typing import Dict, List
+from typing import Dict, List, Optional, Union
import zmake.build_config
import zmake.generate_readme
@@ -37,6 +37,7 @@ def ninja_stdout_log_level_override(line, current_log_level):
current_log_level: The active logging level that would be used for the
line.
"""
+ # pylint: disable=too-many-return-statements
# Output lines from Zephyr that are not normally useful
# Send any lines that start with these strings to INFO
cmake_suppress = [
@@ -105,7 +106,7 @@ def cmake_log_level_override(line, default_log_level):
# Strange output from Zephyr that we normally ignore
if line.startswith("Including boilerplate"):
return logging.DEBUG
- elif line.startswith("devicetree error:"):
+ if line.startswith("devicetree error:"):
return logging.ERROR
if ninja_warnings.match(line):
return logging.WARNING
@@ -149,17 +150,19 @@ class Zmake:
before launching more, False to just do this after all jobs complete
"""
- def __init__(
+ # pylint: disable=too-many-instance-attributes
+
+ def __init__( # pylint: disable=too-many-arguments
self,
checkout=None,
- jobserver=None,
+ jobserver: Optional[zmake.jobserver.JobClient] = None,
jobs=0,
goma=False,
gomacc="/mnt/host/depot_tools/.cipd_bin/gomacc",
modules_dir=None,
zephyr_base=None,
):
- zmake.multiproc.reset()
+ zmake.multiproc.LogWriter.reset()
self.logger = logging.getLogger(self.__class__.__name__)
self._checkout = checkout
self.goma = goma
@@ -187,6 +190,7 @@ class Zmake:
@property
def checkout(self):
+ """Returns the location of the cros checkout."""
if not self._checkout:
self._checkout = util.locate_cros_checkout()
return self._checkout.resolve()
@@ -200,7 +204,7 @@ class Zmake:
"""
found_projects = zmake.project.find_projects(self.module_paths["ec"] / "zephyr")
if all_projects:
- projects = found_projects.values()
+ projects = list(found_projects.values())
elif host_tests_only:
projects = [p for p in found_projects.values() if p.config.is_test]
else:
@@ -212,7 +216,7 @@ class Zmake:
raise KeyError("No project named {}".format(project_name)) from e
return projects
- def configure(
+ def configure( # pylint: disable=too-many-arguments,too-many-locals
self,
project_names,
build_dir=None,
@@ -253,33 +257,33 @@ class Zmake:
)
)
if self._sequential:
- rv = self.executor.wait()
- if rv:
- return rv
- rv = self.executor.wait()
- if rv:
- return rv
+ result = self.executor.wait()
+ if result:
+ return result
+ result = self.executor.wait()
+ if result:
+ return result
test_projects = [p for p in projects if p.config.is_test]
if len(test_projects) > 1 and coverage and test_after_configure:
- rv = self._merge_lcov_files(
+ result = self._merge_lcov_files(
projects=test_projects,
build_dir=build_dir,
output_file=build_dir / "all_tests.info",
)
- if rv:
- return rv
+ if result:
+ return result
non_test_projects = [p for p in projects if not p.config.is_test]
if len(non_test_projects) > 1 and coverage and build_after_configure:
- rv = self._merge_lcov_files(
+ result = self._merge_lcov_files(
projects=non_test_projects,
build_dir=build_dir,
output_file=build_dir / "all_builds.info",
)
- if rv:
- return rv
+ if result:
+ return result
return 0
- def build(
+ def build( # pylint: disable=too-many-arguments
self,
project_names,
build_dir=None,
@@ -307,7 +311,7 @@ class Zmake:
build_after_configure=True,
)
- def test(
+ def test( # pylint: disable=too-many-arguments,too-many-locals
self,
project_names,
build_dir=None,
@@ -362,23 +366,23 @@ class Zmake:
)
)
if self._sequential:
- rv = self.executor.wait()
- if rv:
- return rv
- rv = self.executor.wait()
- if rv:
- return rv
+ result = self.executor.wait()
+ if result:
+ return result
+ result = self.executor.wait()
+ if result:
+ return result
if len(test_projects) > 1 and coverage:
- rv = self._merge_lcov_files(
+ result = self._merge_lcov_files(
projects=test_projects,
build_dir=build_dir,
output_file=build_dir / "all_tests.info",
)
- if rv:
- return rv
+ if result:
+ return result
return 0
- def testall(
+ def testall( # pylint: disable=too-many-arguments
self,
build_dir=None,
toolchain=None,
@@ -387,6 +391,7 @@ class Zmake:
coverage=False,
allow_warnings=False,
):
+ """Locate and build all the projects."""
return self.test(
[],
build_dir=build_dir,
@@ -411,6 +416,8 @@ class Zmake:
allow_warnings=False,
extra_cflags=None,
):
+ # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
+ # pylint: disable=too-many-statements
"""Set up a build directory to later be built by "zmake build"."""
# Resolve build_dir if needed.
if not build_dir:
@@ -487,7 +494,7 @@ class Zmake:
files_to_write = []
self.logger.info("Building %s in %s.", project.config.project_name, build_dir)
for build_name, build_config in project.iter_builds():
- config = (
+ config: zmake.build_config.BuildConfig = (
base_config
| toolchain_config
| module_config
@@ -507,8 +514,7 @@ class Zmake:
build_name,
)
continue
- else:
- config_json_file.unlink()
+ config_json_file.unlink()
files_to_write.append((config_json_file, config_json))
@@ -534,14 +540,14 @@ class Zmake:
errors="replace",
)
job_id = "{}:{}".format(project.config.project_name, build_name)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger,
logging.DEBUG,
proc.stdout,
log_level_override_func=cmake_log_level_override,
job_id=job_id,
)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger,
logging.ERROR,
proc.stderr,
@@ -567,14 +573,14 @@ class Zmake:
output_files = []
if build_after_configure or test_after_configure:
- rv = self._build(
+ result = self._build(
build_dir=build_dir,
project=project,
coverage=coverage,
output_files_out=output_files,
)
- if rv:
- return rv
+ if result:
+ return result
if test_after_configure and project.config.is_test:
gcov = "gcov.sh-not-found"
for build_name, _ in project.iter_builds():
@@ -601,6 +607,7 @@ class Zmake:
output_files_out=None,
coverage=False,
):
+ # pylint: disable=too-many-locals,too-many-branches
"""Build a pre-configured build directory."""
def wait_and_check_success(procs, writers):
@@ -668,7 +675,7 @@ class Zmake:
"Building %s:%s: %s",
project.config.project_name,
build_name,
- zmake.util.repr_command(cmd),
+ util.repr_command(cmd),
)
proc = self.jobserver.popen(
cmd,
@@ -680,7 +687,7 @@ class Zmake:
job_id = "{}:{}".format(project.config.project_name, build_name)
dirs[build_name].mkdir(parents=True, exist_ok=True)
build_log = open(dirs[build_name] / "build.log", "w")
- out = zmake.multiproc.log_output(
+ out = zmake.multiproc.LogWriter.log_output(
logger=self.logger,
log_level=logging.INFO,
file_descriptor=proc.stdout,
@@ -688,7 +695,7 @@ class Zmake:
job_id=job_id,
tee_output=build_log,
)
- err = zmake.multiproc.log_output(
+ err = zmake.multiproc.LogWriter.log_output(
self.logger,
logging.ERROR,
proc.stderr,
@@ -708,9 +715,9 @@ class Zmake:
# Run the packer.
packer_work_dir = build_dir / "packer"
output_dir = build_dir / "output"
- for d in output_dir, packer_work_dir:
- if not d.exists():
- d.mkdir()
+ for newdir in output_dir, packer_work_dir:
+ if not newdir.exists():
+ newdir.mkdir()
if output_files_out is None:
output_files_out = []
@@ -723,7 +730,7 @@ class Zmake:
)
else:
for output_file, output_name in project.packer.pack_firmware(
- packer_work_dir, self.jobserver, version_string=version_string, **dirs
+ packer_work_dir, self.jobserver, dirs, version_string=version_string
):
shutil.copy2(output_file, output_dir / output_name)
self.logger.debug("Output file '%s' created.", output_file)
@@ -731,7 +738,7 @@ class Zmake:
return 0
- def _run_test(
+ def _run_test( # pylint: disable=too-many-arguments
self, elf_file: pathlib.Path, coverage, gcov, build_dir, lcov_file, timeout=None
):
"""Run a single test, with goma if enabled.
@@ -760,13 +767,13 @@ class Zmake:
errors="replace",
)
job_id = "test {}".format(elf_file)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger,
logging.DEBUG,
proc.stdout,
job_id=job_id,
)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger,
logging.ERROR,
proc.stderr,
@@ -791,7 +798,9 @@ class Zmake:
with self.jobserver.get_job():
_run()
- def _run_lcov(self, build_dir, lcov_file, initial=False, gcov=""):
+ def _run_lcov(
+ self, build_dir, lcov_file, initial=False, gcov: Union[os.PathLike, str] = ""
+ ):
gcov = os.path.abspath(gcov)
if initial:
self.logger.info("Running (initial) lcov on %s.", build_dir)
@@ -821,7 +830,7 @@ class Zmake:
encoding="utf-8",
errors="replace",
)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger,
logging.WARNING,
proc.stderr,
@@ -864,21 +873,21 @@ class Zmake:
encoding="utf-8",
errors="replace",
)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger, logging.ERROR, proc.stderr, job_id="lcov"
)
- zmake.multiproc.log_output(
+ zmake.multiproc.LogWriter.log_output(
self.logger, logging.DEBUG, proc.stdout, job_id="lcov"
)
if proc.wait():
raise OSError(get_process_failure_msg(proc))
return 0
- def list_projects(self, format, search_dir):
+ def list_projects(self, fmt, search_dir):
"""List project names known to zmake on stdout.
Args:
- format: The formatting string to print projects with.
+ fmt: The formatting string to print projects with.
search_dir: Directory to start the search for
BUILD.py files at.
"""
@@ -886,7 +895,7 @@ class Zmake:
search_dir = self.module_paths["ec"] / "zephyr"
for project in zmake.project.find_projects(search_dir).values():
- print(format.format(config=project.config), end="")
+ print(fmt.format(config=project.config), end="")
return 0