-rw-r--r--  .editorconfig                    11
-rw-r--r--  .github/workflows/linux.yml      55
-rw-r--r--  .github/workflows/macos.yml      49
-rw-r--r--  .github/workflows/windows.yml    49
-rw-r--r--  .gitignore                        8
-rw-r--r--  .travis.yml                      22
-rw-r--r--  CMakeLists.txt                  129
-rw-r--r--  CONTRIBUTING.md                  34
-rw-r--r--  HACKING.md                      252
-rw-r--r--  README                           21
-rw-r--r--  README.md                        50
-rw-r--r--  RELEASING                         2
-rw-r--r--  appveyor.yml                     25
-rwxr-xr-x  configure.py                     11
-rw-r--r--  doc/manual.asciidoc               6
-rwxr-xr-x  misc/output_test.py              18
-rw-r--r--  src/build.cc                     52
-rw-r--r--  src/build.h                      22
-rw-r--r--  src/build_log.cc                 74
-rw-r--r--  src/build_log.h                   8
-rw-r--r--  src/build_log_test.cc            60
-rw-r--r--  src/build_test.cc               227
-rw-r--r--  src/clean.cc                     13
-rw-r--r--  src/clean.h                       5
-rw-r--r--  src/clean_test.cc                82
-rw-r--r--  src/depfile_parser.cc            49
-rw-r--r--  src/depfile_parser.h             13
-rw-r--r--  src/depfile_parser.in.cc         49
-rw-r--r--  src/depfile_parser_test.cc      100
-rw-r--r--  src/deps_log.cc                  14
-rw-r--r--  src/deps_log.h                    3
-rw-r--r--  src/graph.cc                     52
-rw-r--r--  src/graph.h                       8
-rw-r--r--  src/graph_test.cc                 2
-rw-r--r--  src/load_status.h                24
-rw-r--r--  src/manifest_parser.cc           12
-rw-r--r--  src/manifest_parser_test.cc       5
-rw-r--r--  src/ninja.cc                    120
-rw-r--r--  src/util.cc                      18
-rw-r--r--  src/util_test.cc                  4
40 files changed, 1246 insertions, 512 deletions
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..0cc68d6
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,11 @@
+root = true
+
+[*]
+charset = utf-8
+indent_style = space
+indent_size = 2
+insert_final_newline = true
+end_of_line = lf
+
+[CMakeLists.txt]
+indent_style = tab
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
new file mode 100644
index 0000000..2febee2
--- /dev/null
+++ b/.github/workflows/linux.yml
@@ -0,0 +1,55 @@
+name: Linux
+
+on:
+ pull_request:
+ push:
+ release:
+ types: published
+
+jobs:
+ build:
+ runs-on: [ubuntu-latest]
+ container:
+ image: centos:7
+ steps:
+ - uses: actions/checkout@v1
+ - name: Install dependencies
+ run: |
+ curl -L -O https://github.com/Kitware/CMake/releases/download/v3.16.2/cmake-3.16.2-Linux-x86_64.sh
+ chmod +x cmake-3.16.2-Linux-x86_64.sh
+ ./cmake-3.16.2-Linux-x86_64.sh --skip-license --prefix=/usr/local
+ curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-10.el7.x86_64.rpm
+ curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-10.el7.x86_64.rpm
+ rpm -U --quiet p7zip-16.02-10.el7.x86_64.rpm
+ rpm -U --quiet p7zip-plugins-16.02-10.el7.x86_64.rpm
+ yum install -y make gcc-c++
+ - name: Build ninja
+ shell: bash
+ run: |
+ mkdir build && cd build
+ cmake -DCMAKE_BUILD_TYPE=Release ..
+ cmake --build . --parallel --config Release
+ ctest -vv
+ strip ninja
+ - name: Create ninja archive
+ run: |
+ mkdir artifact
+ 7z a artifact/ninja-linux.zip ./build/ninja
+
+ # Upload ninja binary archive as an artifact
+ - name: Upload artifact
+ uses: actions/upload-artifact@v1
+ with:
+ name: ninja-binary-archives
+ path: artifact
+
+ - name: Upload release asset
+ if: github.event.action == 'published'
+ uses: actions/upload-release-asset@v1.0.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ github.event.release.upload_url }}
+ asset_path: ./artifact/ninja-linux.zip
+ asset_name: ninja-linux.zip
+ asset_content_type: application/zip
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
new file mode 100644
index 0000000..2a7c100
--- /dev/null
+++ b/.github/workflows/macos.yml
@@ -0,0 +1,49 @@
+name: macOS
+
+on:
+ pull_request:
+ push:
+ release:
+ types: published
+
+jobs:
+ build:
+ runs-on: macOS-latest
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - name: Install dependencies
+ run: brew install re2c p7zip cmake
+
+ - name: Build ninja
+ shell: bash
+ run: |
+ mkdir build && cd build
+ cmake -DCMAKE_BUILD_TYPE=Release ..
+ cmake --build . --parallel --config Release
+ ctest -vv
+
+ - name: Create ninja archive
+ shell: bash
+ run: |
+ mkdir artifact
+ 7z a artifact/ninja-mac.zip ./build/ninja
+
+ # Upload ninja binary archive as an artifact
+ - name: Upload artifact
+ uses: actions/upload-artifact@v1
+ with:
+ name: ninja-binary-archives
+ path: artifact
+
+ - name: Upload release asset
+ if: github.event.action == 'published'
+ uses: actions/upload-release-asset@v1.0.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ github.event.release.upload_url }}
+ asset_path: ./artifact/ninja-mac.zip
+ asset_name: ninja-mac.zip
+ asset_content_type: application/zip
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
new file mode 100644
index 0000000..bdec6c9
--- /dev/null
+++ b/.github/workflows/windows.yml
@@ -0,0 +1,49 @@
+name: Windows
+
+on:
+ pull_request:
+ push:
+ release:
+ types: published
+
+jobs:
+ build:
+ runs-on: windows-latest
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - name: Install dependencies
+ run: choco install re2c
+
+ - name: Build ninja
+ shell: bash
+ run: |
+ mkdir build && cd build
+ cmake -DCMAKE_BUILD_TYPE=Release ..
+ cmake --build . --parallel --config Release
+ ctest -vv
+
+ - name: Create ninja archive
+ shell: bash
+ run: |
+ mkdir artifact
+ 7z a artifact/ninja-win.zip ./build/Release/ninja.exe
+
+ # Upload ninja binary archive as an artifact
+ - name: Upload artifact
+ uses: actions/upload-artifact@v1
+ with:
+ name: ninja-binary-archives
+ path: artifact
+
+ - name: Upload release asset
+ if: github.event.action == 'published'
+ uses: actions/upload-release-asset@v1.0.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ github.event.release.upload_url }}
+ asset_path: ./artifact/ninja-win.zip
+ asset_name: ninja-win.zip
+ asset_content_type: application/zip
diff --git a/.gitignore b/.gitignore
index 46736a6..dca1129 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,8 +3,7 @@
*.exe
*.pdb
*.ilk
-TAGS
-/build
+/build*/
/build.ninja
/ninja
/ninja.bootstrap
@@ -18,8 +17,8 @@ TAGS
/graph.png
/doc/manual.html
/doc/doxygen
-/gtest-1.6.0
*.patch
+.DS_Store
# Eclipse project files
.project
@@ -36,3 +35,6 @@ TAGS
# Visual Studio Code project files
/.vscode/
/.ccls-cache/
+
+# Qt Creator project files
+/CMakeLists.txt.user
diff --git a/.travis.yml b/.travis.yml
index f76b982..e5d7d2b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,15 +1,35 @@
matrix:
include:
- os: linux
+ dist: precise
compiler: gcc
- os: linux
+ dist: precise
compiler: clang
+ - os: linux
+ dist: trusty
+ compiler: gcc
+ - os: linux
+ dist: trusty
+ compiler: clang
+ - os: linux
+ dist: xenial
+ compiler: gcc
+ - os: linux
+ dist: xenial
+ compiler: clang
+ - os: osx
+ osx_image: xcode10
- os: osx
+ osx_image: xcode10.1
sudo: false
language: cpp
+before_install:
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install re2c ; fi
+ - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install re2c python ; fi
script:
- ./misc/ci.py
- - ./configure.py --bootstrap
+ - python3 configure.py --bootstrap
- ./ninja all
- ./ninja_test --gtest_filter=-SubprocessTest.SetWithLots
- ./misc/ninja_syntax_test.py
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..60fd8a1
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,129 @@
+cmake_minimum_required(VERSION 3.15)
+cmake_policy(SET CMP0091 NEW)
+project(ninja)
+
+if(CMAKE_BUILD_TYPE MATCHES "Release")
+ cmake_policy(SET CMP0069 NEW)
+ include(CheckIPOSupported)
+ check_ipo_supported(RESULT lto_supported OUTPUT error)
+
+ if(lto_supported)
+ message(STATUS "IPO / LTO enabled")
+ set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
+ else()
+ message(STATUS "IPO / LTO not supported: <${error}>")
+ endif()
+endif()
+
+if(MSVC)
+ set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR- /Zc:__cplusplus")
+else()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated -fdiagnostics-color")
+endif()
+
+find_program(RE2C re2c)
+if(RE2C)
+ # the depfile parser and ninja lexers are generated using re2c.
+ function(re2c IN OUT)
+ add_custom_command(DEPENDS ${IN} OUTPUT ${OUT}
+ COMMAND ${RE2C} -b -i --no-generation-date -o ${OUT} ${IN}
+ )
+ endfunction()
+ re2c(${CMAKE_SOURCE_DIR}/src/depfile_parser.in.cc ${CMAKE_BINARY_DIR}/depfile_parser.cc)
+ re2c(${CMAKE_SOURCE_DIR}/src/lexer.in.cc ${CMAKE_BINARY_DIR}/lexer.cc)
+ add_library(libninja-re2c OBJECT ${CMAKE_BINARY_DIR}/depfile_parser.cc ${CMAKE_BINARY_DIR}/lexer.cc)
+else()
+ message(WARNING "re2c was not found; changes to src/*.in.cc will not affect your build.")
+ add_library(libninja-re2c OBJECT src/depfile_parser.cc src/lexer.cc)
+endif()
+target_include_directories(libninja-re2c PRIVATE src)
+
+# Core source files all build into ninja library.
+add_library(libninja OBJECT
+ src/build_log.cc
+ src/build.cc
+ src/clean.cc
+ src/clparser.cc
+ src/dyndep.cc
+ src/dyndep_parser.cc
+ src/debug_flags.cc
+ src/deps_log.cc
+ src/disk_interface.cc
+ src/edit_distance.cc
+ src/eval_env.cc
+ src/graph.cc
+ src/graphviz.cc
+ src/line_printer.cc
+ src/manifest_parser.cc
+ src/metrics.cc
+ src/parser.cc
+ src/state.cc
+ src/string_piece_util.cc
+ src/util.cc
+ src/version.cc
+)
+if(WIN32)
+ target_sources(libninja PRIVATE
+ src/subprocess-win32.cc
+ src/includes_normalize-win32.cc
+ src/msvc_helper-win32.cc
+ src/msvc_helper_main-win32.cc
+ src/getopt.c
+ )
+ if(MSVC)
+ target_sources(libninja PRIVATE src/minidump-win32.cc)
+ endif()
+else()
+ target_sources(libninja PRIVATE src/subprocess-posix.cc)
+endif()
+
+#Fixes GetActiveProcessorCount on MinGW
+if(MINGW)
+target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601 __USE_MINGW_ANSI_STDIO=1)
+endif()
+
+# Main executable is library plus main() function.
+add_executable(ninja src/ninja.cc)
+target_link_libraries(ninja PRIVATE libninja libninja-re2c)
+
+# Tests all build into ninja_test executable.
+add_executable(ninja_test
+ src/build_log_test.cc
+ src/build_test.cc
+ src/clean_test.cc
+ src/clparser_test.cc
+ src/depfile_parser_test.cc
+ src/deps_log_test.cc
+ src/disk_interface_test.cc
+ src/dyndep_parser_test.cc
+ src/edit_distance_test.cc
+ src/graph_test.cc
+ src/lexer_test.cc
+ src/manifest_parser_test.cc
+ src/ninja_test.cc
+ src/state_test.cc
+ src/string_piece_util_test.cc
+ src/subprocess_test.cc
+ src/test.cc
+ src/util_test.cc
+)
+if(WIN32)
+ target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc)
+endif()
+target_link_libraries(ninja_test PRIVATE libninja libninja-re2c)
+
+foreach(perftest
+ build_log_perftest
+ canon_perftest
+ clparser_perftest
+ depfile_parser_perftest
+ hash_collision_bench
+ manifest_parser_perftest
+)
+ add_executable(${perftest} src/${perftest}.cc)
+ target_link_libraries(${perftest} PRIVATE libninja libninja-re2c)
+endforeach()
+
+enable_testing()
+add_test(NinjaTest ninja_test)
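
For reference, the `re2c()` helper above boils down to two commands along these lines (a sketch; the `build/` output directory stands in for whatever `CMAKE_BINARY_DIR` happens to be):

```
# Regenerate the depfile parser and lexer from their re2c sources,
# using the same flags as the CMake custom commands above.
re2c -b -i --no-generation-date -o build/depfile_parser.cc src/depfile_parser.in.cc
re2c -b -i --no-generation-date -o build/lexer.cc src/lexer.in.cc
```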
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..be1fc02
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,34 @@
+# How to successfully make changes to Ninja
+
+We're very wary of changes that increase the complexity of Ninja (in particular,
+new build file syntax or command-line flags) or increase the maintenance burden
+of Ninja. Ninja is already successfully used by hundreds of developers for large
+projects and it already achieves (most of) the goals we set out for it to do.
+It's probably best to discuss new feature ideas on the
+[mailing list](https://groups.google.com/forum/#!forum/ninja-build) or in an
+issue before creating a PR.
+
+## Coding guidelines
+
+Generally it's the
+[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) with
+a few additions:
+
+* Any code merged into the Ninja codebase which will be part of the main
+ executable must compile as C++03. You may use C++11 features in a test or an
+ unimportant tool if you guard your code with `#if __cplusplus >= 201103L`.
+* We have used `using namespace std;` a lot in the past. For new contributions,
+ please try to avoid relying on it and instead whenever possible use `std::`.
+ However, please do not change existing code simply to add `std::` unless your
+ contribution already needs to change that line of code anyway.
+* All source files should have the Google Inc. license header.
+* Use `///` for [Doxygen](http://www.doxygen.nl/) (use `\a` to refer to
+ arguments).
+* It's not necessary to document each argument, especially when they're
+ relatively self-evident (e.g. in
+ `CanonicalizePath(string* path, string* err)`, the arguments are hopefully
+ obvious).
+
+If you're unsure about code formatting, please use
+[clang-format](https://clang.llvm.org/docs/ClangFormat.html). However, please do
+not format code that is not otherwise part of your contribution.
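
One way to follow that advice, assuming the `git-clang-format` helper that ships with clang is installed, is to restrict formatting to the lines your commit actually touches:

```
# Preview formatting changes limited to lines modified since HEAD~1;
# drop --diff to apply them in place.
git clang-format --diff HEAD~1
```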
diff --git a/HACKING.md b/HACKING.md
deleted file mode 100644
index bd6fec7..0000000
--- a/HACKING.md
+++ /dev/null
@@ -1,252 +0,0 @@
-## Basic overview
-
-`./configure.py` generates the `build.ninja` files used to build
-ninja. It accepts various flags to adjust build parameters.
-Run './configure.py --help' for more configuration options.
-
-The primary build target of interest is `ninja`, but when hacking on
-Ninja your changes should be testable so it's more useful to build and
-run `ninja_test` when developing.
-
-### Bootstrapping
-
-Ninja is built using itself. To bootstrap the first binary, run the
-configure script as `./configure.py --bootstrap`. This first compiles
-all non-test source files together, then re-builds Ninja using itself.
-You should end up with a `ninja` binary (or `ninja.exe`) in the project root.
-
-#### Windows
-
-On Windows, you'll need to install Python to run `configure.py`, and
-run everything under a Visual Studio Tools Command Prompt (or after
-running `vcvarsall` in a normal command prompt).
-
-For other combinations such as gcc/clang you will need the compiler
-(gcc/cl) in your PATH and you will have to set the appropriate
-platform configuration script.
-
-See below if you want to use mingw or some other compiler instead of
-Visual Studio.
-
-##### Using Visual Studio
-Assuming that you now have Python installed, then the steps for building under
-Windows using Visual Studio are:
-
-Clone and checkout the latest release (or whatever branch you want). You
-can do this in either a command prompt or by opening a git bash prompt:
-
-```
- $ git clone git://github.com/ninja-build/ninja.git && cd ninja
- $ git checkout release
-```
-
-Then:
-
-1. Open a Windows command prompt in the folder where you checked out ninja.
-2. Select the Microsoft build environment by running
-`vcvarsall.bat` with the appropriate environment.
-3. Build ninja and test it.
-
-The steps for a Visual Studio 2015 64-bit build are outlined here:
-
-```
- > "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64
- > python configure.py --bootstrap
- > ninja --help
-```
-Copy the ninja executable to another location, if desired, e.g. C:\local\Ninja.
-
-Finally add the path where ninja.exe is to the PATH variable.
-
-### Adjusting build flags
-
-Build in "debug" mode while developing (disables optimizations and builds
-way faster on Windows):
-
- ./configure.py --debug
-
-To use clang, set `CXX`:
-
- CXX=clang++ ./configure.py
-
-## How to successfully make changes to Ninja
-
-Github pull requests are convenient for me to merge (I can just click
-a button and it's all handled server-side), but I'm also comfortable
-accepting pre-github git patches (via `send-email` etc.).
-
-Good pull requests have all of these attributes:
-
-* Are scoped to one specific issue
-* Include a test to demonstrate their correctness
-* Update the docs where relevant
-* Match the Ninja coding style (see below)
-* Don't include a mess of "oops, fix typo" commits
-
-These are typically merged without hesitation. If a change is lacking
-any of the above I usually will ask you to fix it, though there are
-obvious exceptions (fixing typos in comments don't need tests).
-
-I am very wary of changes that increase the complexity of Ninja (in
-particular, new build file syntax or command-line flags) or increase
-the maintenance burden of Ninja. Ninja is already successfully used
-by hundreds of developers for large projects and it already achieves
-(most of) the goals I set out for it to do. It's probably best to
-discuss new feature ideas on the [mailing list](https://groups.google.com/forum/#!forum/ninja-build)
-before I shoot down your patch.
-
-## Testing
-
-### Test-driven development
-
-Set your build command to
-
- ./ninja ninja_test && ./ninja_test --gtest_filter=MyTest.Name
-
-now you can repeatedly run that while developing until the tests pass
-(I frequently set it as my compilation command in Emacs). Remember to
-build "all" before committing to verify the other source still works!
-
-## Testing performance impact of changes
-
-If you have a Chrome build handy, it's a good test case. There's a
-script at `misc/measure.py` that repeatedly runs a command (to address
-variance) and summarizes its runtime. E.g.
-
- path/to/misc/measure.py path/to/my/ninja chrome
-
-For changing the depfile parser, you can also build `parser_perftest`
-and run that directly on some representative input files.
-
-## Coding guidelines
-
-Generally it's the [Google C++ coding style][], but in brief:
-
-* Function name are camelcase.
-* Member methods are camelcase, except for trivial getters which are
- underscore separated.
-* Local variables are underscore separated.
-* Member variables are underscore separated and suffixed by an extra
- underscore.
-* Two spaces indentation.
-* Opening braces is at the end of line.
-* Lines are 80 columns maximum.
-* All source files should have the Google Inc. license header.
-
-[Google C++ coding style]: https://google.github.io/styleguide/cppguide.html
-
-## Documentation
-
-### Style guidelines
-
-* Use `///` for doxygen.
-* Use `\a` to refer to arguments.
-* It's not necessary to document each argument, especially when they're
- relatively self-evident (e.g. in `CanonicalizePath(string* path, string* err)`,
- the arguments are hopefully obvious)
-
-### Building the manual
-
- sudo apt-get install asciidoc --no-install-recommends
- ./ninja manual
-
-### Building the code documentation
-
- sudo apt-get install doxygen
- ./ninja doxygen
-
-## Building for Windows
-
-While developing, it's helpful to copy `ninja.exe` to another name like
-`n.exe`; otherwise, rebuilds will be unable to write `ninja.exe` because
-it's locked while in use.
-
-### Via Visual Studio
-
-* Install Visual Studio (Express is fine), [Python for Windows][],
- and (if making changes) googletest (see above instructions)
-* In a Visual Studio command prompt: `python configure.py --bootstrap`
-
-[Python for Windows]: http://www.python.org/getit/windows/
-
-### Via mingw on Windows (not well supported)
-
-* Install mingw, msys, and python
-* In the mingw shell, put Python in your path, and
- `python configure.py --bootstrap`
-* To reconfigure, run `python configure.py`
-* Remember to strip the resulting executable if size matters to you
-
-### Via mingw on Linux (not well supported)
-
-Setup on Ubuntu Lucid:
-* `sudo apt-get install gcc-mingw32 wine`
-* `export CC=i586-mingw32msvc-cc CXX=i586-mingw32msvc-c++ AR=i586-mingw32msvc-ar`
-
-Setup on Ubuntu Precise:
-* `sudo apt-get install gcc-mingw-w64-i686 g++-mingw-w64-i686 wine`
-* `export CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ AR=i686-w64-mingw32-ar`
-
-Setup on Arch:
-* Uncomment the `[multilib]` section of `/etc/pacman.conf` and `sudo pacman -Sy`.
-* `sudo pacman -S mingw-w64-gcc wine`
-* `export CC=x86_64-w64-mingw32-cc CXX=x86_64-w64-mingw32-c++ AR=x86_64-w64-mingw32-ar`
-* `export CFLAGS=-I/usr/x86_64-w64-mingw32/include`
-
-Then run:
-* `./configure.py --platform=mingw --host=linux`
-* Build `ninja.exe` using a Linux ninja binary: `/path/to/linux/ninja`
-* Run: `./ninja.exe` (implicitly runs through wine(!))
-
-### Using Microsoft compilers on Linux (extremely flaky)
-
-The trick is to install just the compilers, and not all of Visual Studio,
-by following [these instructions][win7sdk].
-
-[win7sdk]: http://www.kegel.com/wine/cl-howto-win7sdk.html
-
-### Using gcov
-
-Do a clean debug build with the right flags:
-
- CFLAGS=-coverage LDFLAGS=-coverage ./configure.py --debug
- ninja -t clean ninja_test && ninja ninja_test
-
-Run the test binary to generate `.gcda` and `.gcno` files in the build
-directory, then run gcov on the .o files to generate `.gcov` files in the
-root directory:
-
- ./ninja_test
- gcov build/*.o
-
-Look at the generated `.gcov` files directly, or use your favorite gcov viewer.
-
-### Using afl-fuzz
-
-Build with afl-clang++:
-
- CXX=path/to/afl-1.20b/afl-clang++ ./configure.py
- ninja
-
-Then run afl-fuzz like so:
-
- afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@
-
-You can pass `-x misc/afl-fuzz-tokens` to use the token dictionary. In my
-testing, that did not seem more effective though.
-
-#### Using afl-fuzz with asan
-
-If you want to use asan (the `isysroot` bit is only needed on OS X; if clang
-can't find C++ standard headers make sure your LLVM checkout includes a libc++
-checkout and has libc++ installed in the build directory):
-
- CFLAGS="-fsanitize=address -isysroot $(xcrun -show-sdk-path)" \
- LDFLAGS=-fsanitize=address CXX=path/to/afl-1.20b/afl-clang++ \
- ./configure.py
- AFL_CXX=path/to/clang++ ninja
-
-Make sure ninja can find the asan runtime:
-
- DYLD_LIBRARY_PATH=path/to//lib/clang/3.7.0/lib/darwin/ \
- afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@
diff --git a/README b/README
deleted file mode 100644
index a1535ff..0000000
--- a/README
+++ /dev/null
@@ -1,21 +0,0 @@
-Ninja is a small build system with a focus on speed.
-https://ninja-build.org/
-
-See the manual -- https://ninja-build.org/manual.html or
-doc/manual.asciidoc included in the distribution -- for background
-and more details.
-
-Binaries for Linux, Mac, and Windows are available at
- https://github.com/ninja-build/ninja/releases
-Run './ninja -h' for Ninja help.
-
-To build your own binary, on many platforms it should be sufficient to
-just run `./configure.py --bootstrap`; for more details see HACKING.md.
-(Also read that before making changes to Ninja, as it has advice.)
-
-Installation is not necessary because the only required file is the
-resulting ninja binary. However, to enable features like Bash
-completion and Emacs and Vim editing modes, some files in misc/ must be
-copied to appropriate locations.
-
-If you're interested in making changes to Ninja, read HACKING.md first.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3326f81
--- /dev/null
+++ b/README.md
@@ -0,0 +1,50 @@
+# Ninja
+
+Ninja is a small build system with a focus on speed.
+https://ninja-build.org/
+
+See [the manual](https://ninja-build.org/manual.html) or
+`doc/manual.asciidoc` included in the distribution for background
+and more details.
+
+Binaries for Linux, Mac, and Windows are available at
+ [GitHub](https://github.com/ninja-build/ninja/releases).
+Run `./ninja -h` for Ninja help.
+
+Installation is not necessary because the only required file is the
+resulting ninja binary. However, to enable features like Bash
+completion and Emacs and Vim editing modes, some files in misc/ must be
+copied to appropriate locations.
+
+If you're interested in making changes to Ninja, read CONTRIBUTING.md first.
+
+## Building Ninja itself
+
+You can either build Ninja via the custom generator script written in Python or
+via CMake. For more details see
+[the wiki](https://github.com/ninja-build/ninja/wiki).
+
+### Python
+
+```
+./configure.py --bootstrap
+```
+
+This will generate the `ninja` binary and a `build.ninja` file you can now use
+to build Ninja with itself.
+
+### CMake
+
+```
+cmake -Bbuild-cmake -H.
+cmake --build build-cmake
+```
+
+The `ninja` binary will now be inside the `build-cmake` directory (you can
+choose any other name you like).
+
+To run the unit tests:
+
+```
+./build-cmake/ninja_test
+```
diff --git a/RELEASING b/RELEASING
index da4dbdd..0b03341 100644
--- a/RELEASING
+++ b/RELEASING
@@ -1,7 +1,7 @@
Notes to myself on all the steps to make for a Ninja release.
Push new release branch:
-1. Run afl-fuzz for a day or so (see HACKING.md) and run ninja_test
+1. Run afl-fuzz for a day or so and run ninja_test
2. Consider sending a heads-up to the ninja-build mailing list first
3. Make sure branches 'master' and 'release' are synced up locally
4. Update src/version.cc with new version (with ".git"), then
diff --git a/appveyor.yml b/appveyor.yml
index 04ed58e..f0b92b8 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,5 +1,7 @@
version: 1.0.{build}
-image: Visual Studio 2017
+image:
+ - Visual Studio 2017
+ - Ubuntu1804
environment:
CLICOLOR_FORCE: 1
@@ -7,6 +9,16 @@ environment:
matrix:
- MSYSTEM: MINGW64
- MSYSTEM: MSVC
+ - MSYSTEM: LINUX
+
+matrix:
+ exclude:
+ - image: Visual Studio 2017
+ MSYSTEM: LINUX
+ - image: Ubuntu1804
+ MSYSTEM: MINGW64
+ - image: Ubuntu1804
+ MSYSTEM: MSVC
for:
-
@@ -16,7 +28,6 @@ for:
build_script:
ps: "C:\\msys64\\usr\\bin\\bash -lc @\"\n
pacman -S --quiet --noconfirm --needed re2c 2>&1\n
- sed -i 's|cmd /c `$ar cqs `$out.tmp `$in \\&\\& move /Y `$out.tmp `$out|`$ar crs `$out `$in|g' configure.py\n
./configure.py --bootstrap --platform mingw 2>&1\n
./ninja all\n
./ninja_test 2>&1\n
@@ -37,4 +48,14 @@ for:
python misc/ninja_syntax_test.py
+ - matrix:
+ only:
+ - image: Ubuntu1804
+ build_script:
+ - ./configure.py --bootstrap
+ - ./ninja all
+ - ./ninja_test
+ - misc/ninja_syntax_test.py
+ - misc/output_test.py
+
test: off
diff --git a/configure.py b/configure.py
index 850bb98..7d8ce90 100755
--- a/configure.py
+++ b/configure.py
@@ -60,6 +60,8 @@ class Platform(object):
self._platform = 'netbsd'
elif self._platform.startswith('aix'):
self._platform = 'aix'
+ elif self._platform.startswith('os400'):
+ self._platform = 'os400'
elif self._platform.startswith('dragonfly'):
self._platform = 'dragonfly'
@@ -97,6 +99,9 @@ class Platform(object):
def is_aix(self):
return self._platform == 'aix'
+ def is_os400_pase(self):
+ return self._platform == 'os400' or os.uname().sysname.startswith('OS400')
+
def uses_usr_local(self):
return self._platform in ('freebsd', 'openbsd', 'bitrig', 'dragonfly', 'netbsd')
@@ -351,7 +356,7 @@ else:
except:
pass
if platform.is_mingw():
- cflags += ['-D_WIN32_WINNT=0x0501']
+ cflags += ['-D_WIN32_WINNT=0x0601', '-D__USE_MINGW_ANSI_STDIO=1']
ldflags = ['-L$builddir']
if platform.uses_usr_local():
cflags.append('-I/usr/local/include')
@@ -432,7 +437,7 @@ if host.is_msvc():
description='LIB $out')
elif host.is_mingw():
n.rule('ar',
- command='cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out',
+ command='$ar crs $out $in',
description='AR $out')
else:
n.rule('ar',
@@ -536,7 +541,7 @@ if platform.is_msvc():
else:
libs.append('-lninja')
-if platform.is_aix():
+if platform.is_aix() and not platform.is_os400_pase():
libs.append('-lperfstat')
all_targets = []
diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc
index 0bada17..238e6c4 100644
--- a/doc/manual.asciidoc
+++ b/doc/manual.asciidoc
@@ -271,6 +271,9 @@ Files created but not referenced in the graph are not removed. This
tool takes in account the +-v+ and the +-n+ options (note that +-n+
implies +-v+).
+`cleandead`:: remove files produced by previous builds that are no longer in the
+manifest. _Available since Ninja 1.10._
+
`compdb`:: given a list of rules, each of which is expected to be a
C family language compiler rule whose first input is the name of the
source file, prints on standard output a compilation database in the
@@ -283,6 +286,9 @@ target, show just the target's dependencies. _Available since Ninja 1.4._
`recompact`:: recompact the `.ninja_deps` file. _Available since Ninja 1.4._
+`restat`:: updates all recorded file modification timestamps in the `.ninja_log`
+file. _Available since Ninja 1.10._
+
`rules`:: output the list of all rules (eventually with their description
if they have one). It can be used to know which rule name to pass to
+ninja -t targets rule _name_+ or +ninja -t compdb+.
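
The two new tools documented above are invoked like any other `-t` tool; a minimal sketch (the explicit targets passed to `restat` are optional and purely illustrative):

```
# Remove outputs recorded in the build log that the current manifest
# no longer produces.
ninja -t cleandead
# Re-stat recorded outputs and update their mtimes in .ninja_log;
# with no arguments, all logged outputs are restatted.
ninja -t restat out1 out2
```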
diff --git a/misc/output_test.py b/misc/output_test.py
index 1dcde10..3fd9c32 100755
--- a/misc/output_test.py
+++ b/misc/output_test.py
@@ -18,12 +18,15 @@ if 'NINJA_STATUS' in default_env:
if 'CLICOLOR_FORCE' in default_env:
del default_env['CLICOLOR_FORCE']
default_env['TERM'] = ''
+NINJA_PATH = os.path.abspath('./ninja')
def run(build_ninja, flags='', pipe=False, env=default_env):
- with tempfile.NamedTemporaryFile('w') as f:
- f.write(build_ninja)
- f.flush()
- ninja_cmd = './ninja {} -f {}'.format(flags, f.name)
+ with tempfile.TemporaryDirectory() as d:
+ os.chdir(d)
+ with open('build.ninja', 'w') as f:
+ f.write(build_ninja)
+ f.flush()
+ ninja_cmd = '{} {}'.format(NINJA_PATH, flags)
try:
if pipe:
output = subprocess.check_output([ninja_cmd], shell=True, env=env)
@@ -56,7 +59,7 @@ build b: echo
delay = 2
build c: echo
delay = 1
-'''),
+''', '-j3'),
'''[1/3] echo c\x1b[K
c
[2/3] echo b\x1b[K
@@ -99,5 +102,10 @@ red
\x1b[31mred\x1b[0m
''')
+ def test_pr_1685(self):
+ # Running those tools without .ninja_deps and .ninja_log shouldn't fail.
+ self.assertEqual(run('', flags='-t recompact'), '')
+ self.assertEqual(run('', flags='-t restat'), '')
+
if __name__ == '__main__':
unittest.main()
diff --git a/src/build.cc b/src/build.cc
index 8ef88b5..cd8df4e 100644
--- a/src/build.cc
+++ b/src/build.cc
@@ -47,7 +47,7 @@ struct DryRunCommandRunner : public CommandRunner {
virtual ~DryRunCommandRunner() {}
// Overridden from CommandRunner:
- virtual bool CanRunMore();
+ virtual bool CanRunMore() const;
virtual bool StartCommand(Edge* edge);
virtual bool WaitForCommand(Result* result);
@@ -55,7 +55,7 @@ struct DryRunCommandRunner : public CommandRunner {
queue<Edge*> finished_;
};
-bool DryRunCommandRunner::CanRunMore() {
+bool DryRunCommandRunner::CanRunMore() const {
return true;
}
@@ -96,7 +96,7 @@ void BuildStatus::PlanHasTotalEdges(int total) {
total_edges_ = total;
}
-void BuildStatus::BuildEdgeStarted(Edge* edge) {
+void BuildStatus::BuildEdgeStarted(const Edge* edge) {
assert(running_edges_.find(edge) == running_edges_.end());
int start_time = (int)(GetTimeMillis() - start_time_millis_);
running_edges_.insert(make_pair(edge, start_time));
@@ -290,7 +290,7 @@ string BuildStatus::FormatProgressStatus(
return out;
}
-void BuildStatus::PrintStatus(Edge* edge, EdgeStatus status) {
+void BuildStatus::PrintStatus(const Edge* edge, EdgeStatus status) {
if (config_.verbosity == BuildConfig::QUIET)
return;
@@ -319,11 +319,11 @@ void Plan::Reset() {
want_.clear();
}
-bool Plan::AddTarget(Node* node, string* err) {
+bool Plan::AddTarget(const Node* node, string* err) {
return AddSubTarget(node, NULL, err, NULL);
}
-bool Plan::AddSubTarget(Node* node, Node* dependent, string* err,
+bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err,
set<Edge*>* dyndep_walk) {
Edge* edge = node->in_edge();
if (!edge) { // Leaf node.
@@ -373,7 +373,7 @@ bool Plan::AddSubTarget(Node* node, Node* dependent, string* err,
return true;
}
-void Plan::EdgeWanted(Edge* edge) {
+void Plan::EdgeWanted(const Edge* edge) {
++wanted_edges_;
if (!edge->is_phony())
++command_edges_;
@@ -533,7 +533,7 @@ bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) {
return true;
}
-bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node,
+bool Plan::DyndepsLoaded(DependencyScan* scan, const Node* node,
const DyndepFile& ddf, string* err) {
// Recompute the dirty state of all our direct and indirect dependents now
// that our dyndep information has been loaded.
@@ -601,7 +601,7 @@ bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node,
return true;
}
-bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node,
+bool Plan::RefreshDyndepDependents(DependencyScan* scan, const Node* node,
string* err) {
// Collect the transitive closure of dependents and mark their edges
// as not yet visited by RecomputeDirty.
@@ -635,7 +635,7 @@ bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node,
return true;
}
-void Plan::UnmarkDependents(Node* node, set<Node*>* dependents) {
+void Plan::UnmarkDependents(const Node* node, set<Node*>* dependents) {
for (vector<Edge*>::const_iterator oe = node->out_edges().begin();
oe != node->out_edges().end(); ++oe) {
Edge* edge = *oe;
@@ -655,9 +655,9 @@ void Plan::UnmarkDependents(Node* node, set<Node*>* dependents) {
}
}
-void Plan::Dump() {
+void Plan::Dump() const {
printf("pending: %d\n", (int)want_.size());
- for (map<Edge*, Want>::iterator e = want_.begin(); e != want_.end(); ++e) {
+ for (map<Edge*, Want>::const_iterator e = want_.begin(); e != want_.end(); ++e) {
if (e->second != kWantNothing)
printf("want ");
e->first->Dump();
@@ -668,7 +668,7 @@ void Plan::Dump() {
struct RealCommandRunner : public CommandRunner {
explicit RealCommandRunner(const BuildConfig& config) : config_(config) {}
virtual ~RealCommandRunner() {}
- virtual bool CanRunMore();
+ virtual bool CanRunMore() const;
virtual bool StartCommand(Edge* edge);
virtual bool WaitForCommand(Result* result);
virtual vector<Edge*> GetActiveEdges();
@@ -676,12 +676,12 @@ struct RealCommandRunner : public CommandRunner {
const BuildConfig& config_;
SubprocessSet subprocs_;
- map<Subprocess*, Edge*> subproc_to_edge_;
+ map<const Subprocess*, Edge*> subproc_to_edge_;
};
vector<Edge*> RealCommandRunner::GetActiveEdges() {
vector<Edge*> edges;
- for (map<Subprocess*, Edge*>::iterator e = subproc_to_edge_.begin();
+ for (map<const Subprocess*, Edge*>::iterator e = subproc_to_edge_.begin();
e != subproc_to_edge_.end(); ++e)
edges.push_back(e->second);
return edges;
@@ -691,7 +691,7 @@ void RealCommandRunner::Abort() {
subprocs_.Clear();
}
-bool RealCommandRunner::CanRunMore() {
+bool RealCommandRunner::CanRunMore() const {
size_t subproc_number =
subprocs_.running_.size() + subprocs_.finished_.size();
return (int)subproc_number < config_.parallelism
@@ -720,7 +720,7 @@ bool RealCommandRunner::WaitForCommand(Result* result) {
result->status = subproc->Finish();
result->output = subproc->GetOutput();
- map<Subprocess*, Edge*>::iterator e = subproc_to_edge_.find(subproc);
+ map<const Subprocess*, Edge*>::iterator e = subproc_to_edge_.find(subproc);
result->edge = e->second;
subproc_to_edge_.erase(e);
@@ -1033,14 +1033,16 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
}
if (!deps_type.empty() && !config_.dry_run) {
- assert(edge->outputs_.size() == 1 && "should have been rejected by parser");
- Node* out = edge->outputs_[0];
- TimeStamp deps_mtime = disk_interface_->Stat(out->path(), err);
- if (deps_mtime == -1)
- return false;
- if (!scan_.deps_log()->RecordDeps(out, deps_mtime, deps_nodes)) {
- *err = string("Error writing to deps log: ") + strerror(errno);
- return false;
+ assert(edge->outputs_.size() >= 1 && "should have been rejected by parser");
+ for (std::vector<Node*>::const_iterator o = edge->outputs_.begin();
+ o != edge->outputs_.end(); ++o) {
+ TimeStamp deps_mtime = disk_interface_->Stat((*o)->path(), err);
+ if (deps_mtime == -1)
+ return false;
+ if (!scan_.deps_log()->RecordDeps(*o, deps_mtime, deps_nodes)) {
+ *err = std::string("Error writing to deps log: ") + strerror(errno);
+ return false;
+ }
}
}
return true;
diff --git a/src/build.h b/src/build.h
index ab59f0c..97773c4 100644
--- a/src/build.h
+++ b/src/build.h
@@ -46,7 +46,7 @@ struct Plan {
/// Add a target to our plan (including all its dependencies).
/// Returns false if we don't need to build this target; may
/// fill in |err| with an error message if there's a problem.
- bool AddTarget(Node* node, string* err);
+ bool AddTarget(const Node* node, string* err);
// Pop a ready edge off the queue of edges to build.
// Returns NULL if there's no work to do.
@@ -56,7 +56,7 @@ struct Plan {
bool more_to_do() const { return wanted_edges_ > 0 && command_edges_ > 0; }
/// Dumps the current state of the plan.
- void Dump();
+ void Dump() const;
enum EdgeResult {
kEdgeFailed,
@@ -81,12 +81,12 @@ struct Plan {
/// Update the build plan to account for modifications made to the graph
/// by information loaded from a dyndep file.
- bool DyndepsLoaded(DependencyScan* scan, Node* node,
+ bool DyndepsLoaded(DependencyScan* scan, const Node* node,
const DyndepFile& ddf, string* err);
private:
- bool RefreshDyndepDependents(DependencyScan* scan, Node* node, string* err);
- void UnmarkDependents(Node* node, set<Node*>* dependents);
- bool AddSubTarget(Node* node, Node* dependent, string* err,
+ bool RefreshDyndepDependents(DependencyScan* scan, const Node* node, string* err);
+ void UnmarkDependents(const Node* node, set<Node*>* dependents);
+ bool AddSubTarget(const Node* node, const Node* dependent, string* err,
set<Edge*>* dyndep_walk);
/// Update plan with knowledge that the given node is up to date.
@@ -108,7 +108,7 @@ private:
kWantToFinish
};
- void EdgeWanted(Edge* edge);
+ void EdgeWanted(const Edge* edge);
bool EdgeMaybeReady(map<Edge*, Want>::iterator want_e, string* err);
/// Submits a ready edge as a candidate for execution.
@@ -138,7 +138,7 @@ private:
/// RealCommandRunner is an implementation that actually runs commands.
struct CommandRunner {
virtual ~CommandRunner() {}
- virtual bool CanRunMore() = 0;
+ virtual bool CanRunMore() const = 0;
virtual bool StartCommand(Edge* edge) = 0;
/// The result of waiting for a command.
@@ -240,7 +240,7 @@ struct Builder {
struct BuildStatus {
explicit BuildStatus(const BuildConfig& config);
void PlanHasTotalEdges(int total);
- void BuildEdgeStarted(Edge* edge);
+ void BuildEdgeStarted(const Edge* edge);
void BuildEdgeFinished(Edge* edge, bool success, const string& output,
int* start_time, int* end_time);
void BuildLoadDyndeps();
@@ -261,7 +261,7 @@ struct BuildStatus {
EdgeStatus status) const;
private:
- void PrintStatus(Edge* edge, EdgeStatus status);
+ void PrintStatus(const Edge* edge, EdgeStatus status);
const BuildConfig& config_;
@@ -271,7 +271,7 @@ struct BuildStatus {
int started_edges_, finished_edges_, total_edges_;
/// Map of running edge to time the edge started running.
- typedef map<Edge*, int> RunningEdgeMap;
+ typedef map<const Edge*, int> RunningEdgeMap;
RunningEdgeMap running_edges_;
/// Prints progress output.
diff --git a/src/build_log.cc b/src/build_log.cc
index 774f72f..98543b6 100644
--- a/src/build_log.cc
+++ b/src/build_log.cc
@@ -21,6 +21,7 @@
#endif
#include "build_log.h"
+#include "disk_interface.h"
#include <errno.h>
#include <stdlib.h>
@@ -49,7 +50,6 @@
namespace {
const char kFileSignature[] = "# ninja log v%d\n";
-const char kFileColumnLabels[] = "# start_time end_time mtime command hash\n";
const int kOldestSupportedVersion = 4;
const int kCurrentVersion = 5;
@@ -145,8 +145,7 @@ bool BuildLog::OpenForWrite(const string& path, const BuildLogUser& user,
fseek(log_file_, 0, SEEK_END);
if (ftell(log_file_) == 0) {
- if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0 ||
- fprintf(log_file_, kFileColumnLabels) < 0) {
+ if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0) {
*err = strerror(errno);
return false;
}
@@ -243,14 +242,14 @@ struct LineReader {
char* line_end_;
};
-bool BuildLog::Load(const string& path, string* err) {
+LoadStatus BuildLog::Load(const string& path, string* err) {
METRIC_RECORD(".ninja_log load");
FILE* file = fopen(path.c_str(), "r");
if (!file) {
if (errno == ENOENT)
- return true;
+ return LOAD_NOT_FOUND;
*err = strerror(errno);
- return false;
+ return LOAD_ERROR;
}
int log_version = 0;
@@ -271,7 +270,7 @@ bool BuildLog::Load(const string& path, string* err) {
unlink(path.c_str());
// Don't report this as a failure. An empty build log will cause
// us to rebuild the outputs anyway.
- return true;
+ return LOAD_SUCCESS;
}
}
@@ -341,7 +340,7 @@ bool BuildLog::Load(const string& path, string* err) {
fclose(file);
if (!line_start) {
- return true; // file was empty
+ return LOAD_SUCCESS; // file was empty
}
// Decide whether it's time to rebuild the log:
@@ -356,7 +355,7 @@ bool BuildLog::Load(const string& path, string* err) {
needs_recompaction_ = true;
}
- return true;
+ return LOAD_SUCCESS;
}
BuildLog::LogEntry* BuildLog::LookupByOutput(const string& path) {
@@ -420,3 +419,60 @@ bool BuildLog::Recompact(const string& path, const BuildLogUser& user,
return true;
}
+
+bool BuildLog::Restat(const StringPiece path,
+ const DiskInterface& disk_interface,
+ const int output_count, char** outputs,
+ std::string* const err) {
+ METRIC_RECORD(".ninja_log restat");
+
+ Close();
+ std::string temp_path = path.AsString() + ".restat";
+ FILE* f = fopen(temp_path.c_str(), "wb");
+ if (!f) {
+ *err = strerror(errno);
+ return false;
+ }
+
+ if (fprintf(f, kFileSignature, kCurrentVersion) < 0) {
+ *err = strerror(errno);
+ fclose(f);
+ return false;
+ }
+ for (Entries::iterator i = entries_.begin(); i != entries_.end(); ++i) {
+ bool skip = output_count > 0;
+ for (int j = 0; j < output_count; ++j) {
+ if (i->second->output == outputs[j]) {
+ skip = false;
+ break;
+ }
+ }
+ if (!skip) {
+ const TimeStamp mtime = disk_interface.Stat(i->second->output, err);
+ if (mtime == -1) {
+ fclose(f);
+ return false;
+ }
+ i->second->mtime = mtime;
+ }
+
+ if (!WriteEntry(f, *i->second)) {
+ *err = strerror(errno);
+ fclose(f);
+ return false;
+ }
+ }
+
+ fclose(f);
+ if (unlink(path.str_) < 0) {
+ *err = strerror(errno);
+ return false;
+ }
+
+ if (rename(temp_path.c_str(), path.str_) < 0) {
+ *err = strerror(errno);
+ return false;
+ }
+
+ return true;
+}
diff --git a/src/build_log.h b/src/build_log.h
index 5268fab..ebe0530 100644
--- a/src/build_log.h
+++ b/src/build_log.h
@@ -20,9 +20,11 @@
using namespace std;
#include "hash_map.h"
+#include "load_status.h"
#include "timestamp.h"
#include "util.h" // uint64_t
+struct DiskInterface;
struct Edge;
/// Can answer questions about the manifest for the BuildLog.
@@ -49,7 +51,7 @@ struct BuildLog {
void Close();
/// Load the on-disk log.
- bool Load(const string& path, string* err);
+ LoadStatus Load(const string& path, string* err);
struct LogEntry {
string output;
@@ -81,6 +83,10 @@ struct BuildLog {
/// Rewrite the known log entries, throwing away old data.
bool Recompact(const string& path, const BuildLogUser& user, string* err);
+ /// Restat all outputs in the log
+ bool Restat(StringPiece path, const DiskInterface& disk_interface,
+ int output_count, char** outputs, std::string* err);
+
typedef ExternalStringHashMap<LogEntry*>::Type Entries;
const Entries& entries() const { return entries_; }
diff --git a/src/build_log_test.cc b/src/build_log_test.cc
index eea818f..a8b1733 100644
--- a/src/build_log_test.cc
+++ b/src/build_log_test.cc
@@ -25,6 +25,7 @@
#include <sys/types.h>
#include <unistd.h>
#endif
+#include <cassert>
namespace {
@@ -70,9 +71,8 @@ TEST_F(BuildLogTest, WriteRead) {
}
TEST_F(BuildLogTest, FirstWriteAddsSignature) {
- const char kExpectedContent[] = "# ninja log vX\n"
- "# start_time end_time mtime command hash\n";
- const size_t kVersionPos = 13; // Points at 'X'.
+ const char kExpectedVersion[] = "# ninja log vX\n";
+ const size_t kVersionPos = strlen(kExpectedVersion) - 2; // Points at 'X'.
BuildLog log;
string contents, err;
@@ -85,7 +85,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) {
ASSERT_EQ("", err);
if (contents.size() >= kVersionPos)
contents[kVersionPos] = 'X';
- EXPECT_EQ(kExpectedContent, contents);
+ EXPECT_EQ(kExpectedVersion, contents);
// Opening the file anew shouldn't add a second version string.
EXPECT_TRUE(log.OpenForWrite(kTestFilename, *this, &err));
@@ -97,7 +97,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) {
ASSERT_EQ("", err);
if (contents.size() >= kVersionPos)
contents[kVersionPos] = 'X';
- EXPECT_EQ(kExpectedContent, contents);
+ EXPECT_EQ(kExpectedVersion, contents);
}
TEST_F(BuildLogTest, DoubleEntry) {
@@ -151,7 +151,7 @@ TEST_F(BuildLogTest, Truncate) {
BuildLog log3;
err.clear();
- ASSERT_TRUE(log3.Load(kTestFilename, &err) || !err.empty());
+ ASSERT_TRUE(log3.Load(kTestFilename, &err) == LOAD_SUCCESS || !err.empty());
}
}
@@ -217,6 +217,54 @@ TEST_F(BuildLogTest, DuplicateVersionHeader) {
ASSERT_NO_FATAL_FAILURE(AssertHash("command2", e->command_hash));
}
+struct TestDiskInterface : public DiskInterface {
+ virtual TimeStamp Stat(const string& path, string* err) const {
+ return 4;
+ }
+ virtual bool WriteFile(const string& path, const string& contents) {
+ assert(false);
+ return true;
+ }
+ virtual bool MakeDir(const string& path) {
+ assert(false);
+ return false;
+ }
+ virtual Status ReadFile(const string& path, string* contents, string* err) {
+ assert(false);
+ return NotFound;
+ }
+ virtual int RemoveFile(const string& path) {
+ assert(false);
+ return 0;
+ }
+};
+
+TEST_F(BuildLogTest, Restat) {
+ FILE* f = fopen(kTestFilename, "wb");
+ fprintf(f, "# ninja log v4\n"
+ "1\t2\t3\tout\tcommand\n");
+ fclose(f);
+ std::string err;
+ BuildLog log;
+ EXPECT_TRUE(log.Load(kTestFilename, &err));
+ ASSERT_EQ("", err);
+ BuildLog::LogEntry* e = log.LookupByOutput("out");
+ ASSERT_EQ(3, e->mtime);
+
+ TestDiskInterface testDiskInterface;
+ char out2[] = { 'o', 'u', 't', '2' };
+ char* filter2[] = { out2 };
+ EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, 1, filter2, &err));
+ ASSERT_EQ("", err);
+ e = log.LookupByOutput("out");
+ ASSERT_EQ(3, e->mtime); // unchanged, since the filter doesn't match
+
+ EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, 0, NULL, &err));
+ ASSERT_EQ("", err);
+ e = log.LookupByOutput("out");
+ ASSERT_EQ(4, e->mtime);
+}
+
TEST_F(BuildLogTest, VeryLongInputLine) {
// Ninja's build log buffer is currently 256kB. Lines longer than that are
// silently ignored, but don't affect parsing of other lines.
diff --git a/src/build_test.cc b/src/build_test.cc
index b5dbc6c..426e825 100644
--- a/src/build_test.cc
+++ b/src/build_test.cc
@@ -470,7 +470,7 @@ struct FakeCommandRunner : public CommandRunner {
max_active_edges_(1), fs_(fs) {}
// CommandRunner impl
- virtual bool CanRunMore();
+ virtual bool CanRunMore() const;
virtual bool StartCommand(Edge* edge);
virtual bool WaitForCommand(Result* result);
virtual vector<Edge*> GetActiveEdges();
@@ -488,6 +488,11 @@ struct BuildTest : public StateTestWithBuiltinRules, public BuildLogUser {
status_(config_) {
}
+ BuildTest(DepsLog* log) : config_(MakeConfig()), command_runner_(&fs_),
+ builder_(&state_, config_, NULL, log, &fs_),
+ status_(config_) {
+ }
+
virtual void SetUp() {
StateTestWithBuiltinRules::SetUp();
@@ -569,7 +574,7 @@ void BuildTest::RebuildTarget(const string& target, const char* manifest,
builder.command_runner_.release();
}
-bool FakeCommandRunner::CanRunMore() {
+bool FakeCommandRunner::CanRunMore() const {
return active_edges_.size() < max_active_edges_;
}
@@ -582,6 +587,8 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
edge->rule().name() == "cat_rsp" ||
edge->rule().name() == "cat_rsp_out" ||
edge->rule().name() == "cc" ||
+ edge->rule().name() == "cp_multi_msvc" ||
+ edge->rule().name() == "cp_multi_gcc" ||
edge->rule().name() == "touch" ||
edge->rule().name() == "touch-interrupt" ||
edge->rule().name() == "touch-fail-tick2") {
@@ -643,6 +650,14 @@ bool FakeCommandRunner::WaitForCommand(Result* result) {
return true;
}
+ if (edge->rule().name() == "cp_multi_msvc") {
+ const std::string prefix = edge->GetBinding("msvc_deps_prefix");
+ for (std::vector<Node*>::iterator in = edge->inputs_.begin();
+ in != edge->inputs_.end(); ++in) {
+ result->output += prefix + (*in)->path() + '\n';
+ }
+ }
+
if (edge->rule().name() == "fail" ||
(edge->rule().name() == "touch-fail-tick2" && fs_->now_ == 2))
result->status = ExitFailure;
@@ -1855,6 +1870,214 @@ TEST_F(BuildTest, FailedDepsParse) {
EXPECT_EQ("subcommand failed", err);
}
+struct BuildWithQueryDepsLogTest : public BuildTest {
+ BuildWithQueryDepsLogTest() : BuildTest(&log_) {
+ }
+
+ ~BuildWithQueryDepsLogTest() {
+ log_.Close();
+ }
+
+ virtual void SetUp() {
+ BuildTest::SetUp();
+
+ temp_dir_.CreateAndEnter("BuildWithQueryDepsLogTest");
+
+ std::string err;
+ ASSERT_TRUE(log_.OpenForWrite("ninja_deps", &err));
+ ASSERT_EQ("", err);
+ }
+
+ ScopedTempDir temp_dir_;
+
+ DepsLog log_;
+};
+
+/// Test a MSVC-style deps log with multiple outputs.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileMSVC) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_msvc\n"
+" command = echo 'using $in' && for file in $out; do cp $in $$file; done\n"
+" deps = msvc\n"
+" msvc_deps_prefix = using \n"
+"build out1 out2: cp_multi_msvc in1\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'using in1' && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(1, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(1, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+}
+
+/// Test a GCC-style deps log with multiple outputs.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOneLine) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_gcc\n"
+" command = echo '$out: $in' > in.d && for file in $out; do cp in1 $$file; done\n"
+" deps = gcc\n"
+" depfile = in.d\n"
+"build out1 out2: cp_multi_gcc in1 in2\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ fs_.Create("in.d", "out1 out2: in1 in2");
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'out1 out2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(2, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out1_deps->nodes[1]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(2, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out2_deps->nodes[1]->path());
+}
+
+/// Test a GCC-style deps log with multiple outputs using a line per input.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCMultiLineInput) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_gcc\n"
+" command = echo '$out: in1\\n$out: in2' > in.d && for file in $out; do cp in1 $$file; done\n"
+" deps = gcc\n"
+" depfile = in.d\n"
+"build out1 out2: cp_multi_gcc in1 in2\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ fs_.Create("in.d", "out1 out2: in1\nout1 out2: in2");
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'out1 out2: in1\\nout1 out2: in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(2, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out1_deps->nodes[1]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(2, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out2_deps->nodes[1]->path());
+}
+
+/// Test a GCC-style deps log with multiple outputs using a line per output.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCMultiLineOutput) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_gcc\n"
+" command = echo 'out1: $in\\nout2: $in' > in.d && for file in $out; do cp in1 $$file; done\n"
+" deps = gcc\n"
+" depfile = in.d\n"
+"build out1 out2: cp_multi_gcc in1 in2\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ fs_.Create("in.d", "out1: in1 in2\nout2: in1 in2");
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'out1: in1 in2\\nout2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(2, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out1_deps->nodes[1]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(2, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out2_deps->nodes[1]->path());
+}
+
+/// Test a GCC-style deps log with multiple outputs mentioning only the main output.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlyMainOutput) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_gcc\n"
+" command = echo 'out1: $in' > in.d && for file in $out; do cp in1 $$file; done\n"
+" deps = gcc\n"
+" depfile = in.d\n"
+"build out1 out2: cp_multi_gcc in1 in2\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ fs_.Create("in.d", "out1: in1 in2");
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'out1: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(2, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out1_deps->nodes[1]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(2, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out2_deps->nodes[1]->path());
+}
+
+/// Test a GCC-style deps log with multiple outputs mentioning only the secondary output.
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlySecondaryOutput) {
+ // Note: This ends up short-circuiting the node creation due to the primary
+ // output not being present, but it should still work.
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule cp_multi_gcc\n"
+" command = echo 'out2: $in' > in.d && for file in $out; do cp in1 $$file; done\n"
+" deps = gcc\n"
+" depfile = in.d\n"
+"build out1 out2: cp_multi_gcc in1 in2\n"));
+
+ std::string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ fs_.Create("in.d", "out2: in1 in2");
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("echo 'out2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]);
+
+ Node* out1_node = state_.LookupNode("out1");
+ DepsLog::Deps* out1_deps = log_.GetDeps(out1_node);
+ EXPECT_EQ(2, out1_deps->node_count);
+ EXPECT_EQ("in1", out1_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out1_deps->nodes[1]->path());
+
+ Node* out2_node = state_.LookupNode("out2");
+ DepsLog::Deps* out2_deps = log_.GetDeps(out2_node);
+ EXPECT_EQ(2, out2_deps->node_count);
+ EXPECT_EQ("in1", out2_deps->nodes[0]->path());
+ EXPECT_EQ("in2", out2_deps->nodes[1]->path());
+}
+
/// Tests of builds involving deps logs necessarily must span
/// multiple builds. We reuse methods on BuildTest but not the
/// builder_ it sets up, because we want pristine objects for
diff --git a/src/clean.cc b/src/clean.cc
index d1f221d..ec6e7d7 100644
--- a/src/clean.cc
+++ b/src/clean.cc
@@ -124,6 +124,19 @@ int Cleaner::CleanAll(bool generator) {
return status_;
}
+int Cleaner::CleanDead(const BuildLog::Entries& entries) {
+ Reset();
+ PrintHeader();
+ for (BuildLog::Entries::const_iterator i = entries.begin(); i != entries.end(); ++i) {
+ Node* n = state_->LookupNode(i->first);
+ if (!n || !n->in_edge()) {
+ Remove(i->first.AsString());
+ }
+ }
+ PrintFooter();
+ return status_;
+}
+
void Cleaner::DoCleanTarget(Node* target) {
if (Edge* e = target->in_edge()) {
// Do not try to remove phony targets
diff --git a/src/clean.h b/src/clean.h
index d044fb1..4c02ff6 100644
--- a/src/clean.h
+++ b/src/clean.h
@@ -20,6 +20,7 @@
#include "build.h"
#include "dyndep.h"
+#include "build_log.h"
using namespace std;
@@ -58,6 +59,10 @@ struct Cleaner {
/// Clean the file produced by the given @a rules.
/// @return non-zero if an error occurs.
int CleanRules(int rule_count, char* rules[]);
+ /// Clean the files produced by previous builds that are no longer in the
+ /// manifest.
+ /// @return non-zero if an error occurs.
+ int CleanDead(const BuildLog::Entries& entries);
  /// @return the number of files cleaned.
int cleaned_files_count() const {
diff --git a/src/clean_test.cc b/src/clean_test.cc
index 45187f4..d068f3c 100644
--- a/src/clean_test.cc
+++ b/src/clean_test.cc
@@ -15,8 +15,17 @@
#include "clean.h"
#include "build.h"
+#include "util.h"
#include "test.h"
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+namespace {
+
+const char kTestFilename[] = "CleanTest-tempfile";
+
struct CleanTest : public StateTestWithBuiltinRules {
VirtualFileSystem fs_;
BuildConfig config_;
@@ -454,3 +463,76 @@ TEST_F(CleanTest, CleanDepFileAndRspFileWithSpaces) {
EXPECT_EQ(0, fs_.Stat("out 1.d", &err));
EXPECT_EQ(0, fs_.Stat("out 2.rsp", &err));
}
+
+struct CleanDeadTest : public CleanTest, public BuildLogUser {
+ virtual void SetUp() {
+ // In case a crashing test left a stale file behind.
+ unlink(kTestFilename);
+ CleanTest::SetUp();
+ }
+ virtual void TearDown() {
+ unlink(kTestFilename);
+ }
+ virtual bool IsPathDead(StringPiece) const { return false; }
+};
+
+TEST_F(CleanDeadTest, CleanDead) {
+ State state;
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state,
+"rule cat\n"
+" command = cat $in > $out\n"
+"build out1: cat in\n"
+"build out2: cat in\n"
+));
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"build out2: cat in\n"
+));
+ fs_.Create("in", "");
+ fs_.Create("out1", "");
+ fs_.Create("out2", "");
+
+ BuildLog log1;
+ string err;
+ EXPECT_TRUE(log1.OpenForWrite(kTestFilename, *this, &err));
+ ASSERT_EQ("", err);
+ log1.RecordCommand(state.edges_[0], 15, 18);
+ log1.RecordCommand(state.edges_[1], 20, 25);
+ log1.Close();
+
+ BuildLog log2;
+ EXPECT_TRUE(log2.Load(kTestFilename, &err));
+ ASSERT_EQ("", err);
+ ASSERT_EQ(2u, log2.entries().size());
+ ASSERT_TRUE(log2.LookupByOutput("out1"));
+ ASSERT_TRUE(log2.LookupByOutput("out2"));
+
+  // First use the manifest that describes how to build out1.
+ Cleaner cleaner1(&state, config_, &fs_);
+ EXPECT_EQ(0, cleaner1.CleanDead(log2.entries()));
+ EXPECT_EQ(0, cleaner1.cleaned_files_count());
+ EXPECT_EQ(0u, fs_.files_removed_.size());
+ EXPECT_NE(0, fs_.Stat("in", &err));
+ EXPECT_NE(0, fs_.Stat("out1", &err));
+ EXPECT_NE(0, fs_.Stat("out2", &err));
+
+ // Then use the manifest that does not build out1 anymore.
+ Cleaner cleaner2(&state_, config_, &fs_);
+ EXPECT_EQ(0, cleaner2.CleanDead(log2.entries()));
+ EXPECT_EQ(1, cleaner2.cleaned_files_count());
+ EXPECT_EQ(1u, fs_.files_removed_.size());
+ EXPECT_EQ("out1", *(fs_.files_removed_.begin()));
+ EXPECT_NE(0, fs_.Stat("in", &err));
+ EXPECT_EQ(0, fs_.Stat("out1", &err));
+ EXPECT_NE(0, fs_.Stat("out2", &err));
+
+ // Nothing to do now.
+ EXPECT_EQ(0, cleaner2.CleanDead(log2.entries()));
+ EXPECT_EQ(0, cleaner2.cleaned_files_count());
+ EXPECT_EQ(1u, fs_.files_removed_.size());
+ EXPECT_EQ("out1", *(fs_.files_removed_.begin()));
+ EXPECT_NE(0, fs_.Stat("in", &err));
+ EXPECT_EQ(0, fs_.Stat("out1", &err));
+ EXPECT_NE(0, fs_.Stat("out2", &err));
+ log2.Close();
+}
+} // anonymous namespace
diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc
index 6faeac6..90d4a8a 100644
--- a/src/depfile_parser.cc
+++ b/src/depfile_parser.cc
@@ -16,6 +16,8 @@
#include "depfile_parser.h"
#include "util.h"
+#include <algorithm>
+
DepfileParser::DepfileParser(DepfileParserOptions options)
: options_(options)
{
@@ -48,10 +50,8 @@ bool DepfileParser::Parse(string* content, string* err) {
char* in = &(*content)[0];
char* end = in + content->size();
bool have_target = false;
- bool have_secondary_target_on_this_rule = false;
- bool have_newline_since_primary_target = false;
- bool warned_distinct_target_lines = false;
bool parsing_targets = true;
+ bool poisoned_input = false;
while (in < end) {
bool have_newline = false;
// out: current output point (typically same as in, but can fall behind
@@ -294,41 +294,32 @@ yy28:
}
if (len > 0) {
- if (is_dependency) {
- if (have_secondary_target_on_this_rule) {
- if (!have_newline_since_primary_target) {
- *err = "depfile has multiple output paths";
- return false;
- } else if (options_.depfile_distinct_target_lines_action_ ==
- kDepfileDistinctTargetLinesActionError) {
- *err =
- "depfile has multiple output paths (on separate lines)"
- " [-w depfilemulti=err]";
+ StringPiece piece = StringPiece(filename, len);
+ // If we've seen this as an input before, skip it.
+ std::vector<StringPiece>::iterator pos = std::find(ins_.begin(), ins_.end(), piece);
+ if (pos == ins_.end()) {
+ if (is_dependency) {
+ if (poisoned_input) {
+ *err = "inputs may not also have inputs";
return false;
- } else {
- if (!warned_distinct_target_lines) {
- warned_distinct_target_lines = true;
- Warning("depfile has multiple output paths (on separate lines); "
- "continuing anyway [-w depfilemulti=warn]");
- }
- continue;
}
+ // New input.
+ ins_.push_back(piece);
+ } else {
+ // Check for a new output.
+ if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end())
+ outs_.push_back(piece);
}
- ins_.push_back(StringPiece(filename, len));
- } else if (!out_.str_) {
- out_ = StringPiece(filename, len);
- } else if (out_ != StringPiece(filename, len)) {
- have_secondary_target_on_this_rule = true;
+ } else if (!is_dependency) {
+ // We've passed an input on the left side; reject new inputs.
+ poisoned_input = true;
}
}
if (have_newline) {
// A newline ends a rule so the next filename will be a new target.
parsing_targets = true;
- have_secondary_target_on_this_rule = false;
- if (have_target) {
- have_newline_since_primary_target = true;
- }
+ poisoned_input = false;
}
}
if (!have_target) {
diff --git a/src/depfile_parser.h b/src/depfile_parser.h
index be20374..11b1228 100644
--- a/src/depfile_parser.h
+++ b/src/depfile_parser.h
@@ -21,17 +21,8 @@ using namespace std;
#include "string_piece.h"
-enum DepfileDistinctTargetLinesAction {
- kDepfileDistinctTargetLinesActionWarn,
- kDepfileDistinctTargetLinesActionError,
-};
-
struct DepfileParserOptions {
- DepfileParserOptions()
- : depfile_distinct_target_lines_action_(
- kDepfileDistinctTargetLinesActionWarn) {}
- DepfileDistinctTargetLinesAction
- depfile_distinct_target_lines_action_;
+ DepfileParserOptions() {}
};
/// Parser for the dependency information emitted by gcc's -M flags.
@@ -44,7 +35,7 @@ struct DepfileParser {
/// pointers within it.
bool Parse(string* content, string* err);
- StringPiece out_;
+ std::vector<StringPiece> outs_;
vector<StringPiece> ins_;
DepfileParserOptions options_;
};
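A hedged sketch of the new parser behaviour (it assumes only the DepfileParser API shown above and mirrors the tests below): every distinct target is collected into outs_, duplicate inputs are dropped, and a rule whose target was already seen as an input is rejected.

    // Sketch only; the parsed strings must stay alive while their StringPieces are used.
    DepfileParser parser;
    std::string err;
    std::string ok = "foo.o bar.o: foo.cc foo.h\n";
    parser.Parse(&ok, &err);    // succeeds: outs_ == {foo.o, bar.o}, ins_ == {foo.cc, foo.h}

    DepfileParser strict;
    std::string bad = "foo.o: x\nx: y\n";
    strict.Parse(&bad, &err);   // fails: "inputs may not also have inputs"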
diff --git a/src/depfile_parser.in.cc b/src/depfile_parser.in.cc
index 735a0c3..b32b942 100644
--- a/src/depfile_parser.in.cc
+++ b/src/depfile_parser.in.cc
@@ -15,6 +15,8 @@
#include "depfile_parser.h"
#include "util.h"
+#include <algorithm>
+
DepfileParser::DepfileParser(DepfileParserOptions options)
: options_(options)
{
@@ -47,10 +49,8 @@ bool DepfileParser::Parse(string* content, string* err) {
char* in = &(*content)[0];
char* end = in + content->size();
bool have_target = false;
- bool have_secondary_target_on_this_rule = false;
- bool have_newline_since_primary_target = false;
- bool warned_distinct_target_lines = false;
bool parsing_targets = true;
+ bool poisoned_input = false;
while (in < end) {
bool have_newline = false;
// out: current output point (typically same as in, but can fall behind
@@ -146,41 +146,32 @@ bool DepfileParser::Parse(string* content, string* err) {
}
if (len > 0) {
- if (is_dependency) {
- if (have_secondary_target_on_this_rule) {
- if (!have_newline_since_primary_target) {
- *err = "depfile has multiple output paths";
- return false;
- } else if (options_.depfile_distinct_target_lines_action_ ==
- kDepfileDistinctTargetLinesActionError) {
- *err =
- "depfile has multiple output paths (on separate lines)"
- " [-w depfilemulti=err]";
+ StringPiece piece = StringPiece(filename, len);
+ // If we've seen this as an input before, skip it.
+ std::vector<StringPiece>::iterator pos = std::find(ins_.begin(), ins_.end(), piece);
+ if (pos == ins_.end()) {
+ if (is_dependency) {
+ if (poisoned_input) {
+ *err = "inputs may not also have inputs";
return false;
- } else {
- if (!warned_distinct_target_lines) {
- warned_distinct_target_lines = true;
- Warning("depfile has multiple output paths (on separate lines); "
- "continuing anyway [-w depfilemulti=warn]");
- }
- continue;
}
+ // New input.
+ ins_.push_back(piece);
+ } else {
+ // Check for a new output.
+ if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end())
+ outs_.push_back(piece);
}
- ins_.push_back(StringPiece(filename, len));
- } else if (!out_.str_) {
- out_ = StringPiece(filename, len);
- } else if (out_ != StringPiece(filename, len)) {
- have_secondary_target_on_this_rule = true;
+ } else if (!is_dependency) {
+ // We've passed an input on the left side; reject new inputs.
+ poisoned_input = true;
}
}
if (have_newline) {
// A newline ends a rule so the next filename will be a new target.
parsing_targets = true;
- have_secondary_target_on_this_rule = false;
- if (have_target) {
- have_newline_since_primary_target = true;
- }
+ poisoned_input = false;
}
}
if (!have_target) {
diff --git a/src/depfile_parser_test.cc b/src/depfile_parser_test.cc
index 19224f3..bf1a0bc 100644
--- a/src/depfile_parser_test.cc
+++ b/src/depfile_parser_test.cc
@@ -34,7 +34,8 @@ TEST_F(DepfileParserTest, Basic) {
"build/ninja.o: ninja.cc ninja.h eval_env.h manifest_parser.h\n",
&err));
ASSERT_EQ("", err);
- EXPECT_EQ("build/ninja.o", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ EXPECT_EQ("build/ninja.o", parser_.outs_[0].AsString());
EXPECT_EQ(4u, parser_.ins_.size());
}
@@ -54,7 +55,8 @@ TEST_F(DepfileParserTest, Continuation) {
" bar.h baz.h\n",
&err));
ASSERT_EQ("", err);
- EXPECT_EQ("foo.o", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ EXPECT_EQ("foo.o", parser_.outs_[0].AsString());
EXPECT_EQ(2u, parser_.ins_.size());
}
@@ -65,7 +67,8 @@ TEST_F(DepfileParserTest, CarriageReturnContinuation) {
" bar.h baz.h\r\n",
&err));
ASSERT_EQ("", err);
- EXPECT_EQ("foo.o", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ EXPECT_EQ("foo.o", parser_.outs_[0].AsString());
EXPECT_EQ(2u, parser_.ins_.size());
}
@@ -79,8 +82,9 @@ TEST_F(DepfileParserTest, BackSlashes) {
" Project\\Thing\\Bar.tlb \\\n",
&err));
ASSERT_EQ("", err);
+ ASSERT_EQ(1u, parser_.outs_.size());
EXPECT_EQ("Project\\Dir\\Build\\Release8\\Foo\\Foo.res",
- parser_.out_.AsString());
+ parser_.outs_[0].AsString());
EXPECT_EQ(4u, parser_.ins_.size());
}
@@ -90,8 +94,9 @@ TEST_F(DepfileParserTest, Spaces) {
"a\\ bc\\ def: a\\ b c d",
&err));
ASSERT_EQ("", err);
+ ASSERT_EQ(1u, parser_.outs_.size());
EXPECT_EQ("a bc def",
- parser_.out_.AsString());
+ parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("a b",
parser_.ins_[0].AsString());
@@ -111,8 +116,9 @@ TEST_F(DepfileParserTest, MultipleBackslashes) {
"a\\ b\\#c.h: \\\\\\\\\\ \\\\\\\\ \\\\share\\info\\\\#1",
&err));
ASSERT_EQ("", err);
+ ASSERT_EQ(1u, parser_.outs_.size());
EXPECT_EQ("a b#c.h",
- parser_.out_.AsString());
+ parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("\\\\ ",
parser_.ins_[0].AsString());
@@ -130,8 +136,9 @@ TEST_F(DepfileParserTest, Escapes) {
"\\!\\@\\#$$\\%\\^\\&\\[\\]\\\\:",
&err));
ASSERT_EQ("", err);
+ ASSERT_EQ(1u, parser_.outs_.size());
EXPECT_EQ("\\!\\@#$\\%\\^\\&\\[\\]\\\\",
- parser_.out_.AsString());
+ parser_.outs_[0].AsString());
ASSERT_EQ(0u, parser_.ins_.size());
}
@@ -147,8 +154,9 @@ TEST_F(DepfileParserTest, SpecialChars) {
" a[1]b@2%c",
&err));
ASSERT_EQ("", err);
+ ASSERT_EQ(1u, parser_.outs_.size());
EXPECT_EQ("C:/Program Files (x86)/Microsoft crtdefs.h",
- parser_.out_.AsString());
+ parser_.outs_[0].AsString());
ASSERT_EQ(5u, parser_.ins_.size());
EXPECT_EQ("en@quot.header~",
parser_.ins_[0].AsString());
@@ -166,18 +174,25 @@ TEST_F(DepfileParserTest, UnifyMultipleOutputs) {
// check that multiple duplicate targets are properly unified
string err;
EXPECT_TRUE(Parse("foo foo: x y z", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
EXPECT_EQ("z", parser_.ins_[2].AsString());
}
-TEST_F(DepfileParserTest, RejectMultipleDifferentOutputs) {
- // check that multiple different outputs are rejected by the parser
+TEST_F(DepfileParserTest, MultipleDifferentOutputs) {
+ // check that multiple different outputs are accepted by the parser
string err;
- EXPECT_FALSE(Parse("foo bar: x y z", &err));
- ASSERT_EQ("depfile has multiple output paths", err);
+ EXPECT_TRUE(Parse("foo bar: x y z", &err));
+ ASSERT_EQ(2u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
+ ASSERT_EQ("bar", parser_.outs_[1].AsString());
+ ASSERT_EQ(3u, parser_.ins_.size());
+ EXPECT_EQ("x", parser_.ins_[0].AsString());
+ EXPECT_EQ("y", parser_.ins_[1].AsString());
+ EXPECT_EQ("z", parser_.ins_[2].AsString());
}
TEST_F(DepfileParserTest, MultipleEmptyRules) {
@@ -185,7 +200,8 @@ TEST_F(DepfileParserTest, MultipleEmptyRules) {
EXPECT_TRUE(Parse("foo: x\n"
"foo: \n"
"foo:\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(1u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
}
@@ -196,7 +212,8 @@ TEST_F(DepfileParserTest, UnifyMultipleRulesLF) {
"foo: y\n"
"foo \\\n"
"foo: z\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -209,7 +226,8 @@ TEST_F(DepfileParserTest, UnifyMultipleRulesCRLF) {
"foo: y\r\n"
"foo \\\r\n"
"foo: z\r\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -222,7 +240,8 @@ TEST_F(DepfileParserTest, UnifyMixedRulesLF) {
" y\n"
"foo \\\n"
"foo: z\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -235,7 +254,8 @@ TEST_F(DepfileParserTest, UnifyMixedRulesCRLF) {
" y\r\n"
"foo \\\r\n"
"foo: z\r\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -247,7 +267,8 @@ TEST_F(DepfileParserTest, IndentedRulesLF) {
EXPECT_TRUE(Parse(" foo: x\n"
" foo: y\n"
" foo: z\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -259,7 +280,8 @@ TEST_F(DepfileParserTest, IndentedRulesCRLF) {
EXPECT_TRUE(Parse(" foo: x\r\n"
" foo: y\r\n"
" foo: z\r\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -272,7 +294,8 @@ TEST_F(DepfileParserTest, TolerateMP) {
"x:\n"
"y:\n"
"z:\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
@@ -287,25 +310,34 @@ TEST_F(DepfileParserTest, MultipleRulesTolerateMP) {
"y:\n"
"foo: z\n"
"z:\n", &err));
- ASSERT_EQ("foo", parser_.out_.AsString());
+ ASSERT_EQ(1u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
ASSERT_EQ(3u, parser_.ins_.size());
EXPECT_EQ("x", parser_.ins_[0].AsString());
EXPECT_EQ("y", parser_.ins_[1].AsString());
EXPECT_EQ("z", parser_.ins_[2].AsString());
}
-TEST_F(DepfileParserTest, MultipleRulesRejectDifferentOutputs) {
- // check that multiple different outputs are rejected by the parser
+TEST_F(DepfileParserTest, MultipleRulesDifferentOutputs) {
+ // check that multiple different outputs are accepted by the parser
// when spread across multiple rules
- DepfileParserOptions parser_opts;
- parser_opts.depfile_distinct_target_lines_action_ =
- kDepfileDistinctTargetLinesActionError;
- DepfileParser parser(parser_opts);
string err;
- string input =
- "foo: x y\n"
- "bar: y z\n";
- EXPECT_FALSE(parser.Parse(&input, &err));
- ASSERT_EQ("depfile has multiple output paths (on separate lines)"
- " [-w depfilemulti=err]", err);
+ EXPECT_TRUE(Parse("foo: x y\n"
+ "bar: y z\n", &err));
+ ASSERT_EQ(2u, parser_.outs_.size());
+ ASSERT_EQ("foo", parser_.outs_[0].AsString());
+ ASSERT_EQ("bar", parser_.outs_[1].AsString());
+ ASSERT_EQ(3u, parser_.ins_.size());
+ EXPECT_EQ("x", parser_.ins_[0].AsString());
+ EXPECT_EQ("y", parser_.ins_[1].AsString());
+ EXPECT_EQ("z", parser_.ins_[2].AsString());
+}
+
+TEST_F(DepfileParserTest, BuggyMP) {
+ std::string err;
+ EXPECT_FALSE(Parse("foo: x y z\n"
+ "x: alsoin\n"
+ "y:\n"
+ "z:\n", &err));
+ ASSERT_EQ("inputs may not also have inputs", err);
}
diff --git a/src/deps_log.cc b/src/deps_log.cc
index 4aaffeb..cf55194 100644
--- a/src/deps_log.cc
+++ b/src/deps_log.cc
@@ -167,15 +167,15 @@ void DepsLog::Close() {
file_ = NULL;
}
-bool DepsLog::Load(const string& path, State* state, string* err) {
+LoadStatus DepsLog::Load(const string& path, State* state, string* err) {
METRIC_RECORD(".ninja_deps load");
char buf[kMaxRecordSize + 1];
FILE* f = fopen(path.c_str(), "rb");
if (!f) {
if (errno == ENOENT)
- return true;
+ return LOAD_NOT_FOUND;
*err = strerror(errno);
- return false;
+ return LOAD_ERROR;
}
bool valid_header = true;
@@ -196,7 +196,7 @@ bool DepsLog::Load(const string& path, State* state, string* err) {
unlink(path.c_str());
// Don't report this as a failure. An empty deps log will cause
// us to rebuild the outputs anyway.
- return true;
+ return LOAD_SUCCESS;
}
long offset;
@@ -284,12 +284,12 @@ bool DepsLog::Load(const string& path, State* state, string* err) {
fclose(f);
if (!Truncate(path, offset, err))
- return false;
+ return LOAD_ERROR;
// The truncate succeeded; we'll just report the load error as a
// warning because the build can proceed.
*err += "; recovering";
- return true;
+ return LOAD_SUCCESS;
}
fclose(f);
@@ -302,7 +302,7 @@ bool DepsLog::Load(const string& path, State* state, string* err) {
needs_recompaction_ = true;
}
- return true;
+ return LOAD_SUCCESS;
}
DepsLog::Deps* DepsLog::GetDeps(Node* node) {
diff --git a/src/deps_log.h b/src/deps_log.h
index 3812a28..e7974a1 100644
--- a/src/deps_log.h
+++ b/src/deps_log.h
@@ -21,6 +21,7 @@ using namespace std;
#include <stdio.h>
+#include "load_status.h"
#include "timestamp.h"
struct Node;
@@ -84,7 +85,7 @@ struct DepsLog {
int node_count;
Node** nodes;
};
- bool Load(const string& path, State* state, string* err);
+ LoadStatus Load(const string& path, State* state, string* err);
Deps* GetDeps(Node* node);
/// Rewrite the known log entries, throwing away old data.
diff --git a/src/graph.cc b/src/graph.cc
index 376b911..28a9653 100644
--- a/src/graph.cc
+++ b/src/graph.cc
@@ -14,6 +14,7 @@
#include "graph.h"
+#include <algorithm>
#include <assert.h>
#include <stdio.h>
@@ -222,8 +223,8 @@ bool DependencyScan::RecomputeOutputsDirty(Edge* edge, Node* most_recent_input,
return true;
}
-bool DependencyScan::RecomputeOutputDirty(Edge* edge,
- Node* most_recent_input,
+bool DependencyScan::RecomputeOutputDirty(const Edge* edge,
+ const Node* most_recent_input,
const string& command,
Node* output) {
if (edge->is_phony()) {
@@ -342,7 +343,11 @@ string EdgeEnv::LookupVariable(const string& var) {
if (var == "in" || var == "in_newline") {
int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ -
edge_->order_only_deps_;
+#if __cplusplus >= 201103L
+ return MakePathList(edge_->inputs_.data(), explicit_deps_count,
+#else
return MakePathList(&edge_->inputs_[0], explicit_deps_count,
+#endif
var == "in" ? ' ' : '\n');
} else if (var == "out") {
int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_;
@@ -379,7 +384,7 @@ std::string EdgeEnv::MakePathList(const Node* const* const span,
result.push_back(sep);
const string& path = (*i)->PathDecanonicalized();
if (escape_in_out_ == kShellEscape) {
-#if _WIN32
+#ifdef _WIN32
GetWin32EscapedString(path, &result);
#else
GetShellEscapedString(path, &result);
@@ -406,16 +411,16 @@ std::string Edge::GetBinding(const std::string& key) const {
return env.LookupVariable(key);
}
-bool Edge::GetBindingBool(const string& key) {
+bool Edge::GetBindingBool(const string& key) const {
return !GetBinding(key).empty();
}
-string Edge::GetUnescapedDepfile() {
+string Edge::GetUnescapedDepfile() const {
EdgeEnv env(this, EdgeEnv::kDoNotEscape);
return env.LookupVariable("depfile");
}
-string Edge::GetUnescapedDyndep() {
+string Edge::GetUnescapedDyndep() const {
EdgeEnv env(this, EdgeEnv::kDoNotEscape);
return env.LookupVariable("dyndep");
}
@@ -507,6 +512,17 @@ bool ImplicitDepLoader::LoadDeps(Edge* edge, string* err) {
return true;
}
+struct matches {
+ matches(std::vector<StringPiece>::iterator i) : i_(i) {}
+
+ bool operator()(const Node* node) const {
+ StringPiece opath = StringPiece(node->path());
+ return *i_ == opath;
+ }
+
+ std::vector<StringPiece>::iterator i_;
+};
+
bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path,
string* err) {
METRIC_RECORD("depfile load");
@@ -537,9 +553,15 @@ bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path,
return false;
}
+ if (depfile.outs_.empty()) {
+ *err = path + ": no outputs declared";
+ return false;
+ }
+
uint64_t unused;
- if (!CanonicalizePath(const_cast<char*>(depfile.out_.str_),
- &depfile.out_.len_, &unused, err)) {
+ std::vector<StringPiece>::iterator primary_out = depfile.outs_.begin();
+ if (!CanonicalizePath(const_cast<char*>(primary_out->str_),
+ &primary_out->len_, &unused, err)) {
*err = path + ": " + *err;
return false;
}
@@ -548,12 +570,22 @@ bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path,
// mark the edge as dirty.
Node* first_output = edge->outputs_[0];
StringPiece opath = StringPiece(first_output->path());
- if (opath != depfile.out_) {
+ if (opath != *primary_out) {
EXPLAIN("expected depfile '%s' to mention '%s', got '%s'", path.c_str(),
- first_output->path().c_str(), depfile.out_.AsString().c_str());
+ first_output->path().c_str(), primary_out->AsString().c_str());
return false;
}
+ // Ensure that all mentioned outputs are outputs of the edge.
+ for (std::vector<StringPiece>::iterator o = depfile.outs_.begin();
+ o != depfile.outs_.end(); ++o) {
+ matches m(o);
+ if (std::find_if(edge->outputs_.begin(), edge->outputs_.end(), m) == edge->outputs_.end()) {
+ *err = path + ": depfile mentions '" + o->AsString() + "' as an output, but no such output was declared";
+ return false;
+ }
+ }
+
// Preallocate space in edge->inputs_ to be filled in below.
vector<Node*>::iterator implicit_dep =
PreallocateSpace(edge, depfile.ins_.size());
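A simplified, stand-alone illustration of the check above (plain std::string instead of Node*/StringPiece; not the project code): every output named by the depfile must be one of the edge's declared outputs, otherwise LoadDepFile rejects the depfile as mentioning an undeclared output.

    #include <algorithm>
    #include <string>
    #include <vector>

    // Returns true only if every depfile output is also a declared edge output,
    // e.g. a depfile naming "out3" for "build out1 out2: ..." fails the check.
    bool OutputsDeclared(const std::vector<std::string>& depfile_outs,
                         const std::vector<std::string>& edge_outs) {
      for (std::vector<std::string>::const_iterator o = depfile_outs.begin();
           o != depfile_outs.end(); ++o) {
        if (std::find(edge_outs.begin(), edge_outs.end(), *o) == edge_outs.end())
          return false;
      }
      return true;
    }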
diff --git a/src/graph.h b/src/graph.h
index 6122837..2fa54af 100644
--- a/src/graph.h
+++ b/src/graph.h
@@ -159,12 +159,12 @@ struct Edge {
/// Returns the shell-escaped value of |key|.
std::string GetBinding(const string& key) const;
- bool GetBindingBool(const string& key);
+ bool GetBindingBool(const string& key) const;
/// Like GetBinding("depfile"), but without shell escaping.
- string GetUnescapedDepfile();
+ string GetUnescapedDepfile() const;
/// Like GetBinding("dyndep"), but without shell escaping.
- string GetUnescapedDyndep();
+ string GetUnescapedDyndep() const;
/// Like GetBinding("rspfile"), but without shell escaping.
std::string GetUnescapedRspfile() const;
@@ -310,7 +310,7 @@ struct DependencyScan {
/// Recompute whether a given single output should be marked dirty.
/// Returns true if so.
- bool RecomputeOutputDirty(Edge* edge, Node* most_recent_input,
+ bool RecomputeOutputDirty(const Edge* edge, const Node* most_recent_input,
const string& command, Node* output);
BuildLog* build_log_;
diff --git a/src/graph_test.cc b/src/graph_test.cc
index c8cca1c..660943f 100644
--- a/src/graph_test.cc
+++ b/src/graph_test.cc
@@ -218,7 +218,7 @@ TEST_F(GraphTest, VarInOutPathEscaping) {
"build a$ b: cat no'space with$ space$$ no\"space2\n"));
Edge* edge = GetNode("a b")->in_edge();
-#if _WIN32
+#ifdef _WIN32
EXPECT_EQ("cat no'space \"with space$\" \"no\\\"space2\" > \"a b\"",
edge->EvaluateCommand());
#else
diff --git a/src/load_status.h b/src/load_status.h
new file mode 100644
index 0000000..0b16b1a
--- /dev/null
+++ b/src/load_status.h
@@ -0,0 +1,24 @@
+// Copyright 2019 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef NINJA_LOAD_STATUS_H_
+#define NINJA_LOAD_STATUS_H_
+
+enum LoadStatus {
+ LOAD_ERROR,
+ LOAD_SUCCESS,
+ LOAD_NOT_FOUND,
+};
+
+#endif // NINJA_LOAD_STATUS_H_
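A hedged sketch of the intended caller pattern (it mirrors the OpenBuildLog/OpenDepsLog hunks in ninja.cc below): LOAD_ERROR aborts, LOAD_NOT_FOUND is the normal first-build case, and LOAD_SUCCESS may still carry a recovered-from warning in err.

    // Sketch only; assumes a DepsLog, a State, and the Error/Warning helpers.
    std::string err;
    LoadStatus status = deps_log.Load(path, &state, &err);
    if (status == LOAD_ERROR) {
      Error("loading deps log %s: %s", path.c_str(), err.c_str());
      return false;
    }
    if (status == LOAD_NOT_FOUND)
      return true;                   // nothing on disk yet; nothing to recompact
    if (!err.empty())
      Warning("%s", err.c_str());    // Load() recovered from a truncated log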
diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc
index 2011368..bb53dc2 100644
--- a/src/manifest_parser.cc
+++ b/src/manifest_parser.cc
@@ -228,7 +228,7 @@ bool ManifestParser::ParseEdge(string* err) {
for (;;) {
EvalString out;
if (!lexer_.ReadPath(&out, err))
- return err;
+ return false;
if (out.empty())
break;
outs.push_back(out);
@@ -266,7 +266,7 @@ bool ManifestParser::ParseEdge(string* err) {
for (;;) {
EvalString in;
if (!lexer_.ReadPath(&in, err))
- return err;
+ return false;
if (in.empty())
break;
ins.push_back(in);
@@ -379,14 +379,6 @@ bool ManifestParser::ParseEdge(string* err) {
}
}
- // Multiple outputs aren't (yet?) supported with depslog.
- string deps_type = edge->GetBinding("deps");
- if (!deps_type.empty() && edge->outputs_.size() > 1) {
- return lexer_.Error("multiple outputs aren't (yet?) supported by depslog; "
- "bring this up on the mailing list if it affects you",
- err);
- }
-
// Lookup, validate, and save any dyndep binding. It will be used later
// to load generated dependency information dynamically, but it must
// be one of our manifest-specified inputs.
diff --git a/src/manifest_parser_test.cc b/src/manifest_parser_test.cc
index f2b7467..f4aee2d 100644
--- a/src/manifest_parser_test.cc
+++ b/src/manifest_parser_test.cc
@@ -858,11 +858,10 @@ TEST_F(ParserTest, MultipleOutputsWithDeps) {
State local_state;
ManifestParser parser(&local_state, NULL);
string err;
- EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n deps = gcc\n"
+ EXPECT_TRUE(parser.ParseTest("rule cc\n command = foo\n deps = gcc\n"
"build a.o b.o: cc c.cc\n",
&err));
- EXPECT_EQ("input:5: multiple outputs aren't (yet?) supported by depslog; "
- "bring this up on the mailing list if it affects you\n", err);
+ EXPECT_EQ("", err);
}
TEST_F(ParserTest, SubNinja) {
diff --git a/src/ninja.cc b/src/ninja.cc
index c24f09d..1429639 100644
--- a/src/ninja.cc
+++ b/src/ninja.cc
@@ -17,6 +17,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <cstdlib>
#ifdef _WIN32
#include "getopt.h"
@@ -73,10 +74,6 @@ struct Options {
/// Whether phony cycles should warn or print an error.
bool phony_cycle_should_err;
-
- /// Whether a depfile with multiple targets on separate lines should
- /// warn or print an error.
- bool depfile_distinct_target_lines_should_err;
};
/// The Ninja main() loads up a series of data structures; various tools need
@@ -123,17 +120,19 @@ struct NinjaMain : public BuildLogUser {
int ToolTargets(const Options* options, int argc, char* argv[]);
int ToolCommands(const Options* options, int argc, char* argv[]);
int ToolClean(const Options* options, int argc, char* argv[]);
+ int ToolCleanDead(const Options* options, int argc, char* argv[]);
int ToolCompilationDatabase(const Options* options, int argc, char* argv[]);
int ToolRecompact(const Options* options, int argc, char* argv[]);
+ int ToolRestat(const Options* options, int argc, char* argv[]);
int ToolUrtle(const Options* options, int argc, char** argv);
int ToolRules(const Options* options, int argc, char* argv[]);
/// Open the build log.
- /// @return false on error.
+ /// @return LOAD_ERROR on error.
bool OpenBuildLog(bool recompact_only = false);
/// Open the deps log: load it, then open for writing.
- /// @return false on error.
+ /// @return LOAD_ERROR on error.
bool OpenDepsLog(bool recompact_only = false);
/// Ensure the build directory exists, creating it if necessary.
@@ -154,7 +153,7 @@ struct NinjaMain : public BuildLogUser {
virtual bool IsPathDead(StringPiece s) const {
Node* n = state_.LookupNode(s);
- if (!n || !n->in_edge())
+ if (n && n->in_edge())
return false;
// Just checking n isn't enough: If an old output is both in the build log
// and in the deps log, it will have a Node object in state_. (It will also
@@ -719,6 +718,11 @@ int NinjaMain::ToolClean(const Options* options, int argc, char* argv[]) {
}
}
+int NinjaMain::ToolCleanDead(const Options* options, int argc, char* argv[]) {
+ Cleaner cleaner(&state_, config_, &disk_interface_);
+ return cleaner.CleanDead(build_log_.entries());
+}
+
void EncodeJSONString(const char *str) {
while (*str) {
if (*str == '"' || *str == '\\')
@@ -803,12 +807,14 @@ int NinjaMain::ToolCompilationDatabase(const Options* options, int argc,
bool first = true;
vector<char> cwd;
+ char* success = NULL;
do {
cwd.resize(cwd.size() + 1024);
errno = 0;
- } while (!getcwd(&cwd[0], cwd.size()) && errno == ERANGE);
- if (errno != 0 && errno != ERANGE) {
+ success = getcwd(&cwd[0], cwd.size());
+ } while (!success && errno == ERANGE);
+ if (!success) {
Error("cannot determine working directory: %s", strerror(errno));
return 1;
}
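Checking the pointer returned by getcwd() directly, rather than inferring failure from errno afterwards, makes the error handling above unambiguous. A stand-alone sketch of the corrected pattern (a POSIX-style getcwd is assumed here):

    // Stand-alone sketch, not the project code.
    #include <cerrno>
    #include <cstring>
    #include <string>
    #include <vector>
    #include <unistd.h>

    bool GetWorkingDirectory(std::string* out, std::string* err) {
      std::vector<char> cwd;
      char* success = NULL;
      do {
        cwd.resize(cwd.size() + 1024);   // grow the buffer until the path fits
        errno = 0;
        success = getcwd(&cwd[0], cwd.size());
      } while (!success && errno == ERANGE);
      if (!success) {
        *err = strerror(errno);
        return false;
      }
      out->assign(&cwd[0]);
      return true;
    }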
@@ -845,13 +851,71 @@ int NinjaMain::ToolRecompact(const Options* options, int argc, char* argv[]) {
if (!EnsureBuildDirExists())
return 1;
- if (!OpenBuildLog(/*recompact_only=*/true) ||
- !OpenDepsLog(/*recompact_only=*/true))
+ if (OpenBuildLog(/*recompact_only=*/true) == LOAD_ERROR ||
+ OpenDepsLog(/*recompact_only=*/true) == LOAD_ERROR)
return 1;
return 0;
}
+int NinjaMain::ToolRestat(const Options* options, int argc, char* argv[]) {
+ // The restat tool uses getopt, and expects argv[0] to contain the name of the
+ // tool, i.e. "restat"
+ argc++;
+ argv--;
+
+ optind = 1;
+ int opt;
+ while ((opt = getopt(argc, argv, const_cast<char*>("h"))) != -1) {
+ switch (opt) {
+ case 'h':
+ default:
+ printf("usage: ninja -t restat [outputs]\n");
+ return 1;
+ }
+ }
+ argv += optind;
+ argc -= optind;
+
+ if (!EnsureBuildDirExists())
+ return 1;
+
+ string log_path = ".ninja_log";
+ if (!build_dir_.empty())
+ log_path = build_dir_ + "/" + log_path;
+
+ string err;
+ const LoadStatus status = build_log_.Load(log_path, &err);
+ if (status == LOAD_ERROR) {
+ Error("loading build log %s: %s", log_path.c_str(), err.c_str());
+ return EXIT_FAILURE;
+ }
+ if (status == LOAD_NOT_FOUND) {
+ // Nothing to restat, ignore this
+ return EXIT_SUCCESS;
+ }
+ if (!err.empty()) {
+ // Hack: Load() can return a warning via err by returning LOAD_SUCCESS.
+ Warning("%s", err.c_str());
+ err.clear();
+ }
+
+ bool success = build_log_.Restat(log_path, disk_interface_, argc, argv, &err);
+ if (!success) {
+ Error("failed recompaction: %s", err.c_str());
+ return EXIT_FAILURE;
+ }
+
+ if (!config_.dry_run) {
+ if (!build_log_.OpenForWrite(log_path, *this, &err)) {
+ Error("opening build log: %s", err.c_str());
+ return EXIT_FAILURE;
+ }
+ }
+
+ return EXIT_SUCCESS;
+}
+
int NinjaMain::ToolUrtle(const Options* options, int argc, char** argv) {
// RLE encoded.
const char* urtle =
@@ -904,8 +968,12 @@ const Tool* ChooseTool(const string& tool_name) {
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCompilationDatabase },
{ "recompact", "recompacts ninja-internal data structures",
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact },
+ { "restat", "restats all outputs in the build log",
+ Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolRestat },
{ "rules", "list all rules",
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules },
+ { "cleandead", "clean built files that are no longer produced by the manifest",
+ Tool::RUN_AFTER_LOGS, &NinjaMain::ToolCleanDead },
{ "urtle", NULL,
Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolUrtle },
{ NULL, NULL, Tool::RUN_AFTER_FLAGS, NULL }
@@ -989,7 +1057,6 @@ bool WarningEnable(const string& name, Options* options) {
printf("warning flags:\n"
" dupbuild={err,warn} multiple build lines for one target\n"
" phonycycle={err,warn} phony build statement references itself\n"
-" depfilemulti={err,warn} depfile has multiple output paths on separate lines\n"
);
return false;
} else if (name == "dupbuild=err") {
@@ -1004,11 +1071,9 @@ bool WarningEnable(const string& name, Options* options) {
} else if (name == "phonycycle=warn") {
options->phony_cycle_should_err = false;
return true;
- } else if (name == "depfilemulti=err") {
- options->depfile_distinct_target_lines_should_err = true;
- return true;
- } else if (name == "depfilemulti=warn") {
- options->depfile_distinct_target_lines_should_err = false;
+ } else if (name == "depfilemulti=err" ||
+ name == "depfilemulti=warn") {
+ Warning("deprecated warning 'depfilemulti'");
return true;
} else {
const char* suggestion =
@@ -1030,17 +1095,21 @@ bool NinjaMain::OpenBuildLog(bool recompact_only) {
log_path = build_dir_ + "/" + log_path;
string err;
- if (!build_log_.Load(log_path, &err)) {
+ const LoadStatus status = build_log_.Load(log_path, &err);
+ if (status == LOAD_ERROR) {
Error("loading build log %s: %s", log_path.c_str(), err.c_str());
return false;
}
if (!err.empty()) {
- // Hack: Load() can return a warning via err by returning true.
+ // Hack: Load() can return a warning via err by returning LOAD_SUCCESS.
Warning("%s", err.c_str());
err.clear();
}
if (recompact_only) {
+ if (status == LOAD_NOT_FOUND) {
+ return true;
+ }
bool success = build_log_.Recompact(log_path, *this, &err);
if (!success)
Error("failed recompaction: %s", err.c_str());
@@ -1065,17 +1134,21 @@ bool NinjaMain::OpenDepsLog(bool recompact_only) {
path = build_dir_ + "/" + path;
string err;
- if (!deps_log_.Load(path, &state_, &err)) {
+ const LoadStatus status = deps_log_.Load(path, &state_, &err);
+ if (status == LOAD_ERROR) {
Error("loading deps log %s: %s", path.c_str(), err.c_str());
return false;
}
if (!err.empty()) {
- // Hack: Load() can return a warning via err by returning true.
+ // Hack: Load() can return a warning via err by returning LOAD_SUCCESS.
Warning("%s", err.c_str());
err.clear();
}
if (recompact_only) {
+ if (status == LOAD_NOT_FOUND) {
+ return true;
+ }
bool success = deps_log_.Recompact(path, &err);
if (!success)
Error("failed recompaction: %s", err.c_str());
@@ -1284,11 +1357,6 @@ NORETURN void real_main(int argc, char** argv) {
if (exit_code >= 0)
exit(exit_code);
- if (options.depfile_distinct_target_lines_should_err) {
- config.depfile_parser_options.depfile_distinct_target_lines_action_ =
- kDepfileDistinctTargetLinesActionError;
- }
-
if (options.working_dir) {
// The formatting of this string, complete with funny quotes, is
// so Emacs can properly identify that the cwd has changed for
diff --git a/src/util.cc b/src/util.cc
index ee810d6..4df2bb2 100644
--- a/src/util.cc
+++ b/src/util.cc
@@ -45,7 +45,7 @@
#elif defined(__SVR4) && defined(__sun)
#include <unistd.h>
#include <sys/loadavg.h>
-#elif defined(_AIX)
+#elif defined(_AIX) && !defined(__PASE__)
#include <libperfstat.h>
#elif defined(linux) || defined(__GLIBC__)
#include <sys/sysinfo.h>
@@ -481,9 +481,7 @@ string StripAnsiEscapeCodes(const string& in) {
int GetProcessorCount() {
#ifdef _WIN32
- SYSTEM_INFO info;
- GetNativeSystemInfo(&info);
- return info.dwNumberOfProcessors;
+ return GetActiveProcessorCount(ALL_PROCESSOR_GROUPS);
#else
#ifdef CPU_COUNT
// The number of exposed processors might not represent the actual number of
@@ -564,6 +562,10 @@ double GetLoadAverage() {
return posix_compatible_load;
}
+#elif defined(__PASE__)
+double GetLoadAverage() {
+ return -0.0f;
+}
#elif defined(_AIX)
double GetLoadAverage() {
perfstat_cpu_total_t cpu_stats;
@@ -574,7 +576,7 @@ double GetLoadAverage() {
// Calculation taken from comment in libperfstats.h
return double(cpu_stats.loadavg[0]) / double(1 << SBITS);
}
-#elif defined(__UCLIBC__)
+#elif defined(__UCLIBC__) || (defined(__BIONIC__) && __ANDROID_API__ < 29)
double GetLoadAverage() {
struct sysinfo si;
if (sysinfo(&si) != 0)
@@ -594,6 +596,12 @@ double GetLoadAverage() {
#endif // _WIN32
string ElideMiddle(const string& str, size_t width) {
+ switch (width) {
+ case 0: return "";
+ case 1: return ".";
+ case 2: return "..";
+ case 3: return "...";
+ }
const int kMargin = 3; // Space for "...".
string result = str;
if (result.size() > width) {
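For widths shorter than the three-character margin, the old arithmetic below can wrap around (width - kMargin on an unsigned width), so the degenerate widths are now handled up front. Expected results for any input longer than the width, taken from the updated test in util_test.cc below:

    ElideMiddle("A long status line", 0)  ->  ""
    ElideMiddle("A long status line", 1)  ->  "."
    ElideMiddle("A long status line", 2)  ->  ".."
    ElideMiddle("A long status line", 3)  ->  "..."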
diff --git a/src/util_test.cc b/src/util_test.cc
index d97b48c..b43788d 100644
--- a/src/util_test.cc
+++ b/src/util_test.cc
@@ -420,6 +420,10 @@ TEST(ElideMiddle, NothingToElide) {
string input = "Nothing to elide in this short string.";
EXPECT_EQ(input, ElideMiddle(input, 80));
EXPECT_EQ(input, ElideMiddle(input, 38));
+ EXPECT_EQ("", ElideMiddle(input, 0));
+ EXPECT_EQ(".", ElideMiddle(input, 1));
+ EXPECT_EQ("..", ElideMiddle(input, 2));
+ EXPECT_EQ("...", ElideMiddle(input, 3));
}
TEST(ElideMiddle, ElideInTheMiddle) {