161 files changed, 7075 insertions, 3934 deletions
diff --git a/.gitignore b/.gitignore index 62afef2347..8670081adf 100644 --- a/.gitignore +++ b/.gitignore @@ -128,7 +128,6 @@ git-status git-stripspace git-submodule git-svn -git-svnimport git-symbolic-ref git-tag git-tar-tree @@ -37,6 +37,7 @@ Sam Vilain <sam@vilain.net> Santi Béjar <sbejar@gmail.com> Sean Estabrooks <seanlkml@sympatico.ca> Shawn O. Pearce <spearce@spearce.org> +Steven Grimm <koreth@midwinter.com> Theodore Ts'o <tytso@mit.edu> Tony Luck <tony.luck@intel.com> Uwe Kleine-König <Uwe_Zeisberger@digi.com> diff --git a/Documentation/RelNotes-1.5.3.5.txt b/Documentation/RelNotes-1.5.3.5.txt index de38a84ad6..4e46d2c2a2 100644 --- a/Documentation/RelNotes-1.5.3.5.txt +++ b/Documentation/RelNotes-1.5.3.5.txt @@ -4,37 +4,91 @@ GIT v1.5.3.5 Release Notes Fixes since v1.5.3.4 -------------------- + * Comes with git-gui 0.8.4. + * "git-config" silently ignored options after --list; now it will error out with a usage message. * "git-config --file" failed if the argument used a relative path as it changed directories before opening the file. + * "git-config --file" now displays a proper error message if it + cannot read the file specified on the command line. + * "git-config", "git-diff", "git-apply" failed if run from a subdirectory with relative GIT_DIR and GIT_WORK_TREE set. + * "git-blame" crashed if run during a merge conflict. + * "git-add -i" did not handle single line hunks correctly. - * "git-rebase -i" failed if external diff drivers were used for one - or more files in a commit. It now avoids calling the external - diff drivers. + * "git-rebase -i" and "git-stash apply" failed if external diff + drivers were used for one or more files in a commit. They now + avoid calling the external diff drivers. * "git-log --follow" did not work unless diff generation (e.g. -p) was also requested. + * "git-log --follow -B" did not work at all. Fixed. + + * "git-log -M -B" did not correctly handle cases of very large files + being renamed and replaced by very small files in the same commit. + * "git-log" printed extra newlines between commits when a diff was generated internally (e.g. -S or --follow) but not displayed. * "git-push" error message is more helpful when pushing to a repository with no matching refs and none specified. + * "git-push" now respects + (force push) on wildcard refspecs, + matching the behavior of git-fetch. + * "git-filter-branch" now updates the working directory when it has finished filtering the current branch. * "git-instaweb" no longer fails on Mac OS X. + * "git-cvsexportcommit" didn't always create new parent directories + before trying to create new child directories. Fixed. + + * "git-fetch" printed a scary (but bogus) error message while + fetching a tag that pointed to a tree or blob. The error did + not impact correctness, only user perception. The bogus error + is no longer printed. + + * "git-ls-files --ignored" did not properly descend into non-ignored + directories that themselves contained ignored files if d_type + was not supported by the filesystem. This bug impacted systems + such as AFS. Fixed. + + * Git segfaulted when reading an invalid .gitattributes file. Fixed. + + * post-receive-email example hook fixed was fixed for + non-fast-forward updates. + * Documentation updates for supported (but previously undocumented) options of "git-archive" and "git-reflog". * "make clean" no longer deletes the configure script that ships with the git tarball, making multiple architecture builds easier. 
+ + * "git-remote show origin" spewed a warning message from Perl + when no remote is defined for the current branch via + branch.<name>.remote configuration settings. + + * Building with NO_PERL_MAKEMAKER excessively rebuilt contents + of perl/ subdirectory by rewriting perl.mak. + + * http.sslVerify configuration settings were not used in scripted + Porcelains. + + * "git-add" leaked a bit of memory while scanning for files to add. + + * A few workarounds to squelch false warnings from recent gcc have + been added. + + * "git-send-pack $remote frotz" segfaulted when there is nothing + named 'frotz' on the local end. + + * "git-rebase -interactive" did not handle its "--strategy" option + properly. diff --git a/Documentation/RelNotes-1.5.4.txt b/Documentation/RelNotes-1.5.4.txt index ceee857232..133fa64d22 100644 --- a/Documentation/RelNotes-1.5.4.txt +++ b/Documentation/RelNotes-1.5.4.txt @@ -4,6 +4,8 @@ GIT v1.5.4 Release Notes Updates since v1.5.3 -------------------- + * Comes with much improved gitk. + * git-reset is now built-in. * git-send-email can optionally talk over ssmtp and use SMTP-AUTH. @@ -19,6 +21,29 @@ Updates since v1.5.3 * git-archive can optionally substitute keywords in files marked with export-subst attribute. + * git-for-each-ref learned %(xxxdate:<dateformat>) syntax to + show the various date fields in different formats. + + * git-gc --auto is a low-impact way to automatically run a + variant of git-repack that does not lose unreferenced objects + (read: safer than the usual one) after the user accumulates + too many loose objects. + + * git-push has been rewritten in C. + + * git-push learned --dry-run option to show what would happen + if a push is run. + + * git-remote learned "rm" subcommand. + + * git-rebase --interactive mode can now work on detached HEAD. + + * git-cvsserver can be run via git-shell. + + * git-am and git-rebase are far less verbose. + + * git-pull learned to pass --[no-]ff option to underlying git-merge. + * Various Perforce importer updates. Fixes since v1.5.3 @@ -29,7 +54,6 @@ this release, unless otherwise noted. -- exec >/var/tmp/1 -O=v1.5.3.2-99-ge4b2890 +O=v1.5.3.4-450-g952a9e5 echo O=`git describe refs/heads/master` git shortlog --no-merges $O..refs/heads/master ^refs/heads/maint - diff --git a/Documentation/cmd-list.perl b/Documentation/cmd-list.perl index 1061fd8bcd..8d21d423e5 100755 --- a/Documentation/cmd-list.perl +++ b/Documentation/cmd-list.perl @@ -185,7 +185,6 @@ git-status mainporcelain git-stripspace purehelpers git-submodule mainporcelain git-svn foreignscminterface -git-svnimport foreignscminterface git-symbolic-ref plumbingmanipulators git-tag mainporcelain git-tar-tree plumbinginterrogators diff --git a/Documentation/config.txt b/Documentation/config.txt index d4a476e2ff..edf50cd211 100644 --- a/Documentation/config.txt +++ b/Documentation/config.txt @@ -324,10 +324,11 @@ branch.<name>.remote:: If this option is not given, `git fetch` defaults to remote "origin". branch.<name>.merge:: - When in branch <name>, it tells `git fetch` the default refspec to - be marked for merging in FETCH_HEAD. The value has exactly to match - a remote part of one of the refspecs which are fetched from the remote - given by "branch.<name>.remote". + When in branch <name>, it tells `git fetch` the default + refspec to be marked for merging in FETCH_HEAD. The value is + handled like the remote part of a refspec, and must match a + ref which is fetched from the remote given by + "branch.<name>.remote". 
The merge information is used by `git pull` (which at first calls `git fetch`) to lookup the default branch for merging. Without this option, `git pull` defaults to merge the first refspec fetched. diff --git a/Documentation/core-tutorial.txt b/Documentation/core-tutorial.txt index 6b2590d072..5df97a1f9d 100644 --- a/Documentation/core-tutorial.txt +++ b/Documentation/core-tutorial.txt @@ -553,13 +553,8 @@ can explore on your own. [NOTE] Most likely, you are not directly using the core -git Plumbing commands, but using Porcelain like Cogito on top -of it. Cogito works a bit differently and you usually do not -have to run `git-update-index` yourself for changed files (you -do tell underlying git about additions and removals via -`cg-add` and `cg-rm` commands). Just before you make a commit -with `cg-commit`, Cogito figures out which files you modified, -and runs `git-update-index` on them for you. +git Plumbing commands, but using Porcelain such as `git-add`, `git-rm' +and `git-commit'. Tagging a version @@ -686,8 +681,8 @@ $ git reset and in fact a lot of the common git command combinations can be scripted with the `git xyz` interfaces. You can learn things by just looking -at what the various git scripts do. For example, `git reset` is the -above two lines implemented in `git-reset`, but some things like +at what the various git scripts do. For example, `git reset` used to be +the above two lines implemented in `git-reset`, but some things like `git status` and `git commit` are slightly more complex scripts around the basic git commands. @@ -805,8 +800,8 @@ you have, you can say $ git branch ------------ -which is nothing more than a simple script around `ls .git/refs/heads`. -There will be asterisk in front of the branch you are currently on. +which used to be nothing more than a simple script around `ls .git/refs/heads`. +There will be an asterisk in front of the branch you are currently on. Sometimes you may wish to create a new branch _without_ actually checking it out and switching to it. If so, just use the command @@ -883,7 +878,7 @@ script called `git merge`, which wants to know which branches you want to resolve and what the merge is all about: ------------ -$ git merge "Merge work in mybranch" HEAD mybranch +$ git merge -m "Merge work in mybranch" mybranch ------------ where the first argument is going to be used as the commit message if @@ -952,7 +947,7 @@ the later output lines is used to show commits contained in the `master` branch, and the second column for the `mybranch` branch. Three commits are shown along with their log messages. All of them have non blank characters in the first column (`*` -shows an ordinary commit on the current branch, `.` is a merge commit), which +shows an ordinary commit on the current branch, `-` is a merge commit), which means they are now part of the `master` branch. Only the "Some work" commit has the plus `+` character in the second column, because `mybranch` has not been merged to incorporate these @@ -970,7 +965,7 @@ to the `master` branch. Let's go back to `mybranch`, and run ------------ $ git checkout mybranch -$ git merge "Merge upstream changes." HEAD master +$ git merge -m "Merge upstream changes." master ------------ This outputs something like this (the actual commit object names @@ -1086,7 +1081,7 @@ to help dumb transport downloaders. 
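As an aside, the branch.<name>.merge semantics described in the config.txt hunk above can be illustrated with git-config; the branch and ref names below are invented for the example and are not part of this change:

------------
$ git config branch.topic.remote origin
$ git config branch.topic.merge refs/heads/next
$ git checkout topic
$ git pull    # fetches from "origin" and merges its "next" branch into "topic"
------------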
There are (confusingly enough) `git-ssh-fetch` and `git-ssh-upload` programs, which are 'commit walkers'; they outlived their usefulness when git Native and SSH transports were introduced, -and not used by `git pull` or `git push` scripts. +and are not used by `git pull` or `git push` scripts. Once you fetch from the remote repository, you `merge` that with your current branch. @@ -1193,7 +1188,7 @@ $ mb=$(git-merge-base HEAD mybranch) The command writes the commit object name of the common ancestor to the standard output, so we captured its output to a variable, -because we will be using it in the next step. BTW, the common +because we will be using it in the next step. By the way, the common ancestor commit is the "New day." commit in this case. You can tell it by: @@ -1459,8 +1454,7 @@ Although git is a truly distributed system, it is often convenient to organize your project with an informal hierarchy of developers. Linux kernel development is run this way. There is a nice illustration (page 17, "Merges to Mainline") in -link:http://www.xenotime.net/linux/mentor/linux-mentoring-2006.pdf -[Randy Dunlap's presentation]. +link:http://www.xenotime.net/linux/mentor/linux-mentoring-2006.pdf[Randy Dunlap's presentation]. It should be stressed that this hierarchy is purely *informal*. There is nothing fundamental in git that enforces the "chain of @@ -1613,8 +1607,8 @@ in both of them. You could merge in 'diff-fix' first and then 'commit-fix' next, like this: ------------ -$ git merge 'Merge fix in diff-fix' master diff-fix -$ git merge 'Merge fix in commit-fix' master commit-fix +$ git merge -m 'Merge fix in diff-fix' diff-fix +$ git merge -m 'Merge fix in commit-fix' commit-fix ------------ Which would result in: diff --git a/Documentation/diff-format.txt b/Documentation/diff-format.txt index 001503205b..9709c35c98 100644 --- a/Documentation/diff-format.txt +++ b/Documentation/diff-format.txt @@ -1,5 +1,5 @@ -The output format from "git-diff-index", "git-diff-tree" and -"git-diff-files" are very similar. +The output format from "git-diff-index", "git-diff-tree", +"git-diff-files" and "git diff --raw" are very similar. These commands all compare two sets of things; what is compared differs: @@ -62,7 +62,8 @@ respectively. diff format for merges ---------------------- -"git-diff-tree" and "git-diff-files" can take '-c' or '--cc' option +"git-diff-tree", "git-diff-files" and "git-diff --raw" +can take '-c' or '--cc' option to generate diff output also for merge commits. The output differs from the format described above in the following way: @@ -86,10 +87,10 @@ Generating patches with -p -------------------------- When "git-diff-index", "git-diff-tree", or "git-diff-files" are run -with a '-p' option, they do not produce the output described above; -instead they produce a patch file. You can customize the creation -of such patches via the GIT_EXTERNAL_DIFF and the GIT_DIFF_OPTS -environment variables. +with a '-p' option, or "git diff" without the '--raw' option, they +do not produce the output described above; instead they produce a +patch file. You can customize the creation of such patches via the +GIT_EXTERNAL_DIFF and the GIT_DIFF_OPTS environment variables. What the -p option produces is slightly different from the traditional diff format. @@ -137,8 +138,8 @@ file made it into the new one. 
combined diff format -------------------- -git-diff-tree and git-diff-files can take '-c' or '--cc' option -to produce 'combined diff', which looks like this: +"git-diff-tree", "git-diff-files" and "git-diff" can take '-c' or +'--cc' option to produce 'combined diff', which looks like this: ------------ diff --combined describe.c diff --git a/Documentation/git-bisect.txt b/Documentation/git-bisect.txt index 1072fb87d1..4795349c10 100644 --- a/Documentation/git-bisect.txt +++ b/Documentation/git-bisect.txt @@ -16,8 +16,9 @@ The command takes various subcommands, and different options depending on the subcommand: git bisect start [<bad> [<good>...]] [--] [<paths>...] - git bisect bad <rev> - git bisect good <rev> + git bisect bad [<rev>] + git bisect good [<rev>...] + git bisect skip [<rev>...] git bisect reset [<branch>] git bisect visualize git bisect replay <logfile> @@ -134,6 +135,20 @@ $ git reset --hard HEAD~3 # try 3 revs before what Then compile and test the one you chose to try. After that, tell bisect what the result was as usual. +Bisect skip +~~~~~~~~~~~~ + +Instead of choosing by yourself a nearby commit, you may just want git +to do it for you using: + +------------ +$ git bisect skip # Current version cannot be tested +------------ + +But computing the commit to test may be slower afterwards and git may +eventually not be able to tell the first bad among a bad and one or +more "skip"ped commits. + Cutting down bisection by giving more parameters to bisect start ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -167,14 +182,18 @@ $ git bisect run my_script ------------ Note that the "run" script (`my_script` in the above example) should -exit with code 0 in case the current source code is good and with a -code between 1 and 127 (included) in case the current source code is -bad. +exit with code 0 in case the current source code is good. Exit with a +code between 1 and 127 (inclusive), except 125, if the current +source code is bad. Any other exit code will abort the automatic bisect process. (A program that does "exit(-1)" leaves $? = 255, see exit(3) manual page, the value is chopped with "& 0377".) +The special exit code 125 should be used when the current source code +cannot be tested. If the "run" script exits with this code, the current +revision will be skipped, see `git bisect skip` above. + You may often find that during bisect you want to have near-constant tweaks (e.g., s/#define DEBUG 0/#define DEBUG 1/ in a header file, or "revision that does not have this commit needs this patch applied to diff --git a/Documentation/git-cherry-pick.txt b/Documentation/git-cherry-pick.txt index 47b1e8c2fc..76a2edfd9b 100644 --- a/Documentation/git-cherry-pick.txt +++ b/Documentation/git-cherry-pick.txt @@ -27,11 +27,12 @@ OPTIONS message prior committing. -x:: - Cause the command to append which commit was - cherry-picked after the original commit message when - making a commit. Do not use this option if you are - cherry-picking from your private branch because the - information is useless to the recipient. If on the + When recording the commit, append to the original commit + message a note that indicates which commit this change + was cherry-picked from. Append the note only for cherry + picks without conflicts. Do not use this option if + you are cherry-picking from your private branch because + the information is useless to the recipient. If on the other hand you are cherry-picking between two publicly visible branches (e.g. 
backporting a fix to a maintenance branch for an older release from a diff --git a/Documentation/git-clone.txt b/Documentation/git-clone.txt index 253f4f03c5..cca14d6b5d 100644 --- a/Documentation/git-clone.txt +++ b/Documentation/git-clone.txt @@ -111,11 +111,11 @@ OPTIONS --depth <depth>:: Create a 'shallow' clone with a history truncated to the - specified number of revs. A shallow repository has + specified number of revisions. A shallow repository has a number of limitations (you cannot clone or fetch from it, nor push from nor into it), but is adequate if you - want to only look at near the tip of a large project - with a long history, and would want to send in a fixes + are only interested in the recent history of a large project + with a long history, and would want to send in fixes as patches. <repository>:: diff --git a/Documentation/git-cvsexportcommit.txt b/Documentation/git-cvsexportcommit.txt index 4c8d1e6386..c3922f9238 100644 --- a/Documentation/git-cvsexportcommit.txt +++ b/Documentation/git-cvsexportcommit.txt @@ -73,7 +73,7 @@ Merge one patch into CVS:: $ export GIT_DIR=~/project/.git $ cd ~/project_cvs_checkout $ git-cvsexportcommit -v <commit-sha1> -$ cvs commit -F .mgs <files> +$ cvs commit -F .msg <files> ------------ Merge pending patches into CVS automatically -- only if you really know what you are doing:: diff --git a/Documentation/git-diff.txt b/Documentation/git-diff.txt index ce0f502468..11c4216c4a 100644 --- a/Documentation/git-diff.txt +++ b/Documentation/git-diff.txt @@ -82,6 +82,9 @@ include::diff-options.txt[] the diff to the named paths (you can give directory names and get diff for all files under them). +Output format +------------- +include::diff-format.txt[] EXAMPLES -------- diff --git a/Documentation/git-filter-branch.txt b/Documentation/git-filter-branch.txt index ba9b4fbca7..385ecc900f 100644 --- a/Documentation/git-filter-branch.txt +++ b/Documentation/git-filter-branch.txt @@ -219,7 +219,7 @@ git filter-branch --commit-filter ' fi' HEAD ------------------------------------------------------------------------------ -The function 'skip_commits' is defined as follows: +The function 'skip_commit' is defined as follows: -------------------------- skip_commit() diff --git a/Documentation/git-format-patch.txt b/Documentation/git-format-patch.txt index c9857a2d62..f0617efa0a 100644 --- a/Documentation/git-format-patch.txt +++ b/Documentation/git-format-patch.txt @@ -168,7 +168,7 @@ git-format-patch origin:: is created in the current directory. git-format-patch \--root origin:: - Extract all commits which that leads to 'origin' since the + Extract all commits that lead to 'origin' since the inception of the project. git-format-patch -M -B origin:: diff --git a/Documentation/git-gc.txt b/Documentation/git-gc.txt index b9d5660eac..872056ea04 100644 --- a/Documentation/git-gc.txt +++ b/Documentation/git-gc.txt @@ -19,7 +19,8 @@ created from prior invocations of gitlink:git-add[1]. Users are encouraged to run this task on a regular basis within each repository to maintain good disk space utilization and good -operating performance. +operating performance. Some git commands may automatically run +`git-gc`; see the `--auto` flag below for details. OPTIONS ------- @@ -44,18 +45,23 @@ OPTIONS few hundred changesets or so. --auto:: - With this option, `git gc` checks if there are too many - loose objects in the repository and runs - gitlink:git-repack[1] with `-d -l` option to pack them. 
- The threshold for loose objects is set with `gc.auto` configuration - variable, and can be disabled by setting it to 0. Some - Porcelain commands use this after they perform operation - that could create many loose objects automatically. - Additionally, when there are too many packs are present, - they are consolidated into one larger pack by running - the `git-repack` command with `-A` option. The - threshold for number of packs is set with - `gc.autopacklimit` configuration variable. + With this option, `git gc` checks whether any housekeeping is + required; if not, it exits without performing any work. + Some git commands run `git gc --auto` after performing + operations that could create many loose objects. ++ +Housekeeping is required if there are too many loose objects or +too many packs in the repository. If the number of loose objects +exceeds the value of the `gc.auto` configuration variable, then +all loose objects are combined into a single pack using +`git-repack -d -l`. Setting the value of `gc.auto` to 0 +disables automatic packing of loose objects. ++ +If the number of packs exceeds the value of `gc.autopacklimit`, +then existing packs (except those marked with a `.keep` file) +are consolidated into a single pack by using the `-A` option of +`git-repack`. Setting `gc.autopacklimit` to 0 disables +automatic consolidation of packs. Configuration ------------- diff --git a/Documentation/git-http-push.txt b/Documentation/git-http-push.txt index 9afb860381..3a69b719b5 100644 --- a/Documentation/git-http-push.txt +++ b/Documentation/git-http-push.txt @@ -8,7 +8,7 @@ git-http-push - Push objects over HTTP/DAV to another repository SYNOPSIS -------- -'git-http-push' [--all] [--force] [--verbose] <url> <ref> [<ref>...] +'git-http-push' [--all] [--dry-run] [--force] [--verbose] <url> <ref> [<ref>...] DESCRIPTION ----------- @@ -30,6 +30,9 @@ OPTIONS the remote repository can lose commits; use it with care. +--dry-run:: + Do everything except actually send the updates. + --verbose:: Report the list of objects being walked locally and the list of objects successfully sent to the remote repository. diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt index bca4212e56..eabd7ef33f 100644 --- a/Documentation/git-merge.txt +++ b/Documentation/git-merge.txt @@ -11,26 +11,27 @@ SYNOPSIS [verse] 'git-merge' [-n] [--summary] [--no-commit] [--squash] [-s <strategy>]... [-m <msg>] <remote> <remote>... +'git-merge' <msg> HEAD <remote>... DESCRIPTION ----------- This is the top-level interface to the merge machinery which drives multiple merge strategy scripts. +The second syntax (<msg> `HEAD` <remote>) is supported for +historical reasons. Do not use it from the command line or in +new scripts. It is the same as `git merge -m <msg> <remote>`. + OPTIONS ------- include::merge-options.txt[] -<msg>:: +-m <msg>:: The commit message to be used for the merge commit (in case it is created). The `git-fmt-merge-msg` script can be used to give a good default for automated `git-merge` invocations. -<head>:: - Our branch head commit. This has to be `HEAD`, so new - syntax does not require it - <remote>:: Other branch head merged into our branch. You need at least one <remote>. 
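To make the rewritten `git gc --auto` description above concrete, both thresholds it consults are ordinary configuration variables; the settings below only illustrate the documented on/off behaviour:

------------
$ git config gc.auto 0            # disable automatic packing of loose objects
$ git config gc.autopacklimit 0   # disable automatic consolidation of packs
$ git gc --auto                   # finds no housekeeping required and exits
------------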
Specifying more than one <remote> diff --git a/Documentation/git-mergetool.txt b/Documentation/git-mergetool.txt index 6c32c6d18e..a26c260162 100644 --- a/Documentation/git-mergetool.txt +++ b/Documentation/git-mergetool.txt @@ -25,12 +25,18 @@ OPTIONS -t or --tool=<tool>:: Use the merge resolution program specified by <tool>. Valid merge tools are: - kdiff3, tkdiff, meld, xxdiff, emerge, vimdiff, gvimdiff, and opendiff + kdiff3, tkdiff, meld, xxdiff, emerge, vimdiff, gvimdiff, ecmerge, and opendiff + If a merge resolution program is not specified, 'git mergetool' will use the configuration variable merge.tool. If the configuration variable merge.tool is not set, 'git mergetool' will pick a suitable default. ++ +You can explicitly provide a full path to the tool by setting the +configuration variable mergetool.<tool>.path. For example, you +can configure the absolute path to kdiff3 by setting +mergetool.kdiff3.path. Otherwise, 'git mergetool' assumes the tool +is available in PATH. Author ------ diff --git a/Documentation/git-rev-list.txt b/Documentation/git-rev-list.txt index 7cd0e8913e..485280423e 100644 --- a/Documentation/git-rev-list.txt +++ b/Documentation/git-rev-list.txt @@ -34,6 +34,7 @@ SYNOPSIS [ \--pretty | \--header ] [ \--bisect ] [ \--bisect-vars ] + [ \--bisect-all ] [ \--merge ] [ \--reverse ] [ \--walk-reflogs ] @@ -354,6 +355,21 @@ the expected number of commits to be tested if `bisect_rev` turns out to be bad to `bisect_bad`, and the number of commits we are bisecting right now to `bisect_all`. +--bisect-all:: + +This outputs all the commit objects between the included and excluded +commits, ordered by their distance to the included and excluded +commits. The farthest from them is displayed first. (This is the only +one displayed by `--bisect`.) + +This is useful because it makes it easy to choose a good commit to +test when you want to avoid to test some of them for some reason (they +may not compile for example). + +This option can be used along with `--bisect-vars`, in this case, +after all the sorted commit objects, there will be the same text as if +`--bisect-vars` had been used alone. + -- Commit Ordering diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt index 3727776a0b..e38b7021b4 100644 --- a/Documentation/git-send-email.txt +++ b/Documentation/git-send-email.txt @@ -159,6 +159,9 @@ sendemail.aliasfiletype:: Format of the file(s) specified in sendemail.aliasesfile. Must be one of 'mutt', 'mailrc', 'pine', or 'gnus'. +sendemail.to:: + Email address (or alias) to always send to. + sendemail.cccmd:: Command to execute to generate per patch file specific "Cc:"s. diff --git a/Documentation/git-tools.txt b/Documentation/git-tools.txt index 10653ff898..a96403cb8c 100644 --- a/Documentation/git-tools.txt +++ b/Documentation/git-tools.txt @@ -22,6 +22,9 @@ Alternative/Augmentative Porcelains providing generally smoother user experience than the "raw" Core GIT itself and indeed many other version control systems. + Cogito is no longer maintained as most of its functionality + is now in core GIT. + - *pg* (http://www.spearce.org/category/projects/scm/pg/) @@ -33,7 +36,7 @@ Alternative/Augmentative Porcelains - *StGit* (http://www.procode.org/stgit/) Stacked GIT provides a quilt-like patch management functionality in the - GIT environment. You can easily manage your patches in the scope of GIT + GIT environment. You can easily manage your patches in the scope of GIT until they get merged upstream. 
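The git-mergetool and git-send-email hunks above both document configuration variables; a purely illustrative session setting them (the tool path and address are placeholders, not values taken from this change):

------------
$ git config merge.tool kdiff3
$ git config mergetool.kdiff3.path /opt/kdiff3/bin/kdiff3
$ git config sendemail.to 'git@vger.kernel.org'
$ git mergetool    # resolves conflicts with kdiff3 found via the configured path
------------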
diff --git a/Documentation/git.txt b/Documentation/git.txt index ce8f923a15..6db7ae1ea7 100644 --- a/Documentation/git.txt +++ b/Documentation/git.txt @@ -46,10 +46,12 @@ Documentation for older releases are available here: * link:v1.5.3/git.html[documentation for release 1.5.3] * release notes for + link:RelNotes-1.5.3.5.txt[1.5.3.5], link:RelNotes-1.5.3.4.txt[1.5.3.4], link:RelNotes-1.5.3.3.txt[1.5.3.3], link:RelNotes-1.5.3.2.txt[1.5.3.2], - link:RelNotes-1.5.3.1.txt[1.5.3.1]. + link:RelNotes-1.5.3.1.txt[1.5.3.1], + link:RelNotes-1.5.3.txt[1.5.3]. * release notes for link:RelNotes-1.5.2.5.txt[1.5.2.5], diff --git a/Documentation/gitk.txt b/Documentation/gitk.txt index e9f82b97b9..8dbfb0d5a3 100644 --- a/Documentation/gitk.txt +++ b/Documentation/gitk.txt @@ -69,7 +69,7 @@ gitk --since="2 weeks ago" \-- gitk:: The "--" is necessary to avoid confusion with the *branch* named 'gitk' -gitk --max-count=100 --all -- Makefile:: +gitk --max-count=100 --all \-- Makefile:: Show at most 100 changes made to the file 'Makefile'. Instead of only looking for changes in the current branch look in all branches. @@ -38,6 +38,8 @@ all:: # # Define NO_SETENV if you don't have setenv in the C library. # +# Define NO_MKDTEMP if you don't have mkdtemp in the C library. +# # Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link. # Enable it on Windows. By default, symrefs are still used. # @@ -208,7 +210,6 @@ BASIC_LDFLAGS = SCRIPT_SH = \ git-bisect.sh git-checkout.sh \ git-clean.sh git-clone.sh git-commit.sh \ - git-fetch.sh \ git-ls-remote.sh \ git-merge-one-file.sh git-mergetool.sh git-parse-remote.sh \ git-pull.sh git-rebase.sh git-rebase--interactive.sh \ @@ -224,8 +225,7 @@ SCRIPT_SH = \ SCRIPT_PERL = \ git-add--interactive.perl \ git-archimport.perl git-cvsimport.perl git-relink.perl \ - git-cvsserver.perl git-remote.perl \ - git-svnimport.perl git-cvsexportcommit.perl \ + git-cvsserver.perl git-remote.perl git-cvsexportcommit.perl \ git-send-email.perl git-svn.perl SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \ @@ -235,14 +235,14 @@ SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \ # ... and all the rest that could be moved out of bindir to gitexecdir PROGRAMS = \ git-fetch-pack$X \ - git-hash-object$X git-index-pack$X git-local-fetch$X \ + git-hash-object$X git-index-pack$X \ git-fast-import$X \ git-daemon$X \ git-merge-index$X git-mktag$X git-mktree$X git-patch-id$X \ git-peek-remote$X git-receive-pack$X \ git-send-pack$X git-shell$X \ - git-show-index$X git-ssh-fetch$X \ - git-ssh-upload$X git-unpack-file$X \ + git-show-index$X \ + git-unpack-file$X \ git-update-server-info$X \ git-upload-pack$X \ git-pack-redundant$X git-var$X \ @@ -270,9 +270,6 @@ ifndef NO_TCLTK OTHER_PROGRAMS += gitk-wish endif -# Backward compatibility -- to be removed after 1.0 -PROGRAMS += git-ssh-pull$X git-ssh-push$X - # Set paths to tools early so that they can be used for version tests. 
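The NO_MKDTEMP knob added to the Makefile above is driven like the other platform switches; a hypothetical build on a libc without mkdtemp(3) might look like:

------------
$ make NO_MKDTEMP=YesPlease prefix=$HOME all
$ make NO_MKDTEMP=YesPlease prefix=$HOME install
------------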
ifndef SHELL_PATH SHELL_PATH = /bin/sh @@ -292,7 +289,7 @@ LIB_H = \ run-command.h strbuf.h tag.h tree.h git-compat-util.h revision.h \ tree-walk.h log-tree.h dir.h path-list.h unpack-trees.h builtin.h \ utf8.h reflog-walk.h patch-ids.h attr.h decorate.h progress.h \ - mailmap.h remote.h + mailmap.h remote.h transport.h diffcore.h hash.h DIFF_OBJS = \ diff.o diff-lib.o diffcore-break.o diffcore-order.o \ @@ -302,7 +299,7 @@ DIFF_OBJS = \ LIB_OBJS = \ blob.o commit.o connect.o csum-file.o cache-tree.o base85.o \ date.o diff-delta.o entry.o exec_cmd.o ident.o \ - interpolate.o \ + interpolate.o hash.o \ lockfile.o \ patch-ids.o \ object.o pack-check.o pack-write.o patch-delta.o path.o pkt-line.o \ @@ -314,7 +311,8 @@ LIB_OBJS = \ write_or_die.o trace.o list-objects.o grep.o match-trees.o \ alloc.o merge-file.o path-list.o help.o unpack-trees.o $(DIFF_OBJS) \ color.o wt-status.o archive-zip.o archive-tar.o shallow.o utf8.o \ - convert.o attr.o decorate.o progress.o mailmap.o symlinks.o remote.o + convert.o attr.o decorate.o progress.o mailmap.o symlinks.o remote.o \ + transport.o bundle.o walker.o BUILTIN_OBJS = \ builtin-add.o \ @@ -335,6 +333,8 @@ BUILTIN_OBJS = \ builtin-diff-files.o \ builtin-diff-index.o \ builtin-diff-tree.o \ + builtin-fetch.o \ + builtin-fetch-pack.o \ builtin-fetch--tool.o \ builtin-fmt-merge-msg.o \ builtin-for-each-ref.o \ @@ -416,12 +416,14 @@ ifeq ($(uname_S),SunOS) NEEDS_LIBICONV = YesPlease NO_UNSETENV = YesPlease NO_SETENV = YesPlease + NO_MKDTEMP = YesPlease NO_C99_FORMAT = YesPlease NO_STRTOUMAX = YesPlease endif ifeq ($(uname_R),5.9) NO_UNSETENV = YesPlease NO_SETENV = YesPlease + NO_MKDTEMP = YesPlease NO_C99_FORMAT = YesPlease NO_STRTOUMAX = YesPlease endif @@ -518,7 +520,9 @@ else CC_LD_DYNPATH = -R endif -ifndef NO_CURL +ifdef NO_CURL + BASIC_CFLAGS += -DNO_CURL +else ifdef CURLDIR # Try "-Wl,-rpath=$(CURLDIR)/$(lib)" in such a case. 
BASIC_CFLAGS += -I$(CURLDIR)/include @@ -526,7 +530,9 @@ ifndef NO_CURL else CURL_LIBCURL = -lcurl endif - PROGRAMS += git-http-fetch$X + BUILTIN_OBJS += builtin-http-fetch.o + EXTLIBS += $(CURL_LIBCURL) + LIB_OBJS += http.o http-walker.o curl_check := $(shell (echo 070908; curl-config --vernum) | sort -r | sed -ne 2p) ifeq "$(curl_check)" "070908" ifndef NO_EXPAT @@ -608,6 +614,10 @@ ifdef NO_SETENV COMPAT_CFLAGS += -DNO_SETENV COMPAT_OBJS += compat/setenv.o endif +ifdef NO_MKDTEMP + COMPAT_CFLAGS += -DNO_MKDTEMP + COMPAT_OBJS += compat/mkdtemp.o +endif ifdef NO_UNSETENV COMPAT_CFLAGS += -DNO_UNSETENV COMPAT_OBJS += compat/unsetenv.o @@ -801,7 +811,7 @@ $(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh $(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak -perl/perl.mak: GIT-CFLAGS +perl/perl.mak: GIT-CFLAGS perl/Makefile perl/Makefile.PL $(QUIET_SUBDIR0)perl $(QUIET_SUBDIR1) PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F) $(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl @@ -889,35 +899,23 @@ http.o: http.c GIT-CFLAGS $(QUIET_CC)$(CC) -o $*.o -c $(ALL_CFLAGS) -DGIT_USER_AGENT='"git/$(GIT_VERSION)"' $< ifdef NO_EXPAT -http-fetch.o: http-fetch.c http.h GIT-CFLAGS +http-walker.o: http-walker.c http.h GIT-CFLAGS $(QUIET_CC)$(CC) -o $*.o -c $(ALL_CFLAGS) -DNO_EXPAT $< endif git-%$X: %.o $(GITLIBS) $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS) -ssh-pull.o: ssh-fetch.c -ssh-push.o: ssh-upload.c -git-local-fetch$X: fetch.o -git-ssh-fetch$X: rsh.o fetch.o -git-ssh-upload$X: rsh.o -git-ssh-pull$X: rsh.o fetch.o -git-ssh-push$X: rsh.o - git-imap-send$X: imap-send.o $(LIB_FILE) -http.o http-fetch.o http-push.o: http.h -git-http-fetch$X: fetch.o http.o http-fetch.o $(GITLIBS) - $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ - $(LIBS) $(CURL_LIBCURL) $(EXPAT_LIBEXPAT) +http.o http-walker.o http-push.o: http.h git-http-push$X: revision.o http.o http-push.o $(GITLIBS) $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ $(LIBS) $(CURL_LIBCURL) $(EXPAT_LIBEXPAT) -$(LIB_OBJS) $(BUILTIN_OBJS) fetch.o: $(LIB_H) +$(LIB_OBJS) $(BUILTIN_OBJS): $(LIB_H) $(patsubst git-%$X,%.o,$(PROGRAMS)): $(LIB_H) $(wildcard */*.h) -$(DIFF_OBJS): diffcore.h $(LIB_FILE): $(LIB_OBJS) $(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(LIB_OBJS) @@ -931,10 +929,6 @@ $(XDIFF_LIB): $(XDIFF_OBJS) $(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(XDIFF_OBJS) -perl/Makefile: perl/Git.pm perl/Makefile.PL GIT-CFLAGS - (cd perl && $(PERL_PATH) Makefile.PL \ - PREFIX='$(prefix_SQ)') - doc: $(MAKE) -C Documentation all @@ -1131,8 +1125,7 @@ check-docs:: git-merge-octopus | git-merge-ours | git-merge-recursive | \ git-merge-resolve | git-merge-stupid | \ git-add--interactive | git-fsck-objects | git-init-db | \ - git-repo-config | git-fetch--tool | \ - git-ssh-pull | git-ssh-push ) continue ;; \ + git-repo-config | git-fetch--tool ) continue ;; \ esac ; \ test -f "Documentation/$$v.txt" || \ echo "no doc: $$v"; \ @@ -209,8 +209,11 @@ static struct match_attr *parse_attr_line(const char *line, const char *src, num_attr = 0; cp = name + namelen; cp = cp + strspn(cp, blank); - while (*cp) + while (*cp) { cp = parse_attr(src, lineno, cp, &num_attr, res); + if (!cp) + return NULL; + } if (pass) break; res = xcalloc(1, diff --git a/builtin-add.c b/builtin-add.c index f9a65803d8..dbbb05215f 100644 --- a/builtin-add.c +++ b/builtin-add.c @@ -12,6 +12,7 @@ #include "diffcore.h" #include "commit.h" #include "revision.h" +#include "run-command.h" static const char builtin_add_usage[] = "git-add [-n] 
[-v] [-f] [--interactive | -i] [-u] [--refresh] [--] <filepattern>..."; @@ -44,6 +45,7 @@ static void prune_directory(struct dir_struct *dir, const char **pathspec, int p die("pathspec '%s' did not match any files", pathspec[i]); } + free(seen); } static void fill_directory(struct dir_struct *dir, const char **pathspec, @@ -106,7 +108,7 @@ static void update_callback(struct diff_queue_struct *q, } } -static void update(int verbose, const char *prefix, const char **files) +void add_files_to_cache(int verbose, const char *prefix, const char **files) { struct rev_info rev; init_revisions(&rev, prefix); @@ -115,8 +117,6 @@ static void update(int verbose, const char *prefix, const char **files) rev.diffopt.output_format = DIFF_FORMAT_CALLBACK; rev.diffopt.format_callback = update_callback; rev.diffopt.format_callback_data = &verbose; - if (read_cache() < 0) - die("index file corrupt"); run_diff_files(&rev, 0); } @@ -135,6 +135,7 @@ static void refresh(int verbose, const char **pathspec) if (!seen[i]) die("pathspec '%s' did not match any files", pathspec[i]); } + free(seen); } static int git_add_config(const char *var, const char *value) @@ -149,6 +150,13 @@ static int git_add_config(const char *var, const char *value) return git_default_config(var, value); } +int interactive_add(void) +{ + const char *argv[2] = { "add--interactive", NULL }; + + return run_command_v_opt(argv, RUN_GIT_CMD); +} + static struct lock_file lock_file; static const char ignore_error[] = @@ -168,12 +176,9 @@ int cmd_add(int argc, const char **argv, const char *prefix) add_interactive++; } if (add_interactive) { - const char *args[] = { "add--interactive", NULL }; - - if (add_interactive != 1 || argc != 2) + if (argc != 2) die("add --interactive does not take any parameters"); - execv_git_cmd(args); - exit(1); + exit(interactive_add()); } git_config(git_add_config); @@ -213,7 +218,9 @@ int cmd_add(int argc, const char **argv, const char *prefix) } if (take_worktree_changes) { - update(verbose, prefix, argv + i); + if (read_cache() < 0) + die("index file corrupt"); + add_files_to_cache(verbose, prefix, argv + i); goto finish; } diff --git a/builtin-apply.c b/builtin-apply.c index 05c6bc3592..8411b38c79 100644 --- a/builtin-apply.c +++ b/builtin-apply.c @@ -152,7 +152,7 @@ struct patch { unsigned int is_rename:1; struct fragment *fragments; char *result; - unsigned long resultsize; + size_t resultsize; char old_sha1_prefix[41]; char new_sha1_prefix[41]; struct patch *next; diff --git a/builtin-archive.c b/builtin-archive.c index 04385dea05..14a1b3077c 100644 --- a/builtin-archive.c +++ b/builtin-archive.c @@ -30,7 +30,7 @@ static int run_remote_archiver(const char *remote, int argc, { char *url, buf[LARGE_PACKET_MAX]; int fd[2], i, len, rv; - pid_t pid; + struct child_process *conn; const char *exec = "git-upload-archive"; int exec_at = 0; @@ -46,9 +46,7 @@ static int run_remote_archiver(const char *remote, int argc, } url = xstrdup(remote); - pid = git_connect(fd, url, exec, 0); - if (pid < 0) - return pid; + conn = git_connect(fd, url, exec, 0); for (i = 1; i < argc; i++) { if (i == exec_at) @@ -76,7 +74,7 @@ static int run_remote_archiver(const char *remote, int argc, rv = recv_sideband("archive", fd[0], 1, 2); close(fd[0]); close(fd[1]); - rv |= finish_connect(pid); + rv |= finish_connect(conn); return !!rv; } @@ -148,12 +146,14 @@ void *sha1_file_to_archive(const char *path, const unsigned char *sha1, buffer = read_sha1_file(sha1, type, sizep); if (buffer && S_ISREG(mode)) { struct strbuf buf; + size_t size = 0; 
strbuf_init(&buf, 0); strbuf_attach(&buf, buffer, *sizep, *sizep + 1); convert_to_working_tree(path, buf.buf, buf.len, &buf); convert_to_archive(path, buf.buf, buf.len, &buf, commit); - buffer = strbuf_detach(&buf, sizep); + buffer = strbuf_detach(&buf, &size); + *sizep = size; } return buffer; diff --git a/builtin-blame.c b/builtin-blame.c index e3112a2d5b..8432b823e6 100644 --- a/builtin-blame.c +++ b/builtin-blame.c @@ -2059,6 +2059,7 @@ static struct commit *fake_working_tree_commit(const char *path, const char *con if (strbuf_read(&buf, 0, 0) < 0) die("read error %s from stdin", strerror(errno)); } + convert_to_git(path, buf.buf, buf.len, &buf); origin->file.ptr = buf.buf; origin->file.size = buf.len; pretend_sha1_file(buf.buf, buf.len, OBJ_BLOB, origin->blob_sha1); diff --git a/builtin-bundle.c b/builtin-bundle.c index 1b650069c9..9f38e2176a 100644 --- a/builtin-bundle.c +++ b/builtin-bundle.c @@ -1,11 +1,6 @@ #include "builtin.h" #include "cache.h" -#include "object.h" -#include "commit.h" -#include "diff.h" -#include "revision.h" -#include "list-objects.h" -#include "run-command.h" +#include "bundle.h" /* * Basic handler for bundle files to connect repositories via sneakernet. @@ -16,355 +11,6 @@ static const char *bundle_usage="git-bundle (create <bundle> <git-rev-list args> | verify <bundle> | list-heads <bundle> [refname]... | unbundle <bundle> [refname]... )"; -static const char bundle_signature[] = "# v2 git bundle\n"; - -struct ref_list { - unsigned int nr, alloc; - struct ref_list_entry { - unsigned char sha1[20]; - char *name; - } *list; -}; - -static void add_to_ref_list(const unsigned char *sha1, const char *name, - struct ref_list *list) -{ - if (list->nr + 1 >= list->alloc) { - list->alloc = alloc_nr(list->nr + 1); - list->list = xrealloc(list->list, - list->alloc * sizeof(list->list[0])); - } - memcpy(list->list[list->nr].sha1, sha1, 20); - list->list[list->nr].name = xstrdup(name); - list->nr++; -} - -struct bundle_header { - struct ref_list prerequisites; - struct ref_list references; -}; - -/* returns an fd */ -static int read_header(const char *path, struct bundle_header *header) { - char buffer[1024]; - int fd; - long fpos; - FILE *ffd = fopen(path, "rb"); - - if (!ffd) - return error("could not open '%s'", path); - if (!fgets(buffer, sizeof(buffer), ffd) || - strcmp(buffer, bundle_signature)) { - fclose(ffd); - return error("'%s' does not look like a v2 bundle file", path); - } - while (fgets(buffer, sizeof(buffer), ffd) - && buffer[0] != '\n') { - int is_prereq = buffer[0] == '-'; - int offset = is_prereq ? 1 : 0; - int len = strlen(buffer); - unsigned char sha1[20]; - struct ref_list *list = is_prereq ? &header->prerequisites - : &header->references; - char delim; - - if (buffer[len - 1] == '\n') - buffer[len - 1] = '\0'; - if (get_sha1_hex(buffer + offset, sha1)) { - warning("unrecognized header: %s", buffer); - continue; - } - delim = buffer[40 + offset]; - if (!isspace(delim) && (delim != '\0' || !is_prereq)) - die ("invalid header: %s", buffer); - add_to_ref_list(sha1, isspace(delim) ? 
- buffer + 41 + offset : "", list); - } - fpos = ftell(ffd); - fclose(ffd); - fd = open(path, O_RDONLY); - if (fd < 0) - return error("could not open '%s'", path); - lseek(fd, fpos, SEEK_SET); - return fd; -} - -static int list_refs(struct ref_list *r, int argc, const char **argv) -{ - int i; - - for (i = 0; i < r->nr; i++) { - if (argc > 1) { - int j; - for (j = 1; j < argc; j++) - if (!strcmp(r->list[i].name, argv[j])) - break; - if (j == argc) - continue; - } - printf("%s %s\n", sha1_to_hex(r->list[i].sha1), - r->list[i].name); - } - return 0; -} - -#define PREREQ_MARK (1u<<16) - -static int verify_bundle(struct bundle_header *header, int verbose) -{ - /* - * Do fast check, then if any prereqs are missing then go line by line - * to be verbose about the errors - */ - struct ref_list *p = &header->prerequisites; - struct rev_info revs; - const char *argv[] = {NULL, "--all"}; - struct object_array refs; - struct commit *commit; - int i, ret = 0, req_nr; - const char *message = "Repository lacks these prerequisite commits:"; - - init_revisions(&revs, NULL); - for (i = 0; i < p->nr; i++) { - struct ref_list_entry *e = p->list + i; - struct object *o = parse_object(e->sha1); - if (o) { - o->flags |= PREREQ_MARK; - add_pending_object(&revs, o, e->name); - continue; - } - if (++ret == 1) - error(message); - error("%s %s", sha1_to_hex(e->sha1), e->name); - } - if (revs.pending.nr != p->nr) - return ret; - req_nr = revs.pending.nr; - setup_revisions(2, argv, &revs, NULL); - - memset(&refs, 0, sizeof(struct object_array)); - for (i = 0; i < revs.pending.nr; i++) { - struct object_array_entry *e = revs.pending.objects + i; - add_object_array(e->item, e->name, &refs); - } - - prepare_revision_walk(&revs); - - i = req_nr; - while (i && (commit = get_revision(&revs))) - if (commit->object.flags & PREREQ_MARK) - i--; - - for (i = 0; i < req_nr; i++) - if (!(refs.objects[i].item->flags & SHOWN)) { - if (++ret == 1) - error(message); - error("%s %s", sha1_to_hex(refs.objects[i].item->sha1), - refs.objects[i].name); - } - - for (i = 0; i < refs.nr; i++) - clear_commit_marks((struct commit *)refs.objects[i].item, -1); - - if (verbose) { - struct ref_list *r; - - r = &header->references; - printf("The bundle contains %d ref%s\n", - r->nr, (1 < r->nr) ? "s" : ""); - list_refs(r, 0, NULL); - r = &header->prerequisites; - printf("The bundle requires these %d ref%s\n", - r->nr, (1 < r->nr) ? 
"s" : ""); - list_refs(r, 0, NULL); - } - return ret; -} - -static int list_heads(struct bundle_header *header, int argc, const char **argv) -{ - return list_refs(&header->references, argc, argv); -} - -static int create_bundle(struct bundle_header *header, const char *path, - int argc, const char **argv) -{ - static struct lock_file lock; - int bundle_fd = -1; - int bundle_to_stdout; - const char **argv_boundary = xmalloc((argc + 4) * sizeof(const char *)); - const char **argv_pack = xmalloc(5 * sizeof(const char *)); - int i, ref_count = 0; - char buffer[1024]; - struct rev_info revs; - struct child_process rls; - FILE *rls_fout; - - bundle_to_stdout = !strcmp(path, "-"); - if (bundle_to_stdout) - bundle_fd = 1; - else - bundle_fd = hold_lock_file_for_update(&lock, path, 1); - - /* write signature */ - write_or_die(bundle_fd, bundle_signature, strlen(bundle_signature)); - - /* init revs to list objects for pack-objects later */ - save_commit_buffer = 0; - init_revisions(&revs, NULL); - - /* write prerequisites */ - memcpy(argv_boundary + 3, argv + 1, argc * sizeof(const char *)); - argv_boundary[0] = "rev-list"; - argv_boundary[1] = "--boundary"; - argv_boundary[2] = "--pretty=oneline"; - argv_boundary[argc + 2] = NULL; - memset(&rls, 0, sizeof(rls)); - rls.argv = argv_boundary; - rls.out = -1; - rls.git_cmd = 1; - if (start_command(&rls)) - return -1; - rls_fout = fdopen(rls.out, "r"); - while (fgets(buffer, sizeof(buffer), rls_fout)) { - unsigned char sha1[20]; - if (buffer[0] == '-') { - write_or_die(bundle_fd, buffer, strlen(buffer)); - if (!get_sha1_hex(buffer + 1, sha1)) { - struct object *object = parse_object(sha1); - object->flags |= UNINTERESTING; - add_pending_object(&revs, object, buffer); - } - } else if (!get_sha1_hex(buffer, sha1)) { - struct object *object = parse_object(sha1); - object->flags |= SHOWN; - } - } - fclose(rls_fout); - if (finish_command(&rls)) - return error("rev-list died"); - - /* write references */ - argc = setup_revisions(argc, argv, &revs, NULL); - if (argc > 1) - return error("unrecognized argument: %s'", argv[1]); - - for (i = 0; i < revs.pending.nr; i++) { - struct object_array_entry *e = revs.pending.objects + i; - unsigned char sha1[20]; - char *ref; - - if (e->item->flags & UNINTERESTING) - continue; - if (dwim_ref(e->name, strlen(e->name), sha1, &ref) != 1) - continue; - /* - * Make sure the refs we wrote out is correct; --max-count and - * other limiting options could have prevented all the tips - * from getting output. - * - * Non commit objects such as tags and blobs do not have - * this issue as they are not affected by those extra - * constraints. - */ - if (!(e->item->flags & SHOWN) && e->item->type == OBJ_COMMIT) { - warning("ref '%s' is excluded by the rev-list options", - e->name); - free(ref); - continue; - } - /* - * If you run "git bundle create bndl v1.0..v2.0", the - * name of the positive ref is "v2.0" but that is the - * commit that is referenced by the tag, and not the tag - * itself. - */ - if (hashcmp(sha1, e->item->sha1)) { - /* - * Is this the positive end of a range expressed - * in terms of a tag (e.g. v2.0 from the range - * "v1.0..v2.0")? - */ - struct commit *one = lookup_commit_reference(sha1); - struct object *obj; - - if (e->item == &(one->object)) { - /* - * Need to include e->name as an - * independent ref to the pack-objects - * input, so that the tag is included - * in the output; otherwise we would - * end up triggering "empty bundle" - * error. 
- */ - obj = parse_object(sha1); - obj->flags |= SHOWN; - add_pending_object(&revs, obj, e->name); - } - free(ref); - continue; - } - - ref_count++; - write_or_die(bundle_fd, sha1_to_hex(e->item->sha1), 40); - write_or_die(bundle_fd, " ", 1); - write_or_die(bundle_fd, ref, strlen(ref)); - write_or_die(bundle_fd, "\n", 1); - free(ref); - } - if (!ref_count) - die ("Refusing to create empty bundle."); - - /* end header */ - write_or_die(bundle_fd, "\n", 1); - - /* write pack */ - argv_pack[0] = "pack-objects"; - argv_pack[1] = "--all-progress"; - argv_pack[2] = "--stdout"; - argv_pack[3] = "--thin"; - argv_pack[4] = NULL; - memset(&rls, 0, sizeof(rls)); - rls.argv = argv_pack; - rls.in = -1; - rls.out = bundle_fd; - rls.git_cmd = 1; - if (start_command(&rls)) - return error("Could not spawn pack-objects"); - for (i = 0; i < revs.pending.nr; i++) { - struct object *object = revs.pending.objects[i].item; - if (object->flags & UNINTERESTING) - write(rls.in, "^", 1); - write(rls.in, sha1_to_hex(object->sha1), 40); - write(rls.in, "\n", 1); - } - if (finish_command(&rls)) - return error ("pack-objects died"); - close(bundle_fd); - if (!bundle_to_stdout) - commit_lock_file(&lock); - return 0; -} - -static int unbundle(struct bundle_header *header, int bundle_fd, - int argc, const char **argv) -{ - const char *argv_index_pack[] = {"index-pack", - "--fix-thin", "--stdin", NULL}; - struct child_process ip; - - if (verify_bundle(header, 0)) - return -1; - memset(&ip, 0, sizeof(ip)); - ip.argv = argv_index_pack; - ip.in = bundle_fd; - ip.no_stdout = 1; - ip.git_cmd = 1; - if (run_command(&ip)) - return error("index-pack died"); - return list_heads(header, argc, argv); -} - int cmd_bundle(int argc, const char **argv, const char *prefix) { struct bundle_header header; @@ -388,8 +34,8 @@ int cmd_bundle(int argc, const char **argv, const char *prefix) } memset(&header, 0, sizeof(header)); - if (strcmp(cmd, "create") && - (bundle_fd = read_header(bundle_file, &header)) < 0) + if (strcmp(cmd, "create") && (bundle_fd = + read_bundle_header(bundle_file, &header)) < 0) return 1; if (!strcmp(cmd, "verify")) { @@ -401,7 +47,7 @@ int cmd_bundle(int argc, const char **argv, const char *prefix) } if (!strcmp(cmd, "list-heads")) { close(bundle_fd); - return !!list_heads(&header, argc, argv); + return !!list_bundle_refs(&header, argc, argv); } if (!strcmp(cmd, "create")) { if (nongit) @@ -410,7 +56,8 @@ int cmd_bundle(int argc, const char **argv, const char *prefix) } else if (!strcmp(cmd, "unbundle")) { if (nongit) die("Need a repository to unbundle."); - return !!unbundle(&header, bundle_fd, argc, argv); + return !!unbundle(&header, bundle_fd) || + list_bundle_refs(&header, argc, argv); } else usage(bundle_usage); } diff --git a/builtin-config.c b/builtin-config.c index d98b6c2c4c..e5e243f27c 100644 --- a/builtin-config.c +++ b/builtin-config.c @@ -175,7 +175,10 @@ int cmd_config(int argc, const char **argv, const char *prefix) else if (!strcmp(argv[1], "--list") || !strcmp(argv[1], "-l")) { if (argc != 2) usage(git_config_set_usage); - return git_config(show_all_config); + if (git_config(show_all_config) < 0 && file && errno) + die("unable to read config file %s: %s", file, + strerror(errno)); + return 0; } else if (!strcmp(argv[1], "--global")) { char *home = getenv("HOME"); diff --git a/builtin-fetch--tool.c b/builtin-fetch--tool.c index 1e43d79221..6a78517958 100644 --- a/builtin-fetch--tool.c +++ b/builtin-fetch--tool.c @@ -25,7 +25,7 @@ static int update_ref_env(const char *action, unsigned char *oldval) { 
char msg[1024]; - char *rla = getenv("GIT_REFLOG_ACTION"); + const char *rla = getenv("GIT_REFLOG_ACTION"); if (!rla) rla = "(reflog update)"; @@ -61,7 +61,7 @@ static int update_local_ref(const char *name, } if (get_sha1(name, sha1_old)) { - char *msg; + const char *msg; just_store: /* new ref */ if (!strncmp(name, "refs/tags/", 10)) @@ -131,7 +131,7 @@ static int append_fetch_head(FILE *fp, if (get_sha1(head, sha1)) return error("Not a valid object name: %s", head); - commit = lookup_commit_reference(sha1); + commit = lookup_commit_reference_gently(sha1, 1); if (!commit) not_for_merge = 1; diff --git a/fetch-pack.c b/builtin-fetch-pack.c index 9c81305be5..862652be92 100644 --- a/fetch-pack.c +++ b/builtin-fetch-pack.c @@ -6,19 +6,18 @@ #include "exec_cmd.h" #include "pack.h" #include "sideband.h" +#include "fetch-pack.h" +#include "run-command.h" -static int keep_pack; static int transfer_unpack_limit = -1; static int fetch_unpack_limit = -1; static int unpack_limit = 100; -static int quiet; -static int verbose; -static int fetch_all; -static int depth; -static int no_progress; +static struct fetch_pack_args args = { + /* .uploadpack = */ "git-upload-pack", +}; + static const char fetch_pack_usage[] = "git-fetch-pack [--all] [--quiet|-q] [--keep|-k] [--thin] [--upload-pack=<git-upload-pack>] [--depth=<n>] [--no-progress] [-v] [<host>:]<directory> [<refs>...]"; -static const char *uploadpack = "git-upload-pack"; #define COMPLETE (1U << 0) #define COMMON (1U << 1) @@ -180,7 +179,7 @@ static int find_common(int fd[2], unsigned char *result_sha1, (use_sideband == 2 ? " side-band-64k" : ""), (use_sideband == 1 ? " side-band" : ""), (use_thin_pack ? " thin-pack" : ""), - (no_progress ? " no-progress" : ""), + (args.no_progress ? " no-progress" : ""), " ofs-delta"); else packet_write(fd[1], "want %s\n", sha1_to_hex(remote)); @@ -188,13 +187,13 @@ static int find_common(int fd[2], unsigned char *result_sha1, } if (is_repository_shallow()) write_shallow_commits(fd[1], 1); - if (depth > 0) - packet_write(fd[1], "deepen %d", depth); + if (args.depth > 0) + packet_write(fd[1], "deepen %d", args.depth); packet_flush(fd[1]); if (!fetching) return 1; - if (depth > 0) { + if (args.depth > 0) { char line[1024]; unsigned char sha1[20]; int len; @@ -225,7 +224,7 @@ static int find_common(int fd[2], unsigned char *result_sha1, retval = -1; while ((sha1 = get_rev())) { packet_write(fd[1], "have %s\n", sha1_to_hex(sha1)); - if (verbose) + if (args.verbose) fprintf(stderr, "have %s\n", sha1_to_hex(sha1)); in_vain++; if (!(31 & ++count)) { @@ -243,7 +242,7 @@ static int find_common(int fd[2], unsigned char *result_sha1, do { ack = get_ack(fd[0], result_sha1); - if (verbose && ack) + if (args.verbose && ack) fprintf(stderr, "got ack %d %s\n", ack, sha1_to_hex(result_sha1)); if (ack == 1) { @@ -262,7 +261,7 @@ static int find_common(int fd[2], unsigned char *result_sha1, } while (ack); flushes--; if (got_continue && MAX_IN_VAIN < in_vain) { - if (verbose) + if (args.verbose) fprintf(stderr, "giving up\n"); break; /* give up */ } @@ -270,7 +269,7 @@ static int find_common(int fd[2], unsigned char *result_sha1, } done: packet_write(fd[1], "done\n"); - if (verbose) + if (args.verbose) fprintf(stderr, "done\n"); if (retval != 0) { multi_ack = 0; @@ -279,7 +278,7 @@ done: while (flushes || multi_ack) { int ack = get_ack(fd[0], result_sha1); if (ack) { - if (verbose) + if (args.verbose) fprintf(stderr, "got ack (%d) %s\n", ack, sha1_to_hex(result_sha1)); if (ack == 1) @@ -316,7 +315,7 @@ static int mark_complete(const 
char *path, const unsigned char *sha1, int flag, static void mark_recent_complete_commits(unsigned long cutoff) { while (complete && cutoff <= complete->item->date) { - if (verbose) + if (args.verbose) fprintf(stderr, "Marking %s as complete\n", sha1_to_hex(complete->item->object.sha1)); pop_most_recent_commit(&complete, COMPLETE); @@ -331,7 +330,7 @@ static void filter_refs(struct ref **refs, int nr_match, char **match) struct ref *ref, *next; struct ref *fastarray[32]; - if (nr_match && !fetch_all) { + if (nr_match && !args.fetch_all) { if (ARRAY_SIZE(fastarray) < nr_match) return_refs = xcalloc(nr_match, sizeof(struct ref *)); else { @@ -347,8 +346,8 @@ static void filter_refs(struct ref **refs, int nr_match, char **match) if (!memcmp(ref->name, "refs/", 5) && check_ref_format(ref->name + 5)) ; /* trash */ - else if (fetch_all && - (!depth || prefixcmp(ref->name, "refs/tags/") )) { + else if (args.fetch_all && + (!args.depth || prefixcmp(ref->name, "refs/tags/") )) { *newtail = ref; ref->next = NULL; newtail = &ref->next; @@ -364,7 +363,7 @@ static void filter_refs(struct ref **refs, int nr_match, char **match) free(ref); } - if (!fetch_all) { + if (!args.fetch_all) { int i; for (i = 0; i < nr_match; i++) { ref = return_refs[i]; @@ -407,7 +406,7 @@ static int everything_local(struct ref **refs, int nr_match, char **match) } } - if (!depth) { + if (!args.depth) { for_each_ref(mark_complete, NULL); if (cutoff) mark_recent_complete_commits(cutoff); @@ -441,7 +440,7 @@ static int everything_local(struct ref **refs, int nr_match, char **match) o = lookup_object(remote); if (!o || !(o->flags & COMPLETE)) { retval = 0; - if (!verbose) + if (!args.verbose) continue; fprintf(stderr, "want %s (%s)\n", sha1_to_hex(remote), @@ -450,7 +449,7 @@ static int everything_local(struct ref **refs, int nr_match, char **match) } hashcpy(ref->new_sha1, local); - if (!verbose) + if (!args.verbose) continue; fprintf(stderr, "already have %s (%s)\n", sha1_to_hex(remote), @@ -459,55 +458,52 @@ static int everything_local(struct ref **refs, int nr_match, char **match) return retval; } -static pid_t setup_sideband(int fd[2], int xd[2]) +static int sideband_demux(int fd, void *data) { - pid_t side_pid; + int *xd = data; + + close(xd[1]); + return recv_sideband("fetch-pack", xd[0], fd, 2); +} +static void setup_sideband(int fd[2], int xd[2], struct async *demux) +{ if (!use_sideband) { fd[0] = xd[0]; fd[1] = xd[1]; - return 0; + return; } /* xd[] is talking with upload-pack; subprocess reads from * xd[0], spits out band#2 to stderr, and feeds us band#1 - * through our fd[0]. + * through demux->out. 
*/ - if (pipe(fd) < 0) - die("fetch-pack: unable to set up pipe"); - side_pid = fork(); - if (side_pid < 0) + demux->proc = sideband_demux; + demux->data = xd; + if (start_async(demux)) die("fetch-pack: unable to fork off sideband demultiplexer"); - if (!side_pid) { - /* subprocess */ - close(fd[0]); - if (xd[0] != xd[1]) - close(xd[1]); - if (recv_sideband("fetch-pack", xd[0], fd[1], 2)) - exit(1); - exit(0); - } close(xd[0]); - close(fd[1]); + fd[0] = demux->out; fd[1] = xd[1]; - return side_pid; } -static int get_pack(int xd[2]) +static int get_pack(int xd[2], char **pack_lockfile) { - int status; - pid_t pid, side_pid; + struct async demux; int fd[2]; const char *argv[20]; char keep_arg[256]; char hdr_arg[256]; const char **av; - int do_keep = keep_pack; + int do_keep = args.keep_pack; + struct child_process cmd; - side_pid = setup_sideband(fd, xd); + setup_sideband(fd, xd, &demux); + memset(&cmd, 0, sizeof(cmd)); + cmd.argv = argv; av = argv; *hdr_arg = 0; - if (unpack_limit) { + if (!args.keep_pack && unpack_limit) { struct pack_header header; if (read_pack_header(fd[0], &header)) @@ -521,13 +517,15 @@ static int get_pack(int xd[2]) } if (do_keep) { + if (pack_lockfile) + cmd.out = -1; *av++ = "index-pack"; *av++ = "--stdin"; - if (!quiet && !no_progress) + if (!args.quiet && !args.no_progress) *av++ = "-v"; - if (use_thin_pack) + if (args.use_thin_pack) *av++ = "--fix-thin"; - if (keep_pack > 1 || unpack_limit) { + if (args.lock_pack || unpack_limit) { int s = sprintf(keep_arg, "--keep=fetch-pack %d on ", getpid()); if (gethostname(keep_arg + s, sizeof(keep_arg) - s)) @@ -537,43 +535,32 @@ static int get_pack(int xd[2]) } else { *av++ = "unpack-objects"; - if (quiet) + if (args.quiet) *av++ = "-q"; } if (*hdr_arg) *av++ = hdr_arg; *av++ = NULL; - pid = fork(); - if (pid < 0) + cmd.in = fd[0]; + cmd.git_cmd = 1; + if (start_command(&cmd)) die("fetch-pack: unable to fork off %s", argv[0]); - if (!pid) { - dup2(fd[0], 0); - close(fd[0]); - close(fd[1]); - execv_git_cmd(argv); - die("%s exec failed", argv[0]); - } - close(fd[0]); close(fd[1]); - while (waitpid(pid, &status, 0) < 0) { - if (errno != EINTR) - die("waiting for %s: %s", argv[0], strerror(errno)); - } - if (WIFEXITED(status)) { - int code = WEXITSTATUS(status); - if (code) - die("%s died with error code %d", argv[0], code); - return 0; - } - if (WIFSIGNALED(status)) { - int sig = WTERMSIG(status); - die("%s died of signal %d", argv[0], sig); - } - die("%s died of unnatural causes %d", argv[0], status); + if (do_keep && pack_lockfile) + *pack_lockfile = index_pack_lockfile(cmd.out); + + if (finish_command(&cmd)) + die("%s failed", argv[0]); + if (use_sideband && finish_async(&demux)) + die("error in sideband demultiplexer"); + return 0; } -static int fetch_pack(int fd[2], int nr_match, char **match) +static struct ref *do_fetch_pack(int fd[2], + int nr_match, + char **match, + char **pack_lockfile) { struct ref *ref; unsigned char sha1[20]; @@ -582,17 +569,17 @@ static int fetch_pack(int fd[2], int nr_match, char **match) if (is_repository_shallow() && !server_supports("shallow")) die("Server does not support shallow clients"); if (server_supports("multi_ack")) { - if (verbose) + if (args.verbose) fprintf(stderr, "Server supports multi_ack\n"); multi_ack = 1; } if (server_supports("side-band-64k")) { - if (verbose) + if (args.verbose) fprintf(stderr, "Server supports side-band-64k\n"); use_sideband = 2; } else if (server_supports("side-band")) { - if (verbose) + if (args.verbose) fprintf(stderr, "Server supports 
side-band\n"); use_sideband = 1; } @@ -605,22 +592,17 @@ static int fetch_pack(int fd[2], int nr_match, char **match) goto all_done; } if (find_common(fd, sha1, ref) < 0) - if (keep_pack != 1) + if (!args.keep_pack) /* When cloning, it is not unusual to have * no common commit. */ fprintf(stderr, "warning: no common commits\n"); - if (get_pack(fd)) + if (get_pack(fd, pack_lockfile)) die("git-fetch-pack: fetch failed."); all_done: - while (ref) { - printf("%s %s\n", - sha1_to_hex(ref->old_sha1), ref->name); - ref = ref->next; - } - return 0; + return ref; } static int remove_duplicates(int nr_heads, char **heads) @@ -642,7 +624,6 @@ static int remove_duplicates(int nr_heads, char **heads) heads[dst] = heads[src]; dst++; } - heads[dst] = 0; return dst; } @@ -663,85 +644,117 @@ static int fetch_pack_config(const char *var, const char *value) static struct lock_file lock; -int main(int argc, char **argv) +static void fetch_pack_setup(void) { - int i, ret, nr_heads; - char *dest = NULL, **heads; - int fd[2]; - pid_t pid; - struct stat st; - - setup_git_directory(); + static int did_setup; + if (did_setup) + return; git_config(fetch_pack_config); - if (0 <= transfer_unpack_limit) unpack_limit = transfer_unpack_limit; else if (0 <= fetch_unpack_limit) unpack_limit = fetch_unpack_limit; + did_setup = 1; +} + +int cmd_fetch_pack(int argc, const char **argv, const char *prefix) +{ + int i, ret, nr_heads; + struct ref *ref; + char *dest = NULL, **heads; nr_heads = 0; heads = NULL; for (i = 1; i < argc; i++) { - char *arg = argv[i]; + const char *arg = argv[i]; if (*arg == '-') { if (!prefixcmp(arg, "--upload-pack=")) { - uploadpack = arg + 14; + args.uploadpack = arg + 14; continue; } if (!prefixcmp(arg, "--exec=")) { - uploadpack = arg + 7; + args.uploadpack = arg + 7; continue; } if (!strcmp("--quiet", arg) || !strcmp("-q", arg)) { - quiet = 1; + args.quiet = 1; continue; } if (!strcmp("--keep", arg) || !strcmp("-k", arg)) { - keep_pack++; - unpack_limit = 0; + args.lock_pack = args.keep_pack; + args.keep_pack = 1; continue; } if (!strcmp("--thin", arg)) { - use_thin_pack = 1; + args.use_thin_pack = 1; continue; } if (!strcmp("--all", arg)) { - fetch_all = 1; + args.fetch_all = 1; continue; } if (!strcmp("-v", arg)) { - verbose = 1; + args.verbose = 1; continue; } if (!prefixcmp(arg, "--depth=")) { - depth = strtol(arg + 8, NULL, 0); - if (stat(git_path("shallow"), &st)) - st.st_mtime = 0; + args.depth = strtol(arg + 8, NULL, 0); continue; } if (!strcmp("--no-progress", arg)) { - no_progress = 1; + args.no_progress = 1; continue; } usage(fetch_pack_usage); } - dest = arg; - heads = argv + i + 1; + dest = (char *)arg; + heads = (char **)(argv + i + 1); nr_heads = argc - i - 1; break; } if (!dest) usage(fetch_pack_usage); - pid = git_connect(fd, dest, uploadpack, verbose ? CONNECT_VERBOSE : 0); - if (pid < 0) - return 1; + + ref = fetch_pack(&args, dest, nr_heads, heads, NULL); + ret = !ref; + + while (ref) { + printf("%s %s\n", + sha1_to_hex(ref->old_sha1), ref->name); + ref = ref->next; + } + + return ret; +} + +struct ref *fetch_pack(struct fetch_pack_args *my_args, + const char *dest, + int nr_heads, + char **heads, + char **pack_lockfile) +{ + int i, ret; + int fd[2]; + struct child_process *conn; + struct ref *ref; + struct stat st; + + fetch_pack_setup(); + memcpy(&args, my_args, sizeof(args)); + if (args.depth > 0) { + if (stat(git_path("shallow"), &st)) + st.st_mtime = 0; + } + + conn = git_connect(fd, (char *)dest, args.uploadpack, + args.verbose ? 
CONNECT_VERBOSE : 0); if (heads && nr_heads) nr_heads = remove_duplicates(nr_heads, heads); - ret = fetch_pack(fd, nr_heads, heads); + ref = do_fetch_pack(fd, nr_heads, heads, pack_lockfile); close(fd[0]); close(fd[1]); - ret |= finish_connect(pid); + ret = finish_connect(conn); if (!ret && nr_heads) { /* If the heads to pull were given, we should have @@ -756,7 +769,7 @@ int main(int argc, char **argv) } } - if (!ret && depth > 0) { + if (!ret && args.depth > 0) { struct cache_time mtime; char *shallow = git_path("shallow"); int fd; @@ -785,5 +798,8 @@ int main(int argc, char **argv) } } - return !!ret; + if (ret) + ref = NULL; + + return ref; } diff --git a/builtin-fetch.c b/builtin-fetch.c new file mode 100644 index 0000000000..003ed76d16 --- /dev/null +++ b/builtin-fetch.c @@ -0,0 +1,586 @@ +/* + * "git fetch" + */ +#include "cache.h" +#include "refs.h" +#include "commit.h" +#include "builtin.h" +#include "path-list.h" +#include "remote.h" +#include "transport.h" + +static const char fetch_usage[] = "git-fetch [-a | --append] [--upload-pack <upload-pack>] [-f | --force] [--no-tags] [-t | --tags] [-k | --keep] [-u | --update-head-ok] [--depth <depth>] [-v | --verbose] [<repository> <refspec>...]"; + +static int append, force, tags, no_tags, update_head_ok, verbose, quiet; +static char *default_rla = NULL; +static struct transport *transport; + +static void unlock_pack(void) +{ + if (transport) + transport_unlock_pack(transport); +} + +static void unlock_pack_on_signal(int signo) +{ + unlock_pack(); + signal(SIGINT, SIG_DFL); + raise(signo); +} + +static void add_merge_config(struct ref **head, + struct ref *remote_refs, + struct branch *branch, + struct ref ***tail) +{ + int i; + + for (i = 0; i < branch->merge_nr; i++) { + struct ref *rm, **old_tail = *tail; + struct refspec refspec; + + for (rm = *head; rm; rm = rm->next) { + if (branch_merge_matches(branch, i, rm->name)) { + rm->merge = 1; + break; + } + } + if (rm) + continue; + + /* + * Not fetched to a tracking branch? We need to fetch + * it anyway to allow this branch's "branch.$name.merge" + * to be honored by git-pull, but we do not have to + * fail if branch.$name.merge is misconfigured to point + * at a nonexisting branch. If we were indeed called by + * git-pull, it will notice the misconfiguration because + * there is no entry in the resulting FETCH_HEAD marked + * for merging. 
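The comment above explains why a branch.<name>.merge entry is fetched even when it maps to no tracking branch: git-pull only looks for a line in FETCH_HEAD that is marked for merging. A loose, self-contained illustration of that "mark matching refs for merge" pass (struct toy_ref, mark_for_merge() and the sample ref names are invented for this sketch, not git API):

	#include <stdio.h>
	#include <string.h>

	struct toy_ref {
		const char *name;
		int merge;
	};

	/* Flag every ref whose name matches a branch.<name>.merge entry;
	 * anything left unflagged would still be written to FETCH_HEAD,
	 * but marked "not-for-merge". */
	static void mark_for_merge(struct toy_ref *refs, int nr,
				   const char **merge, int merge_nr)
	{
		int i, j;

		for (i = 0; i < merge_nr; i++)
			for (j = 0; j < nr; j++)
				if (!strcmp(refs[j].name, merge[i]))
					refs[j].merge = 1;
	}

	int main(void)
	{
		struct toy_ref refs[] = {
			{ "refs/heads/master", 0 },
			{ "refs/heads/next", 0 },
		};
		const char *merge[] = { "refs/heads/master" };
		int i;

		mark_for_merge(refs, 2, merge, 1);
		for (i = 0; i < 2; i++)
			printf("%s\t%s\n", refs[i].name,
			       refs[i].merge ? "for-merge" : "not-for-merge");
		return 0;
	}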
+ */ + refspec.src = branch->merge[i]->src; + refspec.dst = NULL; + refspec.pattern = 0; + refspec.force = 0; + get_fetch_map(remote_refs, &refspec, tail, 1); + for (rm = *old_tail; rm; rm = rm->next) + rm->merge = 1; + } +} + +static struct ref *get_ref_map(struct transport *transport, + struct refspec *refs, int ref_count, int tags, + int *autotags) +{ + int i; + struct ref *rm; + struct ref *ref_map = NULL; + struct ref **tail = &ref_map; + + struct ref *remote_refs = transport_get_remote_refs(transport); + + if (ref_count || tags) { + for (i = 0; i < ref_count; i++) { + get_fetch_map(remote_refs, &refs[i], &tail, 0); + if (refs[i].dst && refs[i].dst[0]) + *autotags = 1; + } + /* Merge everything on the command line, but not --tags */ + for (rm = ref_map; rm; rm = rm->next) + rm->merge = 1; + if (tags) { + struct refspec refspec; + refspec.src = "refs/tags/"; + refspec.dst = "refs/tags/"; + refspec.pattern = 1; + refspec.force = 0; + get_fetch_map(remote_refs, &refspec, &tail, 0); + } + } else { + /* Use the defaults */ + struct remote *remote = transport->remote; + struct branch *branch = branch_get(NULL); + int has_merge = branch_has_merge_config(branch); + if (remote && (remote->fetch_refspec_nr || has_merge)) { + for (i = 0; i < remote->fetch_refspec_nr; i++) { + get_fetch_map(remote_refs, &remote->fetch[i], &tail, 0); + if (remote->fetch[i].dst && + remote->fetch[i].dst[0]) + *autotags = 1; + if (!i && !has_merge && ref_map && + !remote->fetch[0].pattern) + ref_map->merge = 1; + } + /* + * if the remote we're fetching from is the same + * as given in branch.<name>.remote, we add the + * ref given in branch.<name>.merge, too. + */ + if (has_merge && + !strcmp(branch->remote_name, remote->name)) + add_merge_config(&ref_map, remote_refs, branch, &tail); + } else { + ref_map = get_remote_ref(remote_refs, "HEAD"); + if (!ref_map) + die("Couldn't find remote ref HEAD"); + ref_map->merge = 1; + } + } + ref_remove_duplicates(ref_map); + + return ref_map; +} + +static void show_new(enum object_type type, unsigned char *sha1_new) +{ + fprintf(stderr, " %s: %s\n", typename(type), + find_unique_abbrev(sha1_new, DEFAULT_ABBREV)); +} + +static int s_update_ref(const char *action, + struct ref *ref, + int check_old) +{ + char msg[1024]; + char *rla = getenv("GIT_REFLOG_ACTION"); + static struct ref_lock *lock; + + if (!rla) + rla = default_rla; + snprintf(msg, sizeof(msg), "%s: %s", rla, action); + lock = lock_any_ref_for_update(ref->name, + check_old ? 
ref->old_sha1 : NULL, 0); + if (!lock) + return 1; + if (write_ref_sha1(lock, ref->new_sha1, msg) < 0) + return 1; + return 0; +} + +static int update_local_ref(struct ref *ref, + const char *note, + int verbose) +{ + char oldh[41], newh[41]; + struct commit *current = NULL, *updated; + enum object_type type; + struct branch *current_branch = branch_get(NULL); + + type = sha1_object_info(ref->new_sha1, NULL); + if (type < 0) + die("object %s not found", sha1_to_hex(ref->new_sha1)); + + if (!*ref->name) { + /* Not storing */ + if (verbose) { + fprintf(stderr, "* fetched %s\n", note); + show_new(type, ref->new_sha1); + } + return 0; + } + + if (!hashcmp(ref->old_sha1, ref->new_sha1)) { + if (verbose) { + fprintf(stderr, "* %s: same as %s\n", + ref->name, note); + show_new(type, ref->new_sha1); + } + return 0; + } + + if (current_branch && + !strcmp(ref->name, current_branch->name) && + !(update_head_ok || is_bare_repository()) && + !is_null_sha1(ref->old_sha1)) { + /* + * If this is the head, and it's not okay to update + * the head, and the old value of the head isn't empty... + */ + fprintf(stderr, + " * %s: Cannot fetch into the current branch.\n", + ref->name); + return 1; + } + + if (!is_null_sha1(ref->old_sha1) && + !prefixcmp(ref->name, "refs/tags/")) { + fprintf(stderr, "* %s: updating with %s\n", + ref->name, note); + show_new(type, ref->new_sha1); + return s_update_ref("updating tag", ref, 0); + } + + current = lookup_commit_reference_gently(ref->old_sha1, 1); + updated = lookup_commit_reference_gently(ref->new_sha1, 1); + if (!current || !updated) { + char *msg; + if (!strncmp(ref->name, "refs/tags/", 10)) + msg = "storing tag"; + else + msg = "storing head"; + fprintf(stderr, "* %s: storing %s\n", + ref->name, note); + show_new(type, ref->new_sha1); + return s_update_ref(msg, ref, 0); + } + + strcpy(oldh, find_unique_abbrev(current->object.sha1, DEFAULT_ABBREV)); + strcpy(newh, find_unique_abbrev(ref->new_sha1, DEFAULT_ABBREV)); + + if (in_merge_bases(current, &updated, 1)) { + fprintf(stderr, "* %s: fast forward to %s\n", + ref->name, note); + fprintf(stderr, " old..new: %s..%s\n", oldh, newh); + return s_update_ref("fast forward", ref, 1); + } + if (!force && !ref->force) { + fprintf(stderr, + "* %s: not updating to non-fast forward %s\n", + ref->name, note); + fprintf(stderr, + " old...new: %s...%s\n", oldh, newh); + return 1; + } + fprintf(stderr, + "* %s: forcing update to non-fast forward %s\n", + ref->name, note); + fprintf(stderr, " old...new: %s...%s\n", oldh, newh); + return s_update_ref("forced-update", ref, 1); +} + +static void store_updated_refs(const char *url, struct ref *ref_map) +{ + FILE *fp; + struct commit *commit; + int url_len, i, note_len; + char note[1024]; + const char *what, *kind; + struct ref *rm; + + fp = fopen(git_path("FETCH_HEAD"), "a"); + for (rm = ref_map; rm; rm = rm->next) { + struct ref *ref = NULL; + + if (rm->peer_ref) { + ref = xcalloc(1, sizeof(*ref) + strlen(rm->peer_ref->name) + 1); + strcpy(ref->name, rm->peer_ref->name); + hashcpy(ref->old_sha1, rm->peer_ref->old_sha1); + hashcpy(ref->new_sha1, rm->old_sha1); + ref->force = rm->peer_ref->force; + } + + commit = lookup_commit_reference_gently(rm->old_sha1, 1); + if (!commit) + rm->merge = 0; + + if (!strcmp(rm->name, "HEAD")) { + kind = ""; + what = ""; + } + else if (!prefixcmp(rm->name, "refs/heads/")) { + kind = "branch"; + what = rm->name + 11; + } + else if (!prefixcmp(rm->name, "refs/tags/")) { + kind = "tag"; + what = rm->name + 10; + } + else if (!prefixcmp(rm->name, 
"refs/remotes/")) { + kind = "remote branch"; + what = rm->name + 13; + } + else { + kind = ""; + what = rm->name; + } + + url_len = strlen(url); + for (i = url_len - 1; url[i] == '/' && 0 <= i; i--) + ; + url_len = i + 1; + if (4 < i && !strncmp(".git", url + i - 3, 4)) + url_len = i - 3; + + note_len = 0; + if (*what) { + if (*kind) + note_len += sprintf(note + note_len, "%s ", + kind); + note_len += sprintf(note + note_len, "'%s' of ", what); + } + note_len += sprintf(note + note_len, "%.*s", url_len, url); + fprintf(fp, "%s\t%s\t%s\n", + sha1_to_hex(commit ? commit->object.sha1 : + rm->old_sha1), + rm->merge ? "" : "not-for-merge", + note); + + if (ref) + update_local_ref(ref, note, verbose); + } + fclose(fp); +} + +static int fetch_refs(struct transport *transport, struct ref *ref_map) +{ + int ret = transport_fetch_refs(transport, ref_map); + if (!ret) + store_updated_refs(transport->url, ref_map); + transport_unlock_pack(transport); + return ret; +} + +static int add_existing(const char *refname, const unsigned char *sha1, + int flag, void *cbdata) +{ + struct path_list *list = (struct path_list *)cbdata; + path_list_insert(refname, list); + return 0; +} + +static struct ref *find_non_local_tags(struct transport *transport, + struct ref *fetch_map) +{ + static struct path_list existing_refs = { NULL, 0, 0, 0 }; + struct path_list new_refs = { NULL, 0, 0, 1 }; + char *ref_name; + int ref_name_len; + unsigned char *ref_sha1; + struct ref *tag_ref; + struct ref *rm = NULL; + struct ref *ref_map = NULL; + struct ref **tail = &ref_map; + struct ref *ref; + + for_each_ref(add_existing, &existing_refs); + for (ref = transport_get_remote_refs(transport); ref; ref = ref->next) { + if (prefixcmp(ref->name, "refs/tags")) + continue; + + ref_name = xstrdup(ref->name); + ref_name_len = strlen(ref_name); + ref_sha1 = ref->old_sha1; + + if (!strcmp(ref_name + ref_name_len - 3, "^{}")) { + ref_name[ref_name_len - 3] = 0; + tag_ref = transport_get_remote_refs(transport); + while (tag_ref) { + if (!strcmp(tag_ref->name, ref_name)) { + ref_sha1 = tag_ref->old_sha1; + break; + } + tag_ref = tag_ref->next; + } + } + + if (!path_list_has_path(&existing_refs, ref_name) && + !path_list_has_path(&new_refs, ref_name) && + lookup_object(ref->old_sha1)) { + fprintf(stderr, "Auto-following %s\n", + ref_name); + + path_list_insert(ref_name, &new_refs); + + rm = alloc_ref(strlen(ref_name) + 1); + strcpy(rm->name, ref_name); + rm->peer_ref = alloc_ref(strlen(ref_name) + 1); + strcpy(rm->peer_ref->name, ref_name); + hashcpy(rm->old_sha1, ref_sha1); + + *tail = rm; + tail = &rm->next; + } + free(ref_name); + } + + return ref_map; +} + +static int do_fetch(struct transport *transport, + struct refspec *refs, int ref_count) +{ + struct ref *ref_map, *fetch_map; + struct ref *rm; + int autotags = (transport->remote->fetch_tags == 1); + if (transport->remote->fetch_tags == 2 && !no_tags) + tags = 1; + if (transport->remote->fetch_tags == -1) + no_tags = 1; + + if (!transport->get_refs_list || !transport->fetch) + die("Don't know how to fetch from %s", transport->url); + + /* if not appending, truncate FETCH_HEAD */ + if (!append) + fclose(fopen(git_path("FETCH_HEAD"), "w")); + + ref_map = get_ref_map(transport, refs, ref_count, tags, &autotags); + + for (rm = ref_map; rm; rm = rm->next) { + if (rm->peer_ref) + read_ref(rm->peer_ref->name, rm->peer_ref->old_sha1); + } + + if (fetch_refs(transport, ref_map)) { + free_refs(ref_map); + return 1; + } + + fetch_map = ref_map; + + /* if neither --no-tags nor --tags was 
specified, do automated tag + * following ... */ + if (!(tags || no_tags) && autotags) { + ref_map = find_non_local_tags(transport, fetch_map); + if (ref_map) { + transport_set_option(transport, TRANS_OPT_DEPTH, "0"); + fetch_refs(transport, ref_map); + } + free_refs(ref_map); + } + + free_refs(fetch_map); + + return 0; +} + +static void set_option(const char *name, const char *value) +{ + int r = transport_set_option(transport, name, value); + if (r < 0) + die("Option \"%s\" value \"%s\" is not valid for %s\n", + name, value, transport->url); + if (r > 0) + warning("Option \"%s\" is ignored for %s\n", + name, transport->url); +} + +int cmd_fetch(int argc, const char **argv, const char *prefix) +{ + struct remote *remote; + int i, j, rla_offset; + static const char **refs = NULL; + int ref_nr = 0; + int cmd_len = 0; + const char *depth = NULL, *upload_pack = NULL; + int keep = 0; + + for (i = 1; i < argc; i++) { + const char *arg = argv[i]; + cmd_len += strlen(arg); + + if (arg[0] != '-') + break; + if (!strcmp(arg, "--append") || !strcmp(arg, "-a")) { + append = 1; + continue; + } + if (!prefixcmp(arg, "--upload-pack=")) { + upload_pack = arg + 14; + continue; + } + if (!strcmp(arg, "--upload-pack")) { + i++; + if (i == argc) + usage(fetch_usage); + upload_pack = argv[i]; + continue; + } + if (!strcmp(arg, "--force") || !strcmp(arg, "-f")) { + force = 1; + continue; + } + if (!strcmp(arg, "--no-tags")) { + no_tags = 1; + continue; + } + if (!strcmp(arg, "--tags") || !strcmp(arg, "-t")) { + tags = 1; + continue; + } + if (!strcmp(arg, "--keep") || !strcmp(arg, "-k")) { + keep = 1; + continue; + } + if (!strcmp(arg, "--update-head-ok") || !strcmp(arg, "-u")) { + update_head_ok = 1; + continue; + } + if (!prefixcmp(arg, "--depth=")) { + depth = arg + 8; + continue; + } + if (!strcmp(arg, "--depth")) { + i++; + if (i == argc) + usage(fetch_usage); + depth = argv[i]; + continue; + } + if (!strcmp(arg, "--quiet")) { + quiet = 1; + continue; + } + if (!strcmp(arg, "--verbose") || !strcmp(arg, "-v")) { + verbose++; + continue; + } + usage(fetch_usage); + } + + for (j = i; j < argc; j++) + cmd_len += strlen(argv[j]); + + default_rla = xmalloc(cmd_len + 5 + argc + 1); + sprintf(default_rla, "fetch"); + rla_offset = strlen(default_rla); + for (j = 1; j < argc; j++) { + sprintf(default_rla + rla_offset, " %s", argv[j]); + rla_offset += strlen(argv[j]) + 1; + } + + if (i == argc) + remote = remote_get(NULL); + else + remote = remote_get(argv[i++]); + + transport = transport_get(remote, remote->url[0]); + if (verbose >= 2) + transport->verbose = 1; + if (quiet) + transport->verbose = -1; + if (upload_pack) + set_option(TRANS_OPT_UPLOADPACK, upload_pack); + if (keep) + set_option(TRANS_OPT_KEEP, "yes"); + if (depth) + set_option(TRANS_OPT_DEPTH, depth); + + if (!transport->url) + die("Where do you want to fetch from today?"); + + if (i < argc) { + int j = 0; + refs = xcalloc(argc - i + 1, sizeof(const char *)); + while (i < argc) { + if (!strcmp(argv[i], "tag")) { + char *ref; + i++; + ref = xmalloc(strlen(argv[i]) * 2 + 22); + strcpy(ref, "refs/tags/"); + strcat(ref, argv[i]); + strcat(ref, ":refs/tags/"); + strcat(ref, argv[i]); + refs[j++] = ref; + } else + refs[j++] = argv[i]; + i++; + } + refs[j] = NULL; + ref_nr = j; + } + + signal(SIGINT, unlock_pack_on_signal); + atexit(unlock_pack); + return do_fetch(transport, parse_ref_spec(ref_nr, refs), ref_nr); +} diff --git a/builtin-gc.c b/builtin-gc.c index 956c32d1af..3a2ca4f901 100644 --- a/builtin-gc.c +++ b/builtin-gc.c @@ -205,6 +205,10 @@ int 
cmd_gc(int argc, const char **argv, const char *prefix) prune = 0; if (!need_to_gc()) return 0; + fprintf(stderr, "Packing your repository for optimum " + "performance. You may also\n" + "run \"git gc\" manually. See " + "\"git help gc\" for more information.\n"); } else { /* * Use safer (for shared repos) "-A" option to diff --git a/builtin-http-fetch.c b/builtin-http-fetch.c new file mode 100644 index 0000000000..4a50dbd95b --- /dev/null +++ b/builtin-http-fetch.c @@ -0,0 +1,77 @@ +#include "cache.h" +#include "walker.h" + +int cmd_http_fetch(int argc, const char **argv, const char *prefix) +{ + struct walker *walker; + int commits_on_stdin = 0; + int commits; + const char **write_ref = NULL; + char **commit_id; + const char *url; + int arg = 1; + int rc = 0; + int get_tree = 0; + int get_history = 0; + int get_all = 0; + int get_verbosely = 0; + int get_recover = 0; + + git_config(git_default_config); + + while (arg < argc && argv[arg][0] == '-') { + if (argv[arg][1] == 't') { + get_tree = 1; + } else if (argv[arg][1] == 'c') { + get_history = 1; + } else if (argv[arg][1] == 'a') { + get_all = 1; + get_tree = 1; + get_history = 1; + } else if (argv[arg][1] == 'v') { + get_verbosely = 1; + } else if (argv[arg][1] == 'w') { + write_ref = &argv[arg + 1]; + arg++; + } else if (!strcmp(argv[arg], "--recover")) { + get_recover = 1; + } else if (!strcmp(argv[arg], "--stdin")) { + commits_on_stdin = 1; + } + arg++; + } + if (argc < arg + 2 - commits_on_stdin) { + usage("git-http-fetch [-c] [-t] [-a] [-v] [--recover] [-w ref] [--stdin] commit-id url"); + return 1; + } + if (commits_on_stdin) { + commits = walker_targets_stdin(&commit_id, &write_ref); + } else { + commit_id = (char **) &argv[arg++]; + commits = 1; + } + url = argv[arg]; + + walker = get_http_walker(url); + walker->get_tree = get_tree; + walker->get_history = get_history; + walker->get_all = get_all; + walker->get_verbosely = get_verbosely; + walker->get_recover = get_recover; + + rc = walker_fetch(walker, commits, commit_id, write_ref, url); + + if (commits_on_stdin) + walker_targets_free(commits, commit_id, write_ref); + + if (walker->corrupt_object_found) { + fprintf(stderr, +"Some loose object were found to be corrupt, but they might be just\n" +"a false '404 Not Found' error message sent with incorrect HTTP\n" +"status code. 
Suggest running git-fsck.\n"); + } + + walker_free(walker); + + return rc; +} diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c index d7cb11dc0d..fb12248f82 100644 --- a/builtin-mailinfo.c +++ b/builtin-mailinfo.c @@ -288,7 +288,7 @@ static void cleanup_space(char *buf) } static void decode_header(char *it, unsigned itsize); -static char *header[MAX_HDR_PARSED] = { +static const char *header[MAX_HDR_PARSED] = { "From","Subject","Date", }; diff --git a/builtin-push.c b/builtin-push.c index 141380b852..4b39ef3852 100644 --- a/builtin-push.c +++ b/builtin-push.c @@ -6,10 +6,11 @@ #include "run-command.h" #include "builtin.h" #include "remote.h" +#include "transport.h" static const char push_usage[] = "git-push [--all] [--dry-run] [--tags] [--receive-pack=<git-receive-pack>] [--repo=all] [-f | --force] [-v] [<repository> <refspec>...]"; -static int all, dry_run, force, thin, verbose; +static int thin, verbose; static const char *receivepack; static const char **refspec; @@ -43,82 +44,40 @@ static void set_refspecs(const char **refs, int nr) } } -static int do_push(const char *repo) +static int do_push(const char *repo, int flags) { int i, errs; - int common_argc; - const char **argv; - int argc; struct remote *remote = remote_get(repo); if (!remote) die("bad repository '%s'", repo); - if (remote->receivepack) { - char *rp = xmalloc(strlen(remote->receivepack) + 16); - sprintf(rp, "--receive-pack=%s", remote->receivepack); - receivepack = rp; - } - if (!refspec && !all && remote->push_refspec_nr) { + if (!refspec + && !(flags & TRANSPORT_PUSH_ALL) + && remote->push_refspec_nr) { refspec = remote->push_refspec; refspec_nr = remote->push_refspec_nr; } - - argv = xmalloc((refspec_nr + 10) * sizeof(char *)); - argv[0] = "dummy-send-pack"; - argc = 1; - if (all) - argv[argc++] = "--all"; - if (dry_run) - argv[argc++] = "--dry-run"; - if (force) - argv[argc++] = "--force"; - if (receivepack) - argv[argc++] = receivepack; - common_argc = argc; - errs = 0; - for (i = 0; i < remote->uri_nr; i++) { + for (i = 0; i < remote->url_nr; i++) { + struct transport *transport = + transport_get(remote, remote->url[i]); int err; - int dest_argc = common_argc; - int dest_refspec_nr = refspec_nr; - const char **dest_refspec = refspec; - const char *dest = remote->uri[i]; - const char *sender = "send-pack"; - if (!prefixcmp(dest, "http://") || - !prefixcmp(dest, "https://")) - sender = "http-push"; - else { - char *rem = xmalloc(strlen(remote->name) + 10); - sprintf(rem, "--remote=%s", remote->name); - argv[dest_argc++] = rem; - if (thin) - argv[dest_argc++] = "--thin"; - } - argv[0] = sender; - argv[dest_argc++] = dest; - while (dest_refspec_nr--) - argv[dest_argc++] = *dest_refspec++; - argv[dest_argc] = NULL; + if (receivepack) + transport_set_option(transport, + TRANS_OPT_RECEIVEPACK, receivepack); + if (thin) + transport_set_option(transport, TRANS_OPT_THIN, "yes"); + if (verbose) - fprintf(stderr, "Pushing to %s\n", dest); - err = run_command_v_opt(argv, RUN_GIT_CMD); + fprintf(stderr, "Pushing to %s\n", remote->url[i]); + err = transport_push(transport, refspec_nr, refspec, flags); + err |= transport_disconnect(transport); + if (!err) continue; - error("failed to push to '%s'", remote->uri[i]); - switch (err) { - case -ERR_RUN_COMMAND_FORK: - error("unable to fork for %s", sender); - case -ERR_RUN_COMMAND_EXEC: - error("unable to exec %s", sender); - break; - case -ERR_RUN_COMMAND_WAITPID: - case -ERR_RUN_COMMAND_WAITPID_WRONG_PID: - case -ERR_RUN_COMMAND_WAITPID_SIGNAL: - case 
-ERR_RUN_COMMAND_WAITPID_NOEXIT: - error("%s died with strange error", sender); - } + error("failed to push to '%s'", remote->url[i]); errs++; } return !!errs; @@ -127,6 +86,7 @@ static int do_push(const char *repo) int cmd_push(int argc, const char **argv, const char *prefix) { int i; + int flags = 0; const char *repo = NULL; /* default repository */ for (i = 1; i < argc; i++) { @@ -146,11 +106,11 @@ int cmd_push(int argc, const char **argv, const char *prefix) continue; } if (!strcmp(arg, "--all")) { - all = 1; + flags |= TRANSPORT_PUSH_ALL; continue; } if (!strcmp(arg, "--dry-run")) { - dry_run = 1; + flags |= TRANSPORT_PUSH_DRY_RUN; continue; } if (!strcmp(arg, "--tags")) { @@ -158,7 +118,7 @@ int cmd_push(int argc, const char **argv, const char *prefix) continue; } if (!strcmp(arg, "--force") || !strcmp(arg, "-f")) { - force = 1; + flags |= TRANSPORT_PUSH_FORCE; continue; } if (!strcmp(arg, "--thin")) { @@ -170,18 +130,18 @@ int cmd_push(int argc, const char **argv, const char *prefix) continue; } if (!prefixcmp(arg, "--receive-pack=")) { - receivepack = arg; + receivepack = arg + 15; continue; } if (!prefixcmp(arg, "--exec=")) { - receivepack = arg; + receivepack = arg + 7; continue; } usage(push_usage); } set_refspecs(argv + i, argc - i); - if (all && refspec) + if ((flags & TRANSPORT_PUSH_ALL) && refspec) usage(push_usage); - return do_push(repo); + return do_push(repo, flags); } diff --git a/builtin-rerere.c b/builtin-rerere.c index b8206744c1..74493237c9 100644 --- a/builtin-rerere.c +++ b/builtin-rerere.c @@ -389,18 +389,39 @@ static int is_rerere_enabled(void) return 1; } -int cmd_rerere(int argc, const char **argv, const char *prefix) +static int setup_rerere(struct path_list *merge_rr) { - struct path_list merge_rr = { NULL, 0, 0, 1 }; - int i, fd = -1; + int fd; git_config(git_rerere_config); if (!is_rerere_enabled()) - return 0; + return -1; merge_rr_path = xstrdup(git_path("rr-cache/MERGE_RR")); fd = hold_lock_file_for_update(&write_lock, merge_rr_path, 1); - read_rr(&merge_rr); + read_rr(merge_rr); + return fd; +} + +int rerere(void) +{ + struct path_list merge_rr = { NULL, 0, 0, 1 }; + int fd; + + fd = setup_rerere(&merge_rr); + if (fd < 0) + return 0; + return do_plain_rerere(&merge_rr, fd); +} + +int cmd_rerere(int argc, const char **argv, const char *prefix) +{ + struct path_list merge_rr = { NULL, 0, 0, 1 }; + int i, fd; + + fd = setup_rerere(&merge_rr); + if (fd < 0) + return 0; if (argc < 2) return do_plain_rerere(&merge_rr, fd); diff --git a/builtin-reset.c b/builtin-reset.c index 99d5c082a6..e1dc31e0eb 100644 --- a/builtin-reset.c +++ b/builtin-reset.c @@ -169,7 +169,7 @@ static void prepend_reflog_action(const char *action, char *buf, size_t size) } enum reset_type { MIXED, SOFT, HARD, NONE }; -static char *reset_type_names[] = { "mixed", "soft", "hard", NULL }; +static const char *reset_type_names[] = { "mixed", "soft", "hard", NULL }; int cmd_reset(int argc, const char **argv, const char *prefix) { diff --git a/builtin-rev-list.c b/builtin-rev-list.c index 33726b8d84..44393320e8 100644 --- a/builtin-rev-list.c +++ b/builtin-rev-list.c @@ -9,6 +9,7 @@ #include "revision.h" #include "list-objects.h" #include "builtin.h" +#include "log-tree.h" /* bits #0-15 in revision.h */ @@ -38,7 +39,8 @@ static const char rev_list_usage[] = " --left-right\n" " special purpose:\n" " --bisect\n" -" --bisect-vars" +" --bisect-vars\n" +" --bisect-all" ; static struct rev_info revs; @@ -74,6 +76,7 @@ static void show_commit(struct commit *commit) parents = parents->next; } } + 
show_decorations(commit); if (revs.commit_format == CMIT_FMT_ONELINE) putchar(' '); else @@ -278,6 +281,57 @@ static struct commit_list *best_bisection(struct commit_list *list, int nr) return best; } +struct commit_dist { + struct commit *commit; + int distance; +}; + +static int compare_commit_dist(const void *a_, const void *b_) +{ + struct commit_dist *a, *b; + + a = (struct commit_dist *)a_; + b = (struct commit_dist *)b_; + if (a->distance != b->distance) + return b->distance - a->distance; /* desc sort */ + return hashcmp(a->commit->object.sha1, b->commit->object.sha1); +} + +static struct commit_list *best_bisection_sorted(struct commit_list *list, int nr) +{ + struct commit_list *p; + struct commit_dist *array = xcalloc(nr, sizeof(*array)); + int cnt, i; + + for (p = list, cnt = 0; p; p = p->next) { + int distance; + unsigned flags = p->item->object.flags; + + if (revs.prune_fn && !(flags & TREECHANGE)) + continue; + distance = weight(p); + if (nr - distance < distance) + distance = nr - distance; + array[cnt].commit = p->item; + array[cnt].distance = distance; + cnt++; + } + qsort(array, cnt, sizeof(*array), compare_commit_dist); + for (p = list, i = 0; i < cnt; i++) { + struct name_decoration *r = xmalloc(sizeof(*r) + 100); + struct object *obj = &(array[i].commit->object); + + sprintf(r->name, "dist=%d", array[i].distance); + r->next = add_decoration(&name_decoration, obj, r); + p->item = array[i].commit; + p = p->next; + } + if (p) + p->next = NULL; + free(array); + return list; +} + /* * zero or positive weight is the number of interesting commits it can * reach, including itself. Especially, weight = 0 means it does not @@ -292,7 +346,8 @@ static struct commit_list *best_bisection(struct commit_list *list, int nr) * or positive distance. */ static struct commit_list *do_find_bisection(struct commit_list *list, - int nr, int *weights) + int nr, int *weights, + int find_all) { int n, counted; struct commit_list *p; @@ -351,7 +406,7 @@ static struct commit_list *do_find_bisection(struct commit_list *list, clear_distance(list); /* Does it happen to be at exactly half-way? */ - if (halfway(p, nr)) + if (!find_all && halfway(p, nr)) return p; counted++; } @@ -389,19 +444,22 @@ static struct commit_list *do_find_bisection(struct commit_list *list, weight_set(p, weight(q)); /* Does it happen to be at exactly half-way? */ - if (halfway(p, nr)) + if (!find_all && halfway(p, nr)) return p; } } show_list("bisection 2 counted all", counted, nr, list); - /* Then find the best one */ - return best_bisection(list, nr); + if (!find_all) + return best_bisection(list, nr); + else + return best_bisection_sorted(list, nr); } static struct commit_list *find_bisection(struct commit_list *list, - int *reaches, int *all) + int *reaches, int *all, + int find_all) { int nr, on_list; struct commit_list *p, *best, *next, *last; @@ -434,14 +492,13 @@ static struct commit_list *find_bisection(struct commit_list *list, weights = xcalloc(on_list, sizeof(*weights)); /* Do the real work of finding bisection commit. 
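compare_commit_dist() above orders bisection candidates by how evenly they split the set: the distance is the smaller of weight and nr - weight, and larger distances sort first. A small standalone rendering of that ordering with qsort (struct cand, total_nr and the sample weights are invented for the sketch):

	#include <stdio.h>
	#include <stdlib.h>

	struct cand {
		const char *name;
		int weight;	/* commits reachable from this candidate */
	};

	static int total_nr = 8;	/* assumed size of the bisection set */

	static int dist(const struct cand *c)
	{
		int d = c->weight;

		if (total_nr - d < d)
			d = total_nr - d;
		return d;
	}

	/* Same shape as compare_commit_dist(): bigger distance sorts first. */
	static int cmp_dist(const void *a_, const void *b_)
	{
		const struct cand *a = a_, *b = b_;

		return dist(b) - dist(a);
	}

	int main(void)
	{
		struct cand c[] = { { "A", 1 }, { "B", 4 }, { "C", 6 }, { "D", 3 } };
		int i;

		qsort(c, 4, sizeof(c[0]), cmp_dist);
		for (i = 0; i < 4; i++)
			printf("%s dist=%d\n", c[i].name, dist(&c[i]));
		return 0;
	}

The real comparator additionally breaks ties on the object name, so equally good candidates come out in a stable order.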
*/ - best = do_find_bisection(list, nr, weights); - + best = do_find_bisection(list, nr, weights, find_all); if (best) { - best->next = NULL; + if (!find_all) + best->next = NULL; *reaches = weight(best); } free(weights); - return best; } @@ -468,6 +525,7 @@ int cmd_rev_list(int argc, const char **argv, const char *prefix) int i; int read_from_stdin = 0; int bisect_show_vars = 0; + int bisect_find_all = 0; git_config(git_default_config); init_revisions(&revs, prefix); @@ -490,6 +548,11 @@ int cmd_rev_list(int argc, const char **argv, const char *prefix) bisect_list = 1; continue; } + if (!strcmp(arg, "--bisect-all")) { + bisect_list = 1; + bisect_find_all = 1; + continue; + } if (!strcmp(arg, "--bisect-vars")) { bisect_list = 1; bisect_show_vars = 1; @@ -536,9 +599,11 @@ int cmd_rev_list(int argc, const char **argv, const char *prefix) if (bisect_list) { int reaches = reaches, all = all; - revs.commits = find_bisection(revs.commits, &reaches, &all); + revs.commits = find_bisection(revs.commits, &reaches, &all, + bisect_find_all); if (bisect_show_vars) { int cnt; + char hex[41]; if (!revs.commits) return 1; /* @@ -550,15 +615,22 @@ int cmd_rev_list(int argc, const char **argv, const char *prefix) * A bisect set of size N has (N-1) commits further * to test, as we already know one bad one. */ - cnt = all-reaches; + cnt = all - reaches; if (cnt < reaches) cnt = reaches; + strcpy(hex, sha1_to_hex(revs.commits->item->object.sha1)); + + if (bisect_find_all) { + traverse_commit_list(&revs, show_commit, show_object); + printf("------\n"); + } + printf("bisect_rev=%s\n" "bisect_nr=%d\n" "bisect_good=%d\n" "bisect_bad=%d\n" "bisect_all=%d\n", - sha1_to_hex(revs.commits->item->object.sha1), + hex, cnt - 1, all - reaches - 1, reaches - 1, diff --git a/builtin-revert.c b/builtin-revert.c index a655c8ee2a..e855b206cf 100644 --- a/builtin-revert.c +++ b/builtin-revert.c @@ -349,7 +349,7 @@ static int revert_or_cherry_pick(int argc, const char **argv) die ("Error wrapping up %s", defmsg); fprintf(stderr, "Automatic %s failed. 
" "After resolving the conflicts,\n" - "mark the corrected paths with 'git-add <paths>'\n" + "mark the corrected paths with 'git add <paths>' " "and commit the result.\n", me); if (action == CHERRY_PICK) { fprintf(stderr, "When commiting, use the option " @@ -6,6 +6,7 @@ extern const char git_version_string[]; extern const char git_usage_string[]; +extern void list_common_cmds_help(void); extern void help_unknown_cmd(const char *cmd); extern int write_tree(unsigned char *sha1, int missing_ok, const char *prefix); extern void prune_packed_objects(int); @@ -30,6 +31,8 @@ extern int cmd_diff_files(int argc, const char **argv, const char *prefix); extern int cmd_diff_index(int argc, const char **argv, const char *prefix); extern int cmd_diff(int argc, const char **argv, const char *prefix); extern int cmd_diff_tree(int argc, const char **argv, const char *prefix); +extern int cmd_fetch(int argc, const char **argv, const char *prefix); +extern int cmd_fetch_pack(int argc, const char **argv, const char *prefix); extern int cmd_fetch__tool(int argc, const char **argv, const char *prefix); extern int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix); extern int cmd_for_each_ref(int argc, const char **argv, const char *prefix); @@ -39,6 +42,7 @@ extern int cmd_gc(int argc, const char **argv, const char *prefix); extern int cmd_get_tar_commit_id(int argc, const char **argv, const char *prefix); extern int cmd_grep(int argc, const char **argv, const char *prefix); extern int cmd_help(int argc, const char **argv, const char *prefix); +extern int cmd_http_fetch(int argc, const char **argv, const char *prefix); extern int cmd_init_db(int argc, const char **argv, const char *prefix); extern int cmd_log(int argc, const char **argv, const char *prefix); extern int cmd_log_reflog(int argc, const char **argv, const char *prefix); diff --git a/bundle.c b/bundle.c new file mode 100644 index 0000000000..0869fcf026 --- /dev/null +++ b/bundle.c @@ -0,0 +1,343 @@ +#include "cache.h" +#include "bundle.h" +#include "object.h" +#include "commit.h" +#include "diff.h" +#include "revision.h" +#include "list-objects.h" +#include "run-command.h" + +static const char bundle_signature[] = "# v2 git bundle\n"; + +static void add_to_ref_list(const unsigned char *sha1, const char *name, + struct ref_list *list) +{ + if (list->nr + 1 >= list->alloc) { + list->alloc = alloc_nr(list->nr + 1); + list->list = xrealloc(list->list, + list->alloc * sizeof(list->list[0])); + } + memcpy(list->list[list->nr].sha1, sha1, 20); + list->list[list->nr].name = xstrdup(name); + list->nr++; +} + +/* returns an fd */ +int read_bundle_header(const char *path, struct bundle_header *header) { + char buffer[1024]; + int fd; + long fpos; + FILE *ffd = fopen(path, "rb"); + + if (!ffd) + return error("could not open '%s'", path); + if (!fgets(buffer, sizeof(buffer), ffd) || + strcmp(buffer, bundle_signature)) { + fclose(ffd); + return error("'%s' does not look like a v2 bundle file", path); + } + while (fgets(buffer, sizeof(buffer), ffd) + && buffer[0] != '\n') { + int is_prereq = buffer[0] == '-'; + int offset = is_prereq ? 1 : 0; + int len = strlen(buffer); + unsigned char sha1[20]; + struct ref_list *list = is_prereq ? 
&header->prerequisites + : &header->references; + char delim; + + if (buffer[len - 1] == '\n') + buffer[len - 1] = '\0'; + if (get_sha1_hex(buffer + offset, sha1)) { + warning("unrecognized header: %s", buffer); + continue; + } + delim = buffer[40 + offset]; + if (!isspace(delim) && (delim != '\0' || !is_prereq)) + die ("invalid header: %s", buffer); + add_to_ref_list(sha1, isspace(delim) ? + buffer + 41 + offset : "", list); + } + fpos = ftell(ffd); + fclose(ffd); + fd = open(path, O_RDONLY); + if (fd < 0) + return error("could not open '%s'", path); + lseek(fd, fpos, SEEK_SET); + return fd; +} + +static int list_refs(struct ref_list *r, int argc, const char **argv) +{ + int i; + + for (i = 0; i < r->nr; i++) { + if (argc > 1) { + int j; + for (j = 1; j < argc; j++) + if (!strcmp(r->list[i].name, argv[j])) + break; + if (j == argc) + continue; + } + printf("%s %s\n", sha1_to_hex(r->list[i].sha1), + r->list[i].name); + } + return 0; +} + +#define PREREQ_MARK (1u<<16) + +int verify_bundle(struct bundle_header *header, int verbose) +{ + /* + * Do fast check, then if any prereqs are missing then go line by line + * to be verbose about the errors + */ + struct ref_list *p = &header->prerequisites; + struct rev_info revs; + const char *argv[] = {NULL, "--all"}; + struct object_array refs; + struct commit *commit; + int i, ret = 0, req_nr; + const char *message = "Repository lacks these prerequisite commits:"; + + init_revisions(&revs, NULL); + for (i = 0; i < p->nr; i++) { + struct ref_list_entry *e = p->list + i; + struct object *o = parse_object(e->sha1); + if (o) { + o->flags |= PREREQ_MARK; + add_pending_object(&revs, o, e->name); + continue; + } + if (++ret == 1) + error(message); + error("%s %s", sha1_to_hex(e->sha1), e->name); + } + if (revs.pending.nr != p->nr) + return ret; + req_nr = revs.pending.nr; + setup_revisions(2, argv, &revs, NULL); + + memset(&refs, 0, sizeof(struct object_array)); + for (i = 0; i < revs.pending.nr; i++) { + struct object_array_entry *e = revs.pending.objects + i; + add_object_array(e->item, e->name, &refs); + } + + prepare_revision_walk(&revs); + + i = req_nr; + while (i && (commit = get_revision(&revs))) + if (commit->object.flags & PREREQ_MARK) + i--; + + for (i = 0; i < req_nr; i++) + if (!(refs.objects[i].item->flags & SHOWN)) { + if (++ret == 1) + error(message); + error("%s %s", sha1_to_hex(refs.objects[i].item->sha1), + refs.objects[i].name); + } + + for (i = 0; i < refs.nr; i++) + clear_commit_marks((struct commit *)refs.objects[i].item, -1); + + if (verbose) { + struct ref_list *r; + + r = &header->references; + printf("The bundle contains %d ref%s\n", + r->nr, (1 < r->nr) ? "s" : ""); + list_refs(r, 0, NULL); + r = &header->prerequisites; + printf("The bundle requires these %d ref%s\n", + r->nr, (1 < r->nr) ? 
"s" : ""); + list_refs(r, 0, NULL); + } + return ret; +} + +int list_bundle_refs(struct bundle_header *header, int argc, const char **argv) +{ + return list_refs(&header->references, argc, argv); +} + +int create_bundle(struct bundle_header *header, const char *path, + int argc, const char **argv) +{ + static struct lock_file lock; + int bundle_fd = -1; + int bundle_to_stdout; + const char **argv_boundary = xmalloc((argc + 4) * sizeof(const char *)); + const char **argv_pack = xmalloc(5 * sizeof(const char *)); + int i, ref_count = 0; + char buffer[1024]; + struct rev_info revs; + struct child_process rls; + FILE *rls_fout; + + bundle_to_stdout = !strcmp(path, "-"); + if (bundle_to_stdout) + bundle_fd = 1; + else + bundle_fd = hold_lock_file_for_update(&lock, path, 1); + + /* write signature */ + write_or_die(bundle_fd, bundle_signature, strlen(bundle_signature)); + + /* init revs to list objects for pack-objects later */ + save_commit_buffer = 0; + init_revisions(&revs, NULL); + + /* write prerequisites */ + memcpy(argv_boundary + 3, argv + 1, argc * sizeof(const char *)); + argv_boundary[0] = "rev-list"; + argv_boundary[1] = "--boundary"; + argv_boundary[2] = "--pretty=oneline"; + argv_boundary[argc + 2] = NULL; + memset(&rls, 0, sizeof(rls)); + rls.argv = argv_boundary; + rls.out = -1; + rls.git_cmd = 1; + if (start_command(&rls)) + return -1; + rls_fout = fdopen(rls.out, "r"); + while (fgets(buffer, sizeof(buffer), rls_fout)) { + unsigned char sha1[20]; + if (buffer[0] == '-') { + write_or_die(bundle_fd, buffer, strlen(buffer)); + if (!get_sha1_hex(buffer + 1, sha1)) { + struct object *object = parse_object(sha1); + object->flags |= UNINTERESTING; + add_pending_object(&revs, object, buffer); + } + } else if (!get_sha1_hex(buffer, sha1)) { + struct object *object = parse_object(sha1); + object->flags |= SHOWN; + } + } + fclose(rls_fout); + if (finish_command(&rls)) + return error("rev-list died"); + + /* write references */ + argc = setup_revisions(argc, argv, &revs, NULL); + if (argc > 1) + return error("unrecognized argument: %s'", argv[1]); + + for (i = 0; i < revs.pending.nr; i++) { + struct object_array_entry *e = revs.pending.objects + i; + unsigned char sha1[20]; + char *ref; + + if (e->item->flags & UNINTERESTING) + continue; + if (dwim_ref(e->name, strlen(e->name), sha1, &ref) != 1) + continue; + /* + * Make sure the refs we wrote out is correct; --max-count and + * other limiting options could have prevented all the tips + * from getting output. + * + * Non commit objects such as tags and blobs do not have + * this issue as they are not affected by those extra + * constraints. + */ + if (!(e->item->flags & SHOWN) && e->item->type == OBJ_COMMIT) { + warning("ref '%s' is excluded by the rev-list options", + e->name); + free(ref); + continue; + } + /* + * If you run "git bundle create bndl v1.0..v2.0", the + * name of the positive ref is "v2.0" but that is the + * commit that is referenced by the tag, and not the tag + * itself. + */ + if (hashcmp(sha1, e->item->sha1)) { + /* + * Is this the positive end of a range expressed + * in terms of a tag (e.g. v2.0 from the range + * "v1.0..v2.0")? + */ + struct commit *one = lookup_commit_reference(sha1); + struct object *obj; + + if (e->item == &(one->object)) { + /* + * Need to include e->name as an + * independent ref to the pack-objects + * input, so that the tag is included + * in the output; otherwise we would + * end up triggering "empty bundle" + * error. 
+ */ + obj = parse_object(sha1); + obj->flags |= SHOWN; + add_pending_object(&revs, obj, e->name); + } + free(ref); + continue; + } + + ref_count++; + write_or_die(bundle_fd, sha1_to_hex(e->item->sha1), 40); + write_or_die(bundle_fd, " ", 1); + write_or_die(bundle_fd, ref, strlen(ref)); + write_or_die(bundle_fd, "\n", 1); + free(ref); + } + if (!ref_count) + die ("Refusing to create empty bundle."); + + /* end header */ + write_or_die(bundle_fd, "\n", 1); + + /* write pack */ + argv_pack[0] = "pack-objects"; + argv_pack[1] = "--all-progress"; + argv_pack[2] = "--stdout"; + argv_pack[3] = "--thin"; + argv_pack[4] = NULL; + memset(&rls, 0, sizeof(rls)); + rls.argv = argv_pack; + rls.in = -1; + rls.out = bundle_fd; + rls.git_cmd = 1; + if (start_command(&rls)) + return error("Could not spawn pack-objects"); + for (i = 0; i < revs.pending.nr; i++) { + struct object *object = revs.pending.objects[i].item; + if (object->flags & UNINTERESTING) + write(rls.in, "^", 1); + write(rls.in, sha1_to_hex(object->sha1), 40); + write(rls.in, "\n", 1); + } + if (finish_command(&rls)) + return error ("pack-objects died"); + close(bundle_fd); + if (!bundle_to_stdout) + commit_lock_file(&lock); + return 0; +} + +int unbundle(struct bundle_header *header, int bundle_fd) +{ + const char *argv_index_pack[] = {"index-pack", + "--fix-thin", "--stdin", NULL}; + struct child_process ip; + + if (verify_bundle(header, 0)) + return -1; + memset(&ip, 0, sizeof(ip)); + ip.argv = argv_index_pack; + ip.in = bundle_fd; + ip.no_stdout = 1; + ip.git_cmd = 1; + if (run_command(&ip)) + return error("index-pack died"); + return 0; +} diff --git a/bundle.h b/bundle.h new file mode 100644 index 0000000000..e2aedd60d6 --- /dev/null +++ b/bundle.h @@ -0,0 +1,25 @@ +#ifndef BUNDLE_H +#define BUNDLE_H + +struct ref_list { + unsigned int nr, alloc; + struct ref_list_entry { + unsigned char sha1[20]; + char *name; + } *list; +}; + +struct bundle_header { + struct ref_list prerequisites; + struct ref_list references; +}; + +int read_bundle_header(const char *path, struct bundle_header *header); +int create_bundle(struct bundle_header *header, const char *path, + int argc, const char **argv); +int verify_bundle(struct bundle_header *header, int verbose); +int unbundle(struct bundle_header *header, int bundle_fd); +int list_bundle_refs(struct bundle_header *header, + int argc, const char **argv); + +#endif @@ -493,6 +493,7 @@ struct ref { unsigned char old_sha1[20]; unsigned char new_sha1[20]; unsigned char force; + unsigned char merge; struct ref *peer_ref; /* when renaming */ char name[FLEX_ARRAY]; /* more */ }; @@ -502,8 +503,8 @@ struct ref { #define REF_TAGS (1u << 2) #define CONNECT_VERBOSE (1u << 0) -extern pid_t git_connect(int fd[2], char *url, const char *prog, int flags); -extern int finish_connect(pid_t pid); +extern struct child_process *git_connect(int fd[2], char *url, const char *prog, int flags); +extern int finish_connect(struct child_process *conn); extern int path_match(const char *path, int nr, char **match); extern int get_ack(int fd, unsigned char *result_sha1); extern struct ref **get_remote_heads(int in, struct ref **list, int nr_match, char **match, unsigned int flags); @@ -135,39 +135,39 @@ int git_config_colorbool(const char *var, const char *value) return git_config_bool(var, value); } -static int color_vprintf(const char *color, const char *fmt, +static int color_vfprintf(FILE *fp, const char *color, const char *fmt, va_list args, const char *trail) { int r = 0; if (*color) - r += printf("%s", color); - r += 
vprintf(fmt, args); + r += fprintf(fp, "%s", color); + r += vfprintf(fp, fmt, args); if (*color) - r += printf("%s", COLOR_RESET); + r += fprintf(fp, "%s", COLOR_RESET); if (trail) - r += printf("%s", trail); + r += fprintf(fp, "%s", trail); return r; } -int color_printf(const char *color, const char *fmt, ...) +int color_fprintf(FILE *fp, const char *color, const char *fmt, ...) { va_list args; int r; va_start(args, fmt); - r = color_vprintf(color, fmt, args, NULL); + r = color_vfprintf(fp, color, fmt, args, NULL); va_end(args); return r; } -int color_printf_ln(const char *color, const char *fmt, ...) +int color_fprintf_ln(FILE *fp, const char *color, const char *fmt, ...) { va_list args; int r; va_start(args, fmt); - r = color_vprintf(color, fmt, args, "\n"); + r = color_vfprintf(fp, color, fmt, args, "\n"); va_end(args); return r; } @@ -6,7 +6,7 @@ int git_config_colorbool(const char *var, const char *value); void color_parse(const char *var, const char *value, char *dst); -int color_printf(const char *color, const char *fmt, ...); -int color_printf_ln(const char *color, const char *fmt, ...); +int color_fprintf(FILE *fp, const char *color, const char *fmt, ...); +int color_fprintf_ln(FILE *fp, const char *color, const char *fmt, ...); #endif /* COLOR_H */ @@ -128,4 +128,9 @@ extern struct commit_list *get_shallow_commits(struct object_array *heads, int depth, int shallow_flag, int not_shallow_flag); int in_merge_bases(struct commit *, struct commit **, int); + +extern int interactive_add(void); +extern void add_files_to_cache(int verbose, const char *prefix, const char **files); +extern int rerere(void); + #endif /* COMMIT_H */ diff --git a/compat/mkdtemp.c b/compat/mkdtemp.c new file mode 100644 index 0000000000..34d4b49818 --- /dev/null +++ b/compat/mkdtemp.c @@ -0,0 +1,8 @@ +#include "../git-compat-util.h" + +char *gitmkdtemp(char *template) +{ + if (!mktemp(template) || mkdir(template, 0700)) + return NULL; + return template; +} @@ -72,9 +72,9 @@ struct ref **get_remote_heads(int in, struct ref **list, continue; if (nr_match && !path_match(name, nr_match, match)) continue; - ref = alloc_ref(len - 40); + ref = alloc_ref(name_len + 1); hashcpy(ref->old_sha1, old_sha1); - memcpy(ref->name, buffer + 41, len - 40); + memcpy(ref->name, buffer + 41, name_len + 1); *list = ref; list = &ref->next; } @@ -468,24 +468,26 @@ char *get_port(char *host) } /* - * This returns 0 if the transport protocol does not need fork(2), - * or a process id if it does. Once done, finish the connection + * This returns NULL if the transport protocol does not need fork(2), or a + * struct child_process object if it does. Once done, finish the connection * with finish_connect() with the value returned from this function - * (it is safe to call finish_connect() with 0 to support the former + * (it is safe to call finish_connect() with NULL to support the former * case). * - * Does not return a negative value on error; it just dies. + * If it returns, the connect is successful; it just dies on errors. */ -pid_t git_connect(int fd[2], char *url, const char *prog, int flags) +struct child_process *git_connect(int fd[2], char *url, + const char *prog, int flags) { char *host, *path = url; char *end; int c; - int pipefd[2][2]; - pid_t pid; + struct child_process *conn; enum protocol protocol = PROTO_LOCAL; int free_path = 0; char *port = NULL; + const char **arg; + struct strbuf cmd; /* Without this we cannot rely on waitpid() to tell * what happened to our children. 
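The rewritten git_connect() below hands the transport command to start_command() instead of forking by hand; the ssh case gets an optional "-p <port>" plus the host, while the local case goes through "sh -c". A rough standalone sketch of that argv assembly (build_argv() and the sample host, port and command are placeholders; the environment scrubbing of the local case is omitted):

	#include <stdio.h>

	static int build_argv(const char *argv[6], int use_ssh,
			      const char *host, const char *port, const char *cmd)
	{
		int n = 0;

		if (use_ssh) {
			argv[n++] = "ssh";
			if (port) {		/* optional port override */
				argv[n++] = "-p";
				argv[n++] = port;
			}
			argv[n++] = host;
		} else {
			argv[n++] = "sh";
			argv[n++] = "-c";
		}
		argv[n++] = cmd;
		argv[n] = NULL;
		return n;
	}

	int main(void)
	{
		const char *argv[6];
		int i, n;

		n = build_argv(argv, 1, "example.org", "2222",
			       "git-upload-pack 'repo.git'");
		for (i = 0; i < n; i++)
			printf("argv[%d] = %s\n", i, argv[i]);
		return 0;
	}

The six-element array mirrors the xcalloc(6, sizeof(*arg)) in the hunk, which is the largest argv the ssh case can need.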
@@ -568,74 +570,68 @@ pid_t git_connect(int fd[2], char *url, const char *prog, int flags) free(target_host); if (free_path) free(path); - return 0; + return NULL; } - if (pipe(pipefd[0]) < 0 || pipe(pipefd[1]) < 0) - die("unable to create pipe pair for communication"); - pid = fork(); - if (pid < 0) - die("unable to fork"); - if (!pid) { - struct strbuf cmd; - - strbuf_init(&cmd, MAX_CMD_LEN); - strbuf_addstr(&cmd, prog); - strbuf_addch(&cmd, ' '); - sq_quote_buf(&cmd, path); - if (cmd.len >= MAX_CMD_LEN) - die("command line too long"); - - dup2(pipefd[1][0], 0); - dup2(pipefd[0][1], 1); - close(pipefd[0][0]); - close(pipefd[0][1]); - close(pipefd[1][0]); - close(pipefd[1][1]); - if (protocol == PROTO_SSH) { - const char *ssh, *ssh_basename; - ssh = getenv("GIT_SSH"); - if (!ssh) ssh = "ssh"; - ssh_basename = strrchr(ssh, '/'); - if (!ssh_basename) - ssh_basename = ssh; - else - ssh_basename++; - - if (!port) - execlp(ssh, ssh_basename, host, cmd.buf, NULL); - else - execlp(ssh, ssh_basename, "-p", port, host, - cmd.buf, NULL); - } - else { - unsetenv(ALTERNATE_DB_ENVIRONMENT); - unsetenv(DB_ENVIRONMENT); - unsetenv(GIT_DIR_ENVIRONMENT); - unsetenv(GIT_WORK_TREE_ENVIRONMENT); - unsetenv(GRAFT_ENVIRONMENT); - unsetenv(INDEX_ENVIRONMENT); - execlp("sh", "sh", "-c", cmd.buf, NULL); + conn = xcalloc(1, sizeof(*conn)); + + strbuf_init(&cmd, MAX_CMD_LEN); + strbuf_addstr(&cmd, prog); + strbuf_addch(&cmd, ' '); + sq_quote_buf(&cmd, path); + if (cmd.len >= MAX_CMD_LEN) + die("command line too long"); + + conn->in = conn->out = -1; + conn->argv = arg = xcalloc(6, sizeof(*arg)); + if (protocol == PROTO_SSH) { + const char *ssh = getenv("GIT_SSH"); + if (!ssh) ssh = "ssh"; + + *arg++ = ssh; + if (port) { + *arg++ = "-p"; + *arg++ = port; } - die("exec failed"); + *arg++ = host; } - fd[0] = pipefd[0][0]; - fd[1] = pipefd[1][1]; - close(pipefd[0][1]); - close(pipefd[1][0]); + else { + /* remove these from the environment */ + const char *env[] = { + ALTERNATE_DB_ENVIRONMENT, + DB_ENVIRONMENT, + GIT_DIR_ENVIRONMENT, + GIT_WORK_TREE_ENVIRONMENT, + GRAFT_ENVIRONMENT, + INDEX_ENVIRONMENT, + NULL + }; + conn->env = env; + *arg++ = "sh"; + *arg++ = "-c"; + } + *arg++ = cmd.buf; + *arg = NULL; + + if (start_command(conn)) + die("unable to fork"); + + fd[0] = conn->out; /* read from child's stdout */ + fd[1] = conn->in; /* write to child's stdin */ + strbuf_release(&cmd); if (free_path) free(path); - return pid; + return conn; } -int finish_connect(pid_t pid) +int finish_connect(struct child_process *conn) { - if (pid == 0) + int code; + if (!conn) return 0; - while (waitpid(pid, NULL, 0) < 0) { - if (errno != EINTR) - return -1; - } - return 0; + code = finish_command(conn); + free(conn->argv); + free(conn); + return code; } diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index e760930740..599b2fc571 100755 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -346,7 +346,6 @@ __git_commands () ssh-*) : transport;; stripspace) : plumbing;; svn) : import export;; - svnimport) : import;; symbolic-ref) : plumbing;; tar-tree) : deprecated;; unpack-file) : plumbing;; diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el index 4286d160a0..e147da0596 100644 --- a/contrib/emacs/git.el +++ b/contrib/emacs/git.el @@ -796,6 +796,7 @@ Return the list of files that haven't been handled." 
(with-current-buffer buffer (erase-buffer)) (dolist (info files) (git-set-fileinfo-state info 'uptodate)) (git-call-process-env nil nil "rerere") + (git-call-process-env nil nil "gc" "--auto") (git-refresh-files) (git-refresh-ewoc-hf git-status) (message "Committed %s." commit) @@ -842,7 +843,8 @@ Return the list of files that haven't been handled." "Mark all files." (interactive) (unless git-status (error "Not in git-status buffer.")) - (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) t) t) git-status) + (ewoc-map (lambda (info) (unless (git-fileinfo->marked info) + (setf (git-fileinfo->marked info) t))) git-status) ; move back to goal column after invalidate (when goal-column (move-to-column goal-column))) @@ -850,7 +852,9 @@ Return the list of files that haven't been handled." "Unmark all files." (interactive) (unless git-status (error "Not in git-status buffer.")) - (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) nil) t) git-status) + (ewoc-map (lambda (info) (when (git-fileinfo->marked info) + (setf (git-fileinfo->marked info) nil) + t)) git-status) ; move back to goal column after invalidate (when goal-column (move-to-column goal-column))) @@ -955,7 +959,7 @@ Return the list of files that haven't been handled." (when modified (apply #'git-call-process-env nil nil "checkout" "HEAD" modified)) (git-update-status-files (append added modified) 'uptodate) - (git-success-message "Reverted" files)))) + (git-success-message "Reverted" (git-get-filenames files))))) (defun git-resolve-file () "Resolve conflicts in marked file(s)." @@ -1353,7 +1357,7 @@ Commands: "Update the corresponding git-status buffer when a file is saved. Meant to be used in `after-save-hook'." (let* ((file (expand-file-name buffer-file-name)) - (dir (condition-case nil (git-get-top-dir (file-name-directory file)))) + (dir (condition-case nil (git-get-top-dir (file-name-directory file)) (error nil))) (buffer (and dir (git-find-status-buffer dir)))) (when buffer (with-current-buffer buffer diff --git a/git-fetch.sh b/contrib/examples/git-fetch.sh index e44af2c86d..e44af2c86d 100755 --- a/git-fetch.sh +++ b/contrib/examples/git-fetch.sh diff --git a/git-svnimport.perl b/contrib/examples/git-svnimport.perl index ea8c1b2f60..ea8c1b2f60 100755 --- a/git-svnimport.perl +++ b/contrib/examples/git-svnimport.perl diff --git a/Documentation/git-svnimport.txt b/contrib/examples/git-svnimport.txt index 71aad8b45b..71aad8b45b 100644 --- a/Documentation/git-svnimport.txt +++ b/contrib/examples/git-svnimport.txt diff --git a/contrib/fast-import/git-p4 b/contrib/fast-import/git-p4 index 52cd2a46ba..bf33f74b70 100755 --- a/contrib/fast-import/git-p4 +++ b/contrib/fast-import/git-p4 @@ -399,6 +399,7 @@ class P4Submit(Command): optparse.make_option("--dry-run", action="store_true"), optparse.make_option("--direct", dest="directSubmit", action="store_true"), optparse.make_option("--trust-me-like-a-fool", dest="trustMeLikeAFool", action="store_true"), + optparse.make_option("-M", dest="detectRename", action="store_true"), ] self.description = "Submit changes from git to the perforce depot." 
self.usage += " [name of git branch to submit into perforce depot]" @@ -411,6 +412,7 @@ class P4Submit(Command): self.origin = "" self.directSubmit = False self.trustMeLikeAFool = False + self.detectRename = False self.verbose = False self.isWindows = (platform.system() == "Windows") @@ -491,7 +493,8 @@ class P4Submit(Command): diff = self.diffStatus else: print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id)) - diff = read_pipe_lines("git diff-tree -r --name-status \"%s^\" \"%s\"" % (id, id)) + diffOpts = ("", "-M")[self.detectRename] + diff = read_pipe_lines("git diff-tree -r --name-status %s \"%s^\" \"%s\"" % (diffOpts, id, id)) filesToAdd = set() filesToDelete = set() editedFiles = set() @@ -509,6 +512,13 @@ class P4Submit(Command): filesToDelete.add(path) if path in filesToAdd: filesToAdd.remove(path) + elif modifier == "R": + src, dest = line.strip().split("\t")[1:3] + system("p4 integrate -Dt \"%s\" \"%s\"" % (src, dest)) + system("p4 edit \"%s\"" % (dest)) + os.unlink(dest) + editedFiles.add(dest) + filesToDelete.add(src) else: die("unknown modifier %s for %s" % (modifier, path)) @@ -529,6 +539,10 @@ class P4Submit(Command): "and with .rej files / [w]rite the patch to a file (patch.txt) ") if response == "s": print "Skipping! Good luck with the next patches..." + for f in editedFiles: + system("p4 revert \"%s\"" % f); + for f in filesToAdd: + system("rm %s" %f) return elif response == "a": os.system(applyPatchCmd) diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email index b188aa3d67..2aa9bb501c 100644 --- a/contrib/hooks/post-receive-email +++ b/contrib/hooks/post-receive-email @@ -331,7 +331,7 @@ generate_update_branch_email() echo " via $rev ($revtype)" done - if [ -z "$fastforward" ]; then + if [ "$fast_forward" ]; then echo " from $oldrev ($oldrev_type)" else # 1. Existing revisions were removed. In this case newrev is a @@ -192,48 +192,39 @@ static int crlf_to_worktree(const char *path, const char *src, size_t len, return 1; } -static int filter_buffer(const char *path, const char *src, - unsigned long size, const char *cmd) +struct filter_params { + const char *src; + unsigned long size; + const char *cmd; +}; + +static int filter_buffer(int fd, void *data) { /* * Spawn cmd and feed the buffer contents through its stdin. 
*/ struct child_process child_process; - int pipe_feed[2]; + struct filter_params *params = (struct filter_params *)data; int write_err, status; + const char *argv[] = { "sh", "-c", params->cmd, NULL }; memset(&child_process, 0, sizeof(child_process)); + child_process.argv = argv; + child_process.in = -1; + child_process.out = fd; - if (pipe(pipe_feed) < 0) { - error("cannot create pipe to run external filter %s", cmd); - return 1; - } - - child_process.pid = fork(); - if (child_process.pid < 0) { - error("cannot fork to run external filter %s", cmd); - close(pipe_feed[0]); - close(pipe_feed[1]); - return 1; - } - if (!child_process.pid) { - dup2(pipe_feed[0], 0); - close(pipe_feed[0]); - close(pipe_feed[1]); - execlp("sh", "sh", "-c", cmd, NULL); - return 1; - } - close(pipe_feed[0]); + if (start_command(&child_process)) + return error("cannot fork to run external filter %s", params->cmd); - write_err = (write_in_full(pipe_feed[1], src, size) < 0); - if (close(pipe_feed[1])) + write_err = (write_in_full(child_process.in, params->src, params->size) < 0); + if (close(child_process.in)) write_err = 1; if (write_err) - error("cannot feed the input to external filter %s", cmd); + error("cannot feed the input to external filter %s", params->cmd); status = finish_command(&child_process); if (status) - error("external filter %s failed %d", cmd, -status); + error("external filter %s failed %d", params->cmd, -status); return (write_err || status); } @@ -246,49 +237,36 @@ static int apply_filter(const char *path, const char *src, size_t len, * * (child --> cmd) --> us */ - int pipe_feed[2]; - int status, ret = 1; - struct child_process child_process; + int ret = 1; struct strbuf nbuf; + struct async async; + struct filter_params params; if (!cmd) return 0; - memset(&child_process, 0, sizeof(child_process)); - - if (pipe(pipe_feed) < 0) { - error("cannot create pipe to run external filter %s", cmd); - return 0; - } + memset(&async, 0, sizeof(async)); + async.proc = filter_buffer; + async.data = ¶ms; + params.src = src; + params.size = len; + params.cmd = cmd; fflush(NULL); - child_process.pid = fork(); - if (child_process.pid < 0) { - error("cannot fork to run external filter %s", cmd); - close(pipe_feed[0]); - close(pipe_feed[1]); - return 0; - } - if (!child_process.pid) { - dup2(pipe_feed[1], 1); - close(pipe_feed[0]); - close(pipe_feed[1]); - exit(filter_buffer(path, src, len, cmd)); - } - close(pipe_feed[1]); + if (start_async(&async)) + return 0; /* error was already reported */ strbuf_init(&nbuf, 0); - if (strbuf_read(&nbuf, pipe_feed[0], len) < 0) { + if (strbuf_read(&nbuf, async.out, len) < 0) { error("read from external filter %s failed", cmd); ret = 0; } - if (close(pipe_feed[0])) { + if (close(async.out)) { error("read from external filter %s failed", cmd); ret = 0; } - status = finish_command(&child_process); - if (status) { - error("external filter %s failed %d", cmd, -status); + if (finish_async(&async)) { + error("external filter %s failed", cmd); ret = 0; } @@ -9,6 +9,10 @@ #define HOST_NAME_MAX 256 #endif +#ifndef NI_MAXSERV +#define NI_MAXSERV 32 +#endif + static int log_syslog; static int verbose; static int reuseaddr; @@ -9,6 +9,7 @@ #include "xdiff-interface.h" #include "color.h" #include "attr.h" +#include "run-command.h" #ifdef NO_FAST_WORKING_DIRECTORY #define FAST_WORKING_DIRECTORY 0 @@ -1440,9 +1441,18 @@ struct diff_filespec *alloc_filespec(const char *path) memset(spec, 0, sizeof(*spec)); spec->path = (char *)(spec + 1); memcpy(spec->path, path, namelen+1); + spec->count 
= 1; return spec; } +void free_filespec(struct diff_filespec *spec) +{ + if (!--spec->count) { + diff_free_filespec_data(spec); + free(spec); + } +} + void fill_filespec(struct diff_filespec *spec, const unsigned char *sha1, unsigned short mode) { @@ -1512,6 +1522,7 @@ static int reuse_worktree_file(const char *name, const unsigned char *sha1, int static int populate_from_stdin(struct diff_filespec *s) { struct strbuf buf; + size_t size = 0; strbuf_init(&buf, 0); if (strbuf_read(&buf, 0, 0) < 0) @@ -1519,7 +1530,8 @@ static int populate_from_stdin(struct diff_filespec *s) strerror(errno)); s->should_munmap = 0; - s->data = strbuf_detach(&buf, &s->size); + s->data = strbuf_detach(&buf, &size); + s->size = size; s->should_free = 1; return 0; } @@ -1609,9 +1621,11 @@ int diff_populate_filespec(struct diff_filespec *s, int size_only) */ strbuf_init(&buf, 0); if (convert_to_git(s->path, s->data, s->size, &buf)) { + size_t size = 0; munmap(s->data, s->size); s->should_munmap = 0; - s->data = strbuf_detach(&buf, &s->size); + s->data = strbuf_detach(&buf, &size); + s->size = size; s->should_free = 1; } } @@ -1748,40 +1762,6 @@ static void remove_tempfile_on_signal(int signo) raise(signo); } -static int spawn_prog(const char *pgm, const char **arg) -{ - pid_t pid; - int status; - - fflush(NULL); - pid = fork(); - if (pid < 0) - die("unable to fork"); - if (!pid) { - execvp(pgm, (char *const*) arg); - exit(255); - } - - while (waitpid(pid, &status, 0) < 0) { - if (errno == EINTR) - continue; - return -1; - } - - /* Earlier we did not check the exit status because - * diff exits non-zero if files are different, and - * we are not interested in knowing that. It was a - * mistake which made it harder to quit a diff-* - * session that uses the git-apply-patch-script as - * the GIT_EXTERNAL_DIFF. A custom GIT_EXTERNAL_DIFF - * should also exit non-zero only when it wants to - * abort the entire diff-* session. - */ - if (WIFEXITED(status) && !WEXITSTATUS(status)) - return 0; - return -1; -} - /* An external diff command takes: * * diff-cmd name infile1 infile1-sha1 infile1-mode \ @@ -1834,7 +1814,8 @@ static void run_external_diff(const char *pgm, *arg++ = name; } *arg = NULL; - retval = spawn_prog(pgm, spawn_arg); + fflush(NULL); + retval = run_command_v_opt(spawn_arg, 0); remove_tempfile(); if (retval) { fprintf(stderr, "external diff died, stopping at %s.\n", name); @@ -2431,10 +2412,8 @@ struct diff_filepair *diff_queue(struct diff_queue_struct *queue, void diff_free_filepair(struct diff_filepair *p) { - diff_free_filespec_data(p->one); - diff_free_filespec_data(p->two); - free(p->one); - free(p->two); + free_filespec(p->one); + free_filespec(p->two); free(p); } @@ -2586,9 +2565,9 @@ void diff_debug_filepair(const struct diff_filepair *p, int i) { diff_debug_filespec(p->one, i, "one"); diff_debug_filespec(p->two, i, "two"); - fprintf(stderr, "score %d, status %c stays %d broken %d\n", + fprintf(stderr, "score %d, status %c rename_used %d broken %d\n", p->score, p->status ? 
p->status : '?', - p->source_stays, p->broken_pair); + p->one->rename_used, p->broken_pair); } void diff_debug_queue(const char *msg, struct diff_queue_struct *q) @@ -2606,8 +2585,8 @@ void diff_debug_queue(const char *msg, struct diff_queue_struct *q) static void diff_resolve_rename_copy(void) { - int i, j; - struct diff_filepair *p, *pp; + int i; + struct diff_filepair *p; struct diff_queue_struct *q = &diff_queued_diff; diff_debug_queue("resolve-rename-copy", q); @@ -2629,27 +2608,21 @@ static void diff_resolve_rename_copy(void) * either in-place edit or rename/copy edit. */ else if (DIFF_PAIR_RENAME(p)) { - if (p->source_stays) { - p->status = DIFF_STATUS_COPIED; - continue; - } - /* See if there is some other filepair that - * copies from the same source as us. If so - * we are a copy. Otherwise we are either a - * copy if the path stays, or a rename if it - * does not, but we already handled "stays" case. + /* + * A rename might have re-connected a broken + * pair up, causing the pathnames to be the + * same again. If so, that's not a rename at + * all, just a modification.. + * + * Otherwise, see if this source was used for + * multiple renames, in which case we decrement + * the count, and call it a copy. */ - for (j = i + 1; j < q->nr; j++) { - pp = q->queue[j]; - if (strcmp(pp->one->path, p->one->path)) - continue; /* not us */ - if (!DIFF_PAIR_RENAME(pp)) - continue; /* not a rename/copy */ - /* pp is a rename/copy from the same source */ + if (!strcmp(p->one->path, p->two->path)) + p->status = DIFF_STATUS_MODIFIED; + else if (--p->one->rename_used > 0) p->status = DIFF_STATUS_COPIED; - break; - } - if (!p->status) + else p->status = DIFF_STATUS_RENAMED; } else if (hashcmp(p->one->sha1, p->two->sha1) || diff --git a/diffcore-break.c b/diffcore-break.c index ae8a7d03e2..c71a22621a 100644 --- a/diffcore-break.c +++ b/diffcore-break.c @@ -45,8 +45,8 @@ static int should_break(struct diff_filespec *src, * The value we return is 1 if we want the pair to be broken, * or 0 if we do not. */ - unsigned long delta_size, base_size, src_copied, literal_added, - src_removed; + unsigned long delta_size, base_size, max_size; + unsigned long src_copied, literal_added, src_removed; *merge_score_p = 0; /* assume no deletion --- "do not break" * is the default. @@ -63,7 +63,8 @@ static int should_break(struct diff_filespec *src, return 0; /* error but caught downstream */ base_size = ((src->size < dst->size) ? src->size : dst->size); - if (base_size < MINIMUM_BREAK_SIZE) + max_size = ((src->size > dst->size) ? src->size : dst->size); + if (max_size < MINIMUM_BREAK_SIZE) return 0; /* we do not break too small filepair */ if (diffcore_count_changes(src, dst, @@ -89,12 +90,14 @@ static int should_break(struct diff_filespec *src, * less than the minimum, after rename/copy runs. */ *merge_score_p = (int)(src_removed * MAX_SCORE / src->size); + if (*merge_score_p > break_score) + return 1; /* Extent of damage, which counts both inserts and * deletes. 
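
The filespec changes in this patch replace per-pair copies with shared, reference-counted objects. The sketch below assumes only alloc_filespec(), free_filespec() and the new count/rename_used fields from these hunks; the sharing scenario itself is made up to show the intended lifecycle.

	#include "cache.h"
	#include "diff.h"
	#include "diffcore.h"

	/* Illustration only: one source filespec shared by two filepairs. */
	static void filespec_refcount_example(void)
	{
		struct diff_filespec *src = alloc_filespec("Makefile");	/* count == 1 */

		/* record_rename_pair() now reuses the source instead of copying it: */
		src->count++;		/* a second filepair references the same object */
		src->rename_used++;	/* and registers itself as a rename user        */

		/* each diff_free_filepair() then simply drops one reference: */
		free_filespec(src);	/* count 2 -> 1, still alive                    */
		free_filespec(src);	/* count 1 -> 0, data and struct are freed      */
	}
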
*/ delta_size = src_removed + literal_added; - if (delta_size * MAX_SCORE / base_size < break_score) + if (delta_size * MAX_SCORE / max_size < break_score) return 0; /* If you removed a lot without adding new material, that is diff --git a/diffcore-delta.c b/diffcore-delta.c index d9729e5ec2..e670f85125 100644 --- a/diffcore-delta.c +++ b/diffcore-delta.c @@ -46,22 +46,6 @@ struct spanhash_top { struct spanhash data[FLEX_ARRAY]; }; -static struct spanhash *spanhash_find(struct spanhash_top *top, - unsigned int hashval) -{ - int sz = 1 << top->alloc_log2; - int bucket = hashval & (sz - 1); - while (1) { - struct spanhash *h = &(top->data[bucket++]); - if (!h->cnt) - return NULL; - if (h->hashval == hashval) - return h; - if (sz <= bucket) - bucket = 0; - } -} - static struct spanhash_top *spanhash_rehash(struct spanhash_top *orig) { struct spanhash_top *new; @@ -122,6 +106,20 @@ static struct spanhash_top *add_spanhash(struct spanhash_top *top, } } +static int spanhash_cmp(const void *a_, const void *b_) +{ + const struct spanhash *a = a_; + const struct spanhash *b = b_; + + /* A count of zero compares at the end.. */ + if (!a->cnt) + return !b->cnt ? 0 : 1; + if (!b->cnt) + return -1; + return a->hashval < b->hashval ? -1 : + a->hashval > b->hashval ? 1 : 0; +} + static struct spanhash_top *hash_chars(struct diff_filespec *one) { int i, n; @@ -158,6 +156,10 @@ static struct spanhash_top *hash_chars(struct diff_filespec *one) n = 0; accum1 = accum2 = 0; } + qsort(hash->data, + 1ul << hash->alloc_log2, + sizeof(hash->data[0]), + spanhash_cmp); return hash; } @@ -169,7 +171,7 @@ int diffcore_count_changes(struct diff_filespec *src, unsigned long *src_copied, unsigned long *literal_added) { - int i, ssz; + struct spanhash *s, *d; struct spanhash_top *src_count, *dst_count; unsigned long sc, la; @@ -190,22 +192,26 @@ int diffcore_count_changes(struct diff_filespec *src, } sc = la = 0; - ssz = 1 << src_count->alloc_log2; - for (i = 0; i < ssz; i++) { - struct spanhash *s = &(src_count->data[i]); - struct spanhash *d; + s = src_count->data; + d = dst_count->data; + for (;;) { unsigned dst_cnt, src_cnt; if (!s->cnt) - continue; + break; /* we checked all in src */ + while (d->cnt) { + if (d->hashval >= s->hashval) + break; + d++; + } src_cnt = s->cnt; - d = spanhash_find(dst_count, s->hashval); - dst_cnt = d ? d->cnt : 0; + dst_cnt = d->hashval == s->hashval ? 
d->cnt : 0; if (src_cnt < dst_cnt) { la += dst_cnt - src_cnt; sc += src_cnt; } else sc += dst_cnt; + s++; } if (!src_count_p) diff --git a/diffcore-rename.c b/diffcore-rename.c index 142e5376dd..f9ebea5640 100644 --- a/diffcore-rename.c +++ b/diffcore-rename.c @@ -4,6 +4,7 @@ #include "cache.h" #include "diff.h" #include "diffcore.h" +#include "hash.h" /* Table of rename/copy destinations */ @@ -55,12 +56,10 @@ static struct diff_rename_dst *locate_rename_dst(struct diff_filespec *two, static struct diff_rename_src { struct diff_filespec *one; unsigned short score; /* to remember the break score */ - unsigned src_path_left : 1; } *rename_src; static int rename_src_nr, rename_src_alloc; static struct diff_rename_src *register_rename_src(struct diff_filespec *one, - int src_path_left, unsigned short score) { int first, last; @@ -92,33 +91,9 @@ static struct diff_rename_src *register_rename_src(struct diff_filespec *one, (rename_src_nr - first - 1) * sizeof(*rename_src)); rename_src[first].one = one; rename_src[first].score = score; - rename_src[first].src_path_left = src_path_left; return &(rename_src[first]); } -static int is_exact_match(struct diff_filespec *src, - struct diff_filespec *dst, - int contents_too) -{ - if (src->sha1_valid && dst->sha1_valid && - !hashcmp(src->sha1, dst->sha1)) - return 1; - if (!contents_too) - return 0; - if (diff_populate_filespec(src, 1) || diff_populate_filespec(dst, 1)) - return 0; - if (src->size != dst->size) - return 0; - if (src->sha1_valid && dst->sha1_valid) - return !hashcmp(src->sha1, dst->sha1); - if (diff_populate_filespec(src, 0) || diff_populate_filespec(dst, 0)) - return 0; - if (src->size == dst->size && - !memcmp(src->data, dst->data, src->size)) - return 1; - return 0; -} - static int basename_same(struct diff_filespec *src, struct diff_filespec *dst) { int src_len = strlen(src->path), dst_len = strlen(dst->path); @@ -169,6 +144,20 @@ static int estimate_similarity(struct diff_filespec *src, if (!S_ISREG(src->mode) || !S_ISREG(dst->mode)) return 0; + /* + * Need to check that source and destination sizes are + * filled in before comparing them. + * + * If we already have "cnt_data" filled in, we know it's + * all good (avoid checking the size for zero, as that + * is a possible size - we really should have a flag to + * say whether the size is valid or not!) + */ + if (!src->cnt_data && diff_populate_filespec(src, 0)) + return 0; + if (!dst->cnt_data && diff_populate_filespec(dst, 0)) + return 0; + max_size = ((src->size > dst->size) ? src->size : dst->size); base_size = ((src->size < dst->size) ? 
src->size : dst->size); delta_size = max_size - base_size; @@ -184,11 +173,6 @@ static int estimate_similarity(struct diff_filespec *src, if (base_size * (MAX_SCORE-minimum_score) < delta_size * MAX_SCORE) return 0; - if ((!src->cnt_data && diff_populate_filespec(src, 0)) - || (!dst->cnt_data && diff_populate_filespec(dst, 0))) - return 0; /* error but caught downstream */ - - delta_limit = (unsigned long) (base_size * (MAX_SCORE-minimum_score) / MAX_SCORE); if (diffcore_count_changes(src, dst, @@ -209,27 +193,25 @@ static int estimate_similarity(struct diff_filespec *src, static void record_rename_pair(int dst_index, int src_index, int score) { - struct diff_filespec *one, *two, *src, *dst; + struct diff_filespec *src, *dst; struct diff_filepair *dp; if (rename_dst[dst_index].pair) die("internal error: dst already matched."); src = rename_src[src_index].one; - one = alloc_filespec(src->path); - fill_filespec(one, src->sha1, src->mode); + src->rename_used++; + src->count++; dst = rename_dst[dst_index].two; - two = alloc_filespec(dst->path); - fill_filespec(two, dst->sha1, dst->mode); + dst->count++; - dp = diff_queue(NULL, one, two); + dp = diff_queue(NULL, src, dst); dp->renamed_pair = 1; if (!strcmp(src->path, dst->path)) dp->score = rename_src[src_index].score; else dp->score = score; - dp->source_stays = rename_src[src_index].src_path_left; rename_dst[dst_index].pair = dp; } @@ -247,19 +229,155 @@ static int score_compare(const void *a_, const void *b_) return b->score - a->score; } -static int compute_stays(struct diff_queue_struct *q, - struct diff_filespec *one) +struct file_similarity { + int src_dst, index; + struct diff_filespec *filespec; + struct file_similarity *next; +}; + +static int find_identical_files(struct file_similarity *src, + struct file_similarity *dst) { - int i; - for (i = 0; i < q->nr; i++) { - struct diff_filepair *p = q->queue[i]; - if (strcmp(one->path, p->two->path)) - continue; - if (DIFF_PAIR_RENAME(p)) { - return 0; /* something else is renamed into this */ + int renames = 0; + + /* + * Walk over all the destinations ... + */ + do { + struct diff_filespec *one = dst->filespec; + struct file_similarity *p, *best; + int i = 100; + + /* + * .. to find the best source match + */ + best = NULL; + for (p = src; p; p = p->next) { + struct diff_filespec *two = p->filespec; + + /* False hash collission? */ + if (hashcmp(one->sha1, two->sha1)) + continue; + /* Non-regular files? If so, the modes must match! */ + if (!S_ISREG(one->mode) || !S_ISREG(two->mode)) { + if (one->mode != two->mode) + continue; + } + best = p; + if (basename_same(one, two)) + break; + + /* Too many identical alternatives? Pick one */ + if (!--i) + break; + } + if (best) { + record_rename_pair(dst->index, best->index, MAX_SCORE); + renames++; } + } while ((dst = dst->next) != NULL); + return renames; +} + +static void free_similarity_list(struct file_similarity *p) +{ + while (p) { + struct file_similarity *entry = p; + p = p->next; + free(entry); } - return 1; +} + +static int find_same_files(void *ptr) +{ + int ret; + struct file_similarity *p = ptr; + struct file_similarity *src = NULL, *dst = NULL; + + /* Split the hash list up into sources and destinations */ + do { + struct file_similarity *entry = p; + p = p->next; + if (entry->src_dst < 0) { + entry->next = src; + src = entry; + } else { + entry->next = dst; + dst = entry; + } + } while (p); + + /* + * If we have both sources *and* destinations, see if + * we can match them up + */ + ret = (src && dst) ? 
find_identical_files(src, dst) : 0; + + /* Free the hashes and return the number of renames found */ + free_similarity_list(src); + free_similarity_list(dst); + return ret; +} + +static unsigned int hash_filespec(struct diff_filespec *filespec) +{ + unsigned int hash; + if (!filespec->sha1_valid) { + if (diff_populate_filespec(filespec, 0)) + return 0; + hash_sha1_file(filespec->data, filespec->size, "blob", filespec->sha1); + } + memcpy(&hash, filespec->sha1, sizeof(hash)); + return hash; +} + +static void insert_file_table(struct hash_table *table, int src_dst, int index, struct diff_filespec *filespec) +{ + void **pos; + unsigned int hash; + struct file_similarity *entry = xmalloc(sizeof(*entry)); + + entry->src_dst = src_dst; + entry->index = index; + entry->filespec = filespec; + entry->next = NULL; + + hash = hash_filespec(filespec); + pos = insert_hash(hash, entry, table); + + /* We already had an entry there? */ + if (pos) { + entry->next = *pos; + *pos = entry; + } +} + +/* + * Find exact renames first. + * + * The first round matches up the up-to-date entries, + * and then during the second round we try to match + * cache-dirty entries as well. + */ +static int find_exact_renames(void) +{ + int i; + struct hash_table file_table; + + init_hash(&file_table); + for (i = 0; i < rename_src_nr; i++) + insert_file_table(&file_table, -1, i, rename_src[i].one); + + for (i = 0; i < rename_dst_nr; i++) + insert_file_table(&file_table, 1, i, rename_dst[i].two); + + /* Find the renames */ + i = for_each_hash(&file_table, find_same_files); + + /* .. and free the hash data structure */ + free_hash(&file_table); + + return i; } void diffcore_rename(struct diff_options *options) @@ -270,12 +388,11 @@ void diffcore_rename(struct diff_options *options) struct diff_queue_struct *q = &diff_queued_diff; struct diff_queue_struct outq; struct diff_score *mx; - int i, j, rename_count, contents_too; + int i, j, rename_count; int num_create, num_src, dst_cnt; if (!minimum_score) minimum_score = DEFAULT_RENAME_SCORE; - rename_count = 0; for (i = 0; i < q->nr; i++) { struct diff_filepair *p = q->queue[i]; @@ -289,81 +406,66 @@ void diffcore_rename(struct diff_options *options) locate_rename_dst(p->two, 1); } else if (!DIFF_FILE_VALID(p->two)) { - /* If the source is a broken "delete", and + /* + * If the source is a broken "delete", and * they did not really want to get broken, * that means the source actually stays. + * So we increment the "rename_used" score + * by one, to indicate ourselves as a user + */ + if (p->broken_pair && !p->score) + p->one->rename_used++; + register_rename_src(p->one, p->score); + } + else if (detect_rename == DIFF_DETECT_COPY) { + /* + * Increment the "rename_used" score by + * one, to indicate ourselves as a user. */ - int stays = (p->broken_pair && !p->score); - register_rename_src(p->one, stays, p->score); + p->one->rename_used++; + register_rename_src(p->one, p->score); } - else if (detect_rename == DIFF_DETECT_COPY) - register_rename_src(p->one, 1, p->score); } if (rename_dst_nr == 0 || rename_src_nr == 0) goto cleanup; /* nothing to do */ /* + * We really want to cull the candidates list early + * with cheap tests in order to avoid doing deltas. + */ + rename_count = find_exact_renames(); + + /* Did we only want exact renames? */ + if (minimum_score == MAX_SCORE) + goto cleanup; + + /* + * Calculate how many renames are left (but all the source + * files still remain as options for rename/copies!) 
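
Since the exact-rename pass is the heart of this diffcore-rename rewrite, here is a standalone sketch of the same two-phase idea: bucket deleted sources and added destinations by a content fingerprint, then only compare entries that share a bucket. Git uses the blob SHA-1 as the fingerprint and hash.h for the table; both are replaced by toy stand-ins below, so nothing here is git code.

	#include <stdio.h>
	#include <string.h>

	#define NBUCKET 64

	struct entry {
		const char *path;
		const char *content;	/* stands in for the blob contents */
		int is_dst;		/* 0 = deleted source, 1 = added destination */
		struct entry *next;
	};

	static unsigned fingerprint(const char *s)
	{
		unsigned h = 5381;
		while (*s)
			h = h * 33 + (unsigned char)*s++;
		return h;		/* cheap stand-in for the blob SHA-1 */
	}

	int main(void)
	{
		struct entry e[] = {
			{ "Makefile.old", "all: prog\n", 0 },
			{ "docs/README",  "hello\n",     0 },
			{ "Makefile",     "all: prog\n", 1 },
		};
		struct entry *bucket[NBUCKET] = { NULL };
		int i;

		/* Phase 1: drop every candidate into its content bucket. */
		for (i = 0; i < (int)(sizeof(e) / sizeof(e[0])); i++) {
			unsigned b = fingerprint(e[i].content) % NBUCKET;
			e[i].next = bucket[b];
			bucket[b] = &e[i];
		}

		/* Phase 2: within one bucket, pair destinations with sources. */
		for (i = 0; i < NBUCKET; i++) {
			struct entry *dst, *src;
			for (dst = bucket[i]; dst; dst = dst->next) {
				if (!dst->is_dst)
					continue;
				for (src = bucket[i]; src; src = src->next)
					if (!src->is_dst &&
					    !strcmp(src->content, dst->content)) {
						printf("exact rename: %s -> %s\n",
						       src->path, dst->path);
						break;
					}
			}
		}
		return 0;
	}
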
+ */ + num_create = (rename_dst_nr - rename_count); + num_src = rename_src_nr; + + /* All done? */ + if (!num_create) + goto cleanup; + + /* * This basically does a test for the rename matrix not * growing larger than a "rename_limit" square matrix, ie: * - * rename_dst_nr * rename_src_nr > rename_limit * rename_limit + * num_create * num_src > rename_limit * rename_limit * * but handles the potential overflow case specially (and we * assume at least 32-bit integers) */ if (rename_limit <= 0 || rename_limit > 32767) rename_limit = 32767; - if (rename_dst_nr > rename_limit && rename_src_nr > rename_limit) + if (num_create > rename_limit && num_src > rename_limit) goto cleanup; - if (rename_dst_nr * rename_src_nr > rename_limit * rename_limit) + if (num_create * num_src > rename_limit * rename_limit) goto cleanup; - /* We really want to cull the candidates list early - * with cheap tests in order to avoid doing deltas. - * The first round matches up the up-to-date entries, - * and then during the second round we try to match - * cache-dirty entries as well. - */ - for (contents_too = 0; contents_too < 2; contents_too++) { - for (i = 0; i < rename_dst_nr; i++) { - struct diff_filespec *two = rename_dst[i].two; - if (rename_dst[i].pair) - continue; /* dealt with an earlier round */ - for (j = 0; j < rename_src_nr; j++) { - int k; - struct diff_filespec *one = rename_src[j].one; - if (!is_exact_match(one, two, contents_too)) - continue; - - /* see if there is a basename match, too */ - for (k = j; k < rename_src_nr; k++) { - one = rename_src[k].one; - if (basename_same(one, two) && - is_exact_match(one, two, - contents_too)) { - j = k; - break; - } - } - - record_rename_pair(i, j, (int)MAX_SCORE); - rename_count++; - break; /* we are done with this entry */ - } - } - } - - /* Have we run out the created file pool? If so we can avoid - * doing the delta matrix altogether. - */ - if (rename_count == rename_dst_nr) - goto cleanup; - - if (minimum_score == MAX_SCORE) - goto cleanup; - - num_create = (rename_dst_nr - rename_count); - num_src = rename_src_nr; mx = xmalloc(sizeof(*mx) * num_create * num_src); for (dst_cnt = i = 0; i < rename_dst_nr; i++) { int base = dst_cnt * num_src; @@ -452,16 +554,7 @@ void diffcore_rename(struct diff_options *options) pair_to_free = p; } else { - for (j = 0; j < rename_dst_nr; j++) { - if (!rename_dst[j].pair) - continue; - if (strcmp(rename_dst[j].pair-> - one->path, - p->one->path)) - continue; - break; - } - if (j < rename_dst_nr) + if (p->one->rename_used) /* this path remains */ pair_to_free = p; } @@ -487,27 +580,8 @@ void diffcore_rename(struct diff_options *options) *q = outq; diff_debug_queue("done collapsing", q); - /* We need to see which rename source really stays here; - * earlier we only checked if the path is left in the result, - * but even if a path remains in the result, if that is coming - * from copying something else on top of it, then the original - * source is lost and does not stay. - */ - for (i = 0; i < q->nr; i++) { - struct diff_filepair *p = q->queue[i]; - if (DIFF_PAIR_RENAME(p) && p->source_stays) { - /* If one appears as the target of a rename-copy, - * then mark p->source_stays = 0; otherwise - * leave it as is. 
- */ - p->source_stays = compute_stays(q, p->one); - } - } - - for (i = 0; i < rename_dst_nr; i++) { - diff_free_filespec_data(rename_dst[i].two); - free(rename_dst[i].two); - } + for (i = 0; i < rename_dst_nr; i++) + free_filespec(rename_dst[i].two); free(rename_dst); rename_dst = NULL; diff --git a/diffcore.h b/diffcore.h index eb618b1ec0..cc96c20734 100644 --- a/diffcore.h +++ b/diffcore.h @@ -29,7 +29,9 @@ struct diff_filespec { void *cnt_data; const char *funcname_pattern_ident; unsigned long size; + int count; /* Reference count */ int xfrm_flags; /* for use by the xfrm */ + int rename_used; /* Count of rename users */ unsigned short mode; /* file mode */ unsigned sha1_valid : 1; /* if true, use sha1 and trust mode; * if false, use the name and read from @@ -43,6 +45,7 @@ struct diff_filespec { }; extern struct diff_filespec *alloc_filespec(const char *); +extern void free_filespec(struct diff_filespec *); extern void fill_filespec(struct diff_filespec *, const unsigned char *, unsigned short); @@ -56,7 +59,6 @@ struct diff_filepair { struct diff_filespec *two; unsigned short int score; char status; /* M C R N D U (see Documentation/diff-format.txt) */ - unsigned source_stays : 1; /* all of R/C are copies */ unsigned broken_pair : 1; unsigned renamed_pair : 1; unsigned is_unmerged : 1; @@ -118,14 +118,32 @@ int match_pathspec(const char **pathspec, const char *name, int namelen, int pre return retval; } +static int no_wildcard(const char *string) +{ + return string[strcspn(string, "*?[{")] == '\0'; +} + void add_exclude(const char *string, const char *base, int baselen, struct exclude_list *which) { struct exclude *x = xmalloc(sizeof (*x)); + x->to_exclude = 1; + if (*string == '!') { + x->to_exclude = 0; + string++; + } x->pattern = string; + x->patternlen = strlen(string); x->base = base; x->baselen = baselen; + x->flags = 0; + if (!strchr(string, '/')) + x->flags |= EXC_FLAG_NODIR; + if (no_wildcard(string)) + x->flags |= EXC_FLAG_NOWILDCARD; + if (*string == '*' && no_wildcard(string+1)) + x->flags |= EXC_FLAG_ENDSWITH; if (which->nr == which->alloc) { which->alloc = alloc_nr(which->alloc); which->excludes = xrealloc(which->excludes, @@ -209,7 +227,7 @@ void pop_exclude_per_directory(struct dir_struct *dir, int stk) * Return 1 for exclude, 0 for include and -1 for undecided. */ static int excluded_1(const char *pathname, - int pathlen, + int pathlen, const char *basename, struct exclude_list *el) { int i; @@ -218,19 +236,21 @@ static int excluded_1(const char *pathname, for (i = el->nr - 1; 0 <= i; i--) { struct exclude *x = el->excludes[i]; const char *exclude = x->pattern; - int to_exclude = 1; + int to_exclude = x->to_exclude; - if (*exclude == '!') { - to_exclude = 0; - exclude++; - } - - if (!strchr(exclude, '/')) { + if (x->flags & EXC_FLAG_NODIR) { /* match basename */ - const char *basename = strrchr(pathname, '/'); - basename = (basename) ? 
basename+1 : pathname; - if (fnmatch(exclude, basename, 0) == 0) - return to_exclude; + if (x->flags & EXC_FLAG_NOWILDCARD) { + if (!strcmp(exclude, basename)) + return to_exclude; + } else if (x->flags & EXC_FLAG_ENDSWITH) { + if (x->patternlen - 1 <= pathlen && + !strcmp(exclude + 1, pathname + pathlen - x->patternlen + 1)) + return to_exclude; + } else { + if (fnmatch(exclude, basename, 0) == 0) + return to_exclude; + } } else { /* match with FNM_PATHNAME: @@ -246,9 +266,14 @@ static int excluded_1(const char *pathname, strncmp(pathname, x->base, baselen)) continue; - if (fnmatch(exclude, pathname+baselen, - FNM_PATHNAME) == 0) - return to_exclude; + if (x->flags & EXC_FLAG_NOWILDCARD) { + if (!strcmp(exclude, pathname + baselen)) + return to_exclude; + } else { + if (fnmatch(exclude, pathname+baselen, + FNM_PATHNAME) == 0) + return to_exclude; + } } } } @@ -259,9 +284,11 @@ int excluded(struct dir_struct *dir, const char *pathname) { int pathlen = strlen(pathname); int st; + const char *basename = strrchr(pathname, '/'); + basename = (basename) ? basename+1 : pathname; for (st = EXC_CMDL; st <= EXC_FILE; st++) { - switch (excluded_1(pathname, pathlen, &dir->exclude_list[st])) { + switch (excluded_1(pathname, pathlen, basename, &dir->exclude_list[st])) { case 0: return 0; case 1: @@ -443,6 +470,24 @@ static int in_pathspec(const char *path, int len, const struct path_simplify *si return 0; } +static int get_dtype(struct dirent *de, const char *path) +{ + int dtype = DTYPE(de); + struct stat st; + + if (dtype != DT_UNKNOWN) + return dtype; + if (lstat(path, &st)) + return dtype; + if (S_ISREG(st.st_mode)) + return DT_REG; + if (S_ISDIR(st.st_mode)) + return DT_DIR; + if (S_ISLNK(st.st_mode)) + return DT_LNK; + return dtype; +} + /* * Read a directory tree. We currently ignore anything but * directories, regular files and symlinks. That's because git @@ -466,7 +511,7 @@ static int read_directory_recursive(struct dir_struct *dir, const char *path, co exclude_stk = push_exclude_per_directory(dir, base, baselen); while ((de = readdir(fdir)) != NULL) { - int len; + int len, dtype; int exclude; if ((de->d_name[0] == '.') && @@ -486,24 +531,30 @@ static int read_directory_recursive(struct dir_struct *dir, const char *path, co if (exclude && dir->collect_ignored && in_pathspec(fullname, baselen + len, simplify)) dir_add_ignored(dir, fullname, baselen + len); - if (exclude != dir->show_ignored) { - if (!dir->show_ignored || DTYPE(de) != DT_DIR) { + + /* + * Excluded? If we don't explicitly want to show + * ignored files, ignore it + */ + if (exclude && !dir->show_ignored) + continue; + + dtype = get_dtype(de, fullname); + + /* + * Do we want to see just the ignored files? + * We still need to recurse into directories, + * even if we don't ignore them, since the + * directory may contain files that we do.. 
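
The exclude-pattern flags introduced in the dir.c hunks let the common .gitignore cases bypass fnmatch() entirely. This standalone sketch mirrors the flag names, values and the strcspn() test from the patch; the classifier and the sample patterns are illustrative restatements, not the actual dir.c code.

	#include <stdio.h>
	#include <string.h>

	#define EXC_FLAG_NODIR      1	/* no '/', match against the basename only */
	#define EXC_FLAG_NOWILDCARD 2	/* plain string, strcmp() is enough        */
	#define EXC_FLAG_ENDSWITH   4	/* "*foo": compare the tail only           */

	static int no_wildcard(const char *s)
	{
		return s[strcspn(s, "*?[{")] == '\0';
	}

	static int classify(const char *pattern)
	{
		int flags = 0;
		if (!strchr(pattern, '/'))
			flags |= EXC_FLAG_NODIR;
		if (no_wildcard(pattern))
			flags |= EXC_FLAG_NOWILDCARD;
		if (*pattern == '*' && no_wildcard(pattern + 1))
			flags |= EXC_FLAG_ENDSWITH;
		return flags;
	}

	int main(void)
	{
		const char *samples[] = { "core", "*.o", "Documentation/*.html", "foo[12]" };
		int i;
		for (i = 0; i < 4; i++)
			printf("%-24s -> flags %d\n", samples[i], classify(samples[i]));
		return 0;
	}
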
+ */ + if (!exclude && dir->show_ignored) { + if (dtype != DT_DIR) continue; - } } - switch (DTYPE(de)) { - struct stat st; + switch (dtype) { default: continue; - case DT_UNKNOWN: - if (lstat(fullname, &st)) - continue; - if (S_ISREG(st.st_mode) || S_ISLNK(st.st_mode)) - break; - if (!S_ISDIR(st.st_mode)) - continue; - /* fallthrough */ case DT_DIR: memcpy(fullname + baselen + len, "/", 2); len++; @@ -685,3 +736,44 @@ int is_inside_dir(const char *dir) char buffer[PATH_MAX]; return get_relative_cwd(buffer, sizeof(buffer), dir) != NULL; } + +int remove_dir_recursively(struct strbuf *path, int only_empty) +{ + DIR *dir = opendir(path->buf); + struct dirent *e; + int ret = 0, original_len = path->len, len; + + if (!dir) + return -1; + if (path->buf[original_len - 1] != '/') + strbuf_addch(path, '/'); + + len = path->len; + while ((e = readdir(dir)) != NULL) { + struct stat st; + if ((e->d_name[0] == '.') && + ((e->d_name[1] == 0) || + ((e->d_name[1] == '.') && e->d_name[2] == 0))) + continue; /* "." and ".." */ + + strbuf_setlen(path, len); + strbuf_addstr(path, e->d_name); + if (lstat(path->buf, &st)) + ; /* fall thru */ + else if (S_ISDIR(st.st_mode)) { + if (!remove_dir_recursively(path, only_empty)) + continue; /* happy */ + } else if (!only_empty && !unlink(path->buf)) + continue; /* happy, too */ + + /* path too long, stat fails, or non-directory still exists */ + ret = -1; + break; + } + closedir(dir); + + strbuf_setlen(path, original_len); + if (!ret) + ret = rmdir(path->buf); + return ret; +} @@ -17,13 +17,20 @@ struct dir_entry { char name[FLEX_ARRAY]; /* more */ }; +#define EXC_FLAG_NODIR 1 +#define EXC_FLAG_NOWILDCARD 2 +#define EXC_FLAG_ENDSWITH 4 + struct exclude_list { int nr; int alloc; struct exclude { const char *pattern; + int patternlen; const char *base; int baselen; + int to_exclude; + int flags; } **excludes; }; @@ -64,4 +71,6 @@ extern struct dir_entry *dir_add_name(struct dir_struct *dir, const char *pathna extern char *get_relative_cwd(char *buffer, int size, const char *dir); extern int is_inside_dir(const char *dir); +extern int remove_dir_recursively(struct strbuf *path, int only_empty); + #endif @@ -119,8 +119,10 @@ static int write_entry(struct cache_entry *ce, char *path, const struct checkout */ strbuf_init(&buf, 0); if (convert_to_working_tree(ce->name, new, size, &buf)) { + size_t newsize = 0; free(new); - new = strbuf_detach(&buf, &size); + new = strbuf_detach(&buf, &newsize); + size = newsize; } if (to_tempfile) { diff --git a/exec_cmd.c b/exec_cmd.c index 9b74ed2f42..2d0a758512 100644 --- a/exec_cmd.c +++ b/exec_cmd.c @@ -5,11 +5,11 @@ extern char **environ; static const char *builtin_exec_path = GIT_EXEC_PATH; -static const char *current_exec_path; +static const char *argv_exec_path; -void git_set_exec_path(const char *exec_path) +void git_set_argv_exec_path(const char *exec_path) { - current_exec_path = exec_path; + argv_exec_path = exec_path; } @@ -18,8 +18,8 @@ const char *git_exec_path(void) { const char *env; - if (current_exec_path) - return current_exec_path; + if (argv_exec_path) + return argv_exec_path; env = getenv(EXEC_PATH_ENVIRONMENT); if (env && *env) { @@ -29,85 +29,69 @@ const char *git_exec_path(void) return builtin_exec_path; } +static void add_path(struct strbuf *out, const char *path) +{ + if (path && *path) { + if (is_absolute_path(path)) + strbuf_addstr(out, path); + else + strbuf_addstr(out, make_absolute_path(path)); + + strbuf_addch(out, ':'); + } +} + +void setup_path(const char *cmd_path) +{ + const char *old_path = 
getenv("PATH"); + struct strbuf new_path; + + strbuf_init(&new_path, 0); + + add_path(&new_path, argv_exec_path); + add_path(&new_path, getenv(EXEC_PATH_ENVIRONMENT)); + add_path(&new_path, builtin_exec_path); + add_path(&new_path, cmd_path); + + if (old_path) + strbuf_addstr(&new_path, old_path); + else + strbuf_addstr(&new_path, "/usr/local/bin:/usr/bin:/bin"); + + setenv("PATH", new_path.buf, 1); + + strbuf_release(&new_path); +} int execv_git_cmd(const char **argv) { - char git_command[PATH_MAX + 1]; - int i; - const char *paths[] = { current_exec_path, - getenv(EXEC_PATH_ENVIRONMENT), - builtin_exec_path }; - - for (i = 0; i < ARRAY_SIZE(paths); ++i) { - size_t len; - int rc; - const char *exec_dir = paths[i]; - const char *tmp; - - if (!exec_dir || !*exec_dir) continue; - - if (*exec_dir != '/') { - if (!getcwd(git_command, sizeof(git_command))) { - fprintf(stderr, "git: cannot determine " - "current directory: %s\n", - strerror(errno)); - break; - } - len = strlen(git_command); - - /* Trivial cleanup */ - while (!prefixcmp(exec_dir, "./")) { - exec_dir += 2; - while (*exec_dir == '/') - exec_dir++; - } - - rc = snprintf(git_command + len, - sizeof(git_command) - len, "/%s", - exec_dir); - if (rc < 0 || rc >= sizeof(git_command) - len) { - fprintf(stderr, "git: command name given " - "is too long.\n"); - break; - } - } else { - if (strlen(exec_dir) + 1 > sizeof(git_command)) { - fprintf(stderr, "git: command name given " - "is too long.\n"); - break; - } - strcpy(git_command, exec_dir); - } - - len = strlen(git_command); - rc = snprintf(git_command + len, sizeof(git_command) - len, - "/git-%s", argv[0]); - if (rc < 0 || rc >= sizeof(git_command) - len) { - fprintf(stderr, - "git: command name given is too long.\n"); - break; - } + struct strbuf cmd; + const char *tmp; - /* argv[0] must be the git command, but the argv array - * belongs to the caller, and my be reused in - * subsequent loop iterations. Save argv[0] and - * restore it on error. - */ + strbuf_init(&cmd, 0); + strbuf_addf(&cmd, "git-%s", argv[0]); - tmp = argv[0]; - argv[0] = git_command; + /* + * argv[0] must be the git command, but the argv array + * belongs to the caller, and may be reused in + * subsequent loop iterations. Save argv[0] and + * restore it on error. 
+ */ + tmp = argv[0]; + argv[0] = cmd.buf; - trace_argv_printf(argv, -1, "trace: exec:"); + trace_argv_printf(argv, -1, "trace: exec:"); - /* execve() can only ever return if it fails */ - execve(git_command, (char **)argv, environ); + /* execvp() can only ever return if it fails */ + execvp(cmd.buf, (char **)argv); - trace_printf("trace: exec failed: %s\n", strerror(errno)); + trace_printf("trace: exec failed: %s\n", strerror(errno)); - argv[0] = tmp; - } - return -1; + argv[0] = tmp; + strbuf_release(&cmd); + + return -1; } diff --git a/exec_cmd.h b/exec_cmd.h index 849a8395a0..a892355c82 100644 --- a/exec_cmd.h +++ b/exec_cmd.h @@ -1,8 +1,9 @@ #ifndef GIT_EXEC_CMD_H #define GIT_EXEC_CMD_H -extern void git_set_exec_path(const char *exec_path); +extern void git_set_argv_exec_path(const char *exec_path); extern const char* git_exec_path(void); +extern void setup_path(const char *); extern int execv_git_cmd(const char **argv); /* NULL terminated */ extern int execl_git_cmd(const char *cmd, ...); diff --git a/fast-import.c b/fast-import.c index e9c80be4cd..f93d7d6c9b 100644 --- a/fast-import.c +++ b/fast-import.c @@ -1616,6 +1616,7 @@ static void cmd_data(struct strbuf *sb) char *term = xstrdup(command_buf.buf + 5 + 2); size_t term_len = command_buf.len - 5 - 2; + strbuf_detach(&command_buf, NULL); for (;;) { if (strbuf_getline(&command_buf, stdin, '\n') == EOF) die("EOF in data (terminator '%s' not found)", term); @@ -1817,7 +1818,7 @@ static void file_change_m(struct branch *b) } else if (oe) { if (oe->type != OBJ_BLOB) die("Not a blob (actually a %s): %s", - command_buf.buf, typename(oe->type)); + typename(oe->type), command_buf.buf); } else { enum object_type type = sha1_object_info(sha1, NULL); if (type < 0) diff --git a/fetch-pack.h b/fetch-pack.h new file mode 100644 index 0000000000..a7888ea302 --- /dev/null +++ b/fetch-pack.h @@ -0,0 +1,24 @@ +#ifndef FETCH_PACK_H +#define FETCH_PACK_H + +struct fetch_pack_args +{ + const char *uploadpack; + int unpacklimit; + int depth; + unsigned quiet:1, + keep_pack:1, + lock_pack:1, + use_thin_pack:1, + fetch_all:1, + verbose:1, + no_progress:1; +}; + +struct ref *fetch_pack(struct fetch_pack_args *args, + const char *dest, + int nr_heads, + char **heads, + char **pack_lockfile); + +#endif diff --git a/fetch.h b/fetch.h deleted file mode 100644 index be48c6f190..0000000000 --- a/fetch.h +++ /dev/null @@ -1,54 +0,0 @@ -#ifndef PULL_H -#define PULL_H - -/* - * Fetch object given SHA1 from the remote, and store it locally under - * GIT_OBJECT_DIRECTORY. Return 0 on success, -1 on failure. To be - * provided by the particular implementation. - */ -extern int fetch(unsigned char *sha1); - -/* - * Fetch the specified object and store it locally; fetch() will be - * called later to determine success. To be provided by the particular - * implementation. - */ -extern void prefetch(unsigned char *sha1); - -/* - * Fetch ref (relative to $GIT_DIR/refs) from the remote, and store - * the 20-byte SHA1 in sha1. Return 0 on success, -1 on failure. To - * be provided by the particular implementation. - */ -extern int fetch_ref(char *ref, unsigned char *sha1); - -/* Set to fetch the target tree. */ -extern int get_tree; - -/* Set to fetch the commit history. */ -extern int get_history; - -/* Set to fetch the trees in the commit history. */ -extern int get_all; - -/* Set to be verbose */ -extern int get_verbosely; - -/* Set to check on all reachable objects. 
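
The new fetch-pack.h turns the old global option flags into a parameter block. A hypothetical caller might look like the sketch below; the struct fields and the fetch_pack() prototype are the ones from this patch, while the remote URL, the ref name and the chosen options are invented for illustration.

	#include "cache.h"
	#include "fetch-pack.h"

	/* Hypothetical caller of the new fetch-pack interface. */
	static struct ref *fetch_one_branch(void)
	{
		struct fetch_pack_args args;
		char *heads[] = { "refs/heads/master" };

		memset(&args, 0, sizeof(args));
		args.uploadpack = "git-upload-pack";
		args.quiet = 1;
		args.use_thin_pack = 1;

		/* NULL pack_lockfile: we do not need to keep the pack locked. */
		return fetch_pack(&args, "git://example.com/repo.git",
				  1, heads, NULL);
	}
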
*/ -extern int get_recover; - -/* Report what we got under get_verbosely */ -extern void pull_say(const char *, const char *); - -/* Load pull targets from stdin */ -extern int pull_targets_stdin(char ***target, const char ***write_ref); - -/* Free up loaded targets */ -extern void pull_targets_free(int targets, char **target, const char **write_ref); - -/* If write_ref is set, the ref filename to write the target value to. */ -/* If write_ref_log_details is set, additional text will appear in the ref log. */ -extern int pull(int targets, char **target, const char **write_ref, - const char *write_ref_log_details); - -#endif /* PULL_H */ @@ -394,9 +394,7 @@ do stop_here $this fi - echo printf 'Applying %s\n' "$SUBJECT" - echo case "$resolved" in '') @@ -452,10 +450,8 @@ do fi tree=$(git write-tree) && - echo Wrote tree $tree && parent=$(git rev-parse --verify HEAD) && commit=$(git commit-tree $tree -p $parent <"$dotest/final-commit") && - echo Committed: $commit && git update-ref -m "$GIT_REFLOG_ACTION: $SUBJECT" HEAD $commit $parent || stop_here $this diff --git a/git-bisect.sh b/git-bisect.sh index 388887a556..b74f44df60 100755 --- a/git-bisect.sh +++ b/git-bisect.sh @@ -1,12 +1,14 @@ #!/bin/sh -USAGE='[start|bad|good|next|reset|visualize|replay|log|run]' +USAGE='[start|bad|good|skip|next|reset|visualize|replay|log|run]' LONG_USAGE='git bisect start [<bad> [<good>...]] [--] [<pathspec>...] reset bisect state and start bisection. git bisect bad [<rev>] mark <rev> a known-bad revision. git bisect good [<rev>...] mark <rev>... known-good revisions. +git bisect skip [<rev>...] + mark <rev>... untestable revisions. git bisect next find next bisection to test and check it out. git bisect reset [<branch>] @@ -64,7 +66,7 @@ bisect_start() { branch=`cat "$GIT_DIR/head-name"` else branch=master - fi + fi git checkout $branch || exit ;; refs/heads/*) @@ -95,75 +97,74 @@ bisect_start() { arg="$1" case "$arg" in --) - shift + shift break ;; *) - rev=$(git rev-parse --verify "$arg^{commit}" 2>/dev/null) || { + rev=$(git rev-parse --verify "$arg^{commit}" 2>/dev/null) || { test $has_double_dash -eq 1 && die "'$arg' does not appear to be a valid revision" break } - if [ $bad_seen -eq 0 ]; then - bad_seen=1 - bisect_write_bad "$rev" - else - bisect_write_good "$rev" - fi - shift + case $bad_seen in + 0) state='bad' ; bad_seen=1 ;; + *) state='good' ;; + esac + bisect_write "$state" "$rev" 'nolog' + shift ;; esac - done + done sq "$@" >"$GIT_DIR/BISECT_NAMES" echo "git-bisect start$orig_args" >>"$GIT_DIR/BISECT_LOG" bisect_auto_next } -bisect_bad() { - bisect_autostart - case "$#" in - 0) - rev=$(git rev-parse --verify HEAD) ;; - 1) - rev=$(git rev-parse --verify "$1^{commit}") ;; - *) - usage ;; - esac || exit - bisect_write_bad "$rev" - echo "git-bisect bad $rev" >>"$GIT_DIR/BISECT_LOG" - bisect_auto_next -} - -bisect_write_bad() { - rev="$1" - echo "$rev" >"$GIT_DIR/refs/bisect/bad" - echo "# bad: "$(git show-branch $rev) >>"$GIT_DIR/BISECT_LOG" +bisect_write() { + state="$1" + rev="$2" + nolog="$3" + case "$state" in + bad) tag="$state" ;; + good|skip) tag="$state"-"$rev" ;; + *) die "Bad bisect_write argument: $state" ;; + esac + echo "$rev" >"$GIT_DIR/refs/bisect/$tag" + echo "# $state: "$(git show-branch $rev) >>"$GIT_DIR/BISECT_LOG" + test -z "$nolog" && echo "git-bisect $state $rev" >>"$GIT_DIR/BISECT_LOG" } -bisect_good() { +bisect_state() { bisect_autostart - case "$#" in - 0) revs=$(git rev-parse --verify HEAD) || exit ;; - *) revs=$(git rev-parse --revs-only --no-flags "$@") && - test '' != 
"$revs" || die "Bad rev input: $@" ;; + state=$1 + case "$#,$state" in + 0,*) + die "Please call 'bisect_state' with at least one argument." ;; + 1,bad|1,good|1,skip) + rev=$(git rev-parse --verify HEAD) || + die "Bad rev input: HEAD" + bisect_write "$state" "$rev" ;; + 2,bad) + rev=$(git rev-parse --verify "$2^{commit}") || + die "Bad rev input: $2" + bisect_write "$state" "$rev" ;; + *,good|*,skip) + shift + revs=$(git rev-parse --revs-only --no-flags "$@") && + test '' != "$revs" || die "Bad rev input: $@" + for rev in $revs + do + rev=$(git rev-parse --verify "$rev^{commit}") || + die "Bad rev commit: $rev^{commit}" + bisect_write "$state" "$rev" + done ;; + *) + usage ;; esac - for rev in $revs - do - rev=$(git rev-parse --verify "$rev^{commit}") || exit - bisect_write_good "$rev" - echo "git-bisect good $rev" >>"$GIT_DIR/BISECT_LOG" - - done bisect_auto_next } -bisect_write_good() { - rev="$1" - echo "$rev" >"$GIT_DIR/refs/bisect/good-$rev" - echo "# good: "$(git show-branch $rev) >>"$GIT_DIR/BISECT_LOG" -} - bisect_next_check() { missing_good= missing_bad= git show-ref -q --verify refs/bisect/bad || missing_bad=t @@ -206,17 +207,97 @@ bisect_auto_next() { bisect_next_check && bisect_next || : } +filter_skipped() { + _eval="$1" + _skip="$2" + + if [ -z "$_skip" ]; then + eval $_eval + return + fi + + # Let's parse the output of: + # "git rev-list --bisect-vars --bisect-all ..." + eval $_eval | while read hash line + do + case "$VARS,$FOUND,$TRIED,$hash" in + # We display some vars. + 1,*,*,*) echo "$hash $line" ;; + + # Split line. + ,*,*,---*) ;; + + # We had nothing to search. + ,,,bisect_rev*) + echo "bisect_rev=" + VARS=1 + ;; + + # We did not find a good bisect rev. + # This should happen only if the "bad" + # commit is also a "skip" commit. + ,,*,bisect_rev*) + echo "bisect_rev=$TRIED" + VARS=1 + ;; + + # We are searching. + ,,*,*) + TRIED="${TRIED:+$TRIED|}$hash" + case "$_skip" in + *$hash*) ;; + *) + echo "bisect_rev=$hash" + echo "bisect_tried=\"$TRIED\"" + FOUND=1 + ;; + esac + ;; + + # We have already found a rev to be tested. + ,1,*,bisect_rev*) VARS=1 ;; + ,1,*,*) ;; + + # ??? + *) die "filter_skipped error " \ + "VARS: '$VARS' " \ + "FOUND: '$FOUND' " \ + "TRIED: '$TRIED' " \ + "hash: '$hash' " \ + "line: '$line'" + ;; + esac + done +} + +exit_if_skipped_commits () { + _tried=$1 + if expr "$_tried" : ".*[|].*" > /dev/null ; then + echo "There are only 'skip'ped commit left to test." + echo "The first bad commit could be any of:" + echo "$_tried" | sed -e 's/[|]/\n/g' + echo "We cannot bisect more!" 
+ exit 2 + fi +} + bisect_next() { - case "$#" in 0) ;; *) usage ;; esac + case "$#" in 0) ;; *) usage ;; esac bisect_autostart bisect_next_check good + skip=$(git for-each-ref --format='%(objectname)' \ + "refs/bisect/skip-*" | tr '[\012]' ' ') || exit + + BISECT_OPT='' + test -n "$skip" && BISECT_OPT='--bisect-all' + bad=$(git rev-parse --verify refs/bisect/bad) && good=$(git for-each-ref --format='^%(objectname)' \ "refs/bisect/good-*" | tr '[\012]' ' ') && - eval="git rev-list --bisect-vars $good $bad --" && + eval="git rev-list --bisect-vars $BISECT_OPT $good $bad --" && eval="$eval $(cat "$GIT_DIR/BISECT_NAMES")" && - eval=$(eval "$eval") && + eval=$(filter_skipped "$eval" "$skip") && eval "$eval" || exit if [ -z "$bisect_rev" ]; then @@ -224,11 +305,16 @@ bisect_next() { exit 1 fi if [ "$bisect_rev" = "$bad" ]; then + exit_if_skipped_commits "$bisect_tried" echo "$bisect_rev is first bad commit" git diff-tree --pretty $bisect_rev exit 0 fi + # We should exit here only if the "bad" + # commit is also a "skip" commit (see above). + exit_if_skipped_commits "$bisect_rev" + echo "Bisecting: $bisect_nr revisions left to test after this" echo "$bisect_rev" >"$GIT_DIR/refs/heads/new-bisect" git checkout -q new-bisect || exit @@ -250,12 +336,10 @@ bisect_reset() { else branch=master fi ;; - 1) git show-ref --verify --quiet -- "refs/heads/$1" || { - echo >&2 "$1 does not seem to be a valid branch" - exit 1 - } + 1) git show-ref --verify --quiet -- "refs/heads/$1" || + die "$1 does not seem to be a valid branch" branch="$1" ;; - *) + *) usage ;; esac if git checkout "$branch"; then @@ -273,10 +357,7 @@ bisect_clean_state() { } bisect_replay () { - test -r "$1" || { - echo >&2 "cannot read $1 for replaying" - exit 1 - } + test -r "$1" || die "cannot read $1 for replaying" bisect_reset while read bisect command rev do @@ -284,21 +365,11 @@ bisect_replay () { case "$command" in start) cmd="bisect_start $rev" - eval "$cmd" - ;; - good) - echo "$rev" >"$GIT_DIR/refs/bisect/good-$rev" - echo "# good: "$(git show-branch $rev) >>"$GIT_DIR/BISECT_LOG" - echo "git-bisect good $rev" >>"$GIT_DIR/BISECT_LOG" - ;; - bad) - echo "$rev" >"$GIT_DIR/refs/bisect/bad" - echo "# bad: "$(git show-branch $rev) >>"$GIT_DIR/BISECT_LOG" - echo "git-bisect bad $rev" >>"$GIT_DIR/BISECT_LOG" - ;; + eval "$cmd" ;; + good|bad|skip) + bisect_write "$command" "$rev" ;; *) - echo >&2 "?? what are you talking about?" - exit 1 ;; + die "?? what are you talking about?" ;; esac done <"$1" bisect_auto_next @@ -320,24 +391,31 @@ bisect_run () { exit $res fi - # Use "bisect_good" or "bisect_bad" - # depending on run success or failure. - if [ $res -gt 0 ]; then - next_bisect='bisect_bad' + # Find current state depending on run success or failure. + # A special exit code of 125 means cannot test. + if [ $res -eq 125 ]; then + state='skip' + elif [ $res -gt 0 ]; then + state='bad' else - next_bisect='bisect_good' + state='good' fi - # We have to use a subshell because bisect_good or - # bisect_bad functions can exit. - ( $next_bisect > "$GIT_DIR/BISECT_RUN" ) + # We have to use a subshell because "bisect_state" can exit. + ( bisect_state $state > "$GIT_DIR/BISECT_RUN" ) res=$? 
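
The 125 convention documented in bisect_run above is easiest to see from the other side. Here is a sketch of a test program one might hand to "git bisect run": exit 0 for a good revision, non-zero for a bad one, and 125 when the revision cannot be tested at all, which bisect_run now translates into a skip. The build and test commands are placeholders.

	#include <stdlib.h>

	int main(void)
	{
		/* e.g. the tree does not even build at this revision */
		if (system("make -s") != 0)
			return 125;	/* cannot test: tell bisect to skip */

		/* the actual regression test; placeholder command */
		if (system("./run-tests.sh") != 0)
			return 1;	/* bad */

		return 0;		/* good */
	}
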
cat "$GIT_DIR/BISECT_RUN" + if grep "first bad commit could be any of" "$GIT_DIR/BISECT_RUN" \ + > /dev/null; then + echo >&2 "bisect run cannot continue any more" + exit $res + fi + if [ $res -ne 0 ]; then echo >&2 "bisect run failed:" - echo >&2 "$next_bisect exited with error code $res" + echo >&2 "'bisect_state $state' exited with error code $res" exit $res fi @@ -359,10 +437,8 @@ case "$#" in case "$cmd" in start) bisect_start "$@" ;; - bad) - bisect_bad "$@" ;; - good) - bisect_good "$@" ;; + bad|good|skip) + bisect_state "$cmd" "$@" ;; next) # Not sure we want "next" at the UI level anymore. bisect_next "$@" ;; diff --git a/git-clone.sh b/git-clone.sh index 5e582fe247..0ea3c24f59 100755 --- a/git-clone.sh +++ b/git-clone.sh @@ -28,7 +28,8 @@ get_repo_base() { ) 2>/dev/null } -if [ -n "$GIT_SSL_NO_VERIFY" ]; then +if [ -n "$GIT_SSL_NO_VERIFY" -o \ + "`git config --bool http.sslVerify`" = false ]; then curl_extra_args="-k" fi diff --git a/git-compat-util.h b/git-compat-util.h index f23d934f66..474f1d1ffb 100644 --- a/git-compat-util.h +++ b/git-compat-util.h @@ -147,6 +147,11 @@ extern ssize_t git_pread(int fd, void *buf, size_t count, off_t offset); extern int gitsetenv(const char *, const char *, int); #endif +#ifdef NO_MKDTEMP +#define mkdtemp gitmkdtemp +extern char *gitmkdtemp(char *); +#endif + #ifdef NO_UNSETENV #define unsetenv gitunsetenv extern void gitunsetenv(const char *); diff --git a/git-cvsexportcommit.perl b/git-cvsexportcommit.perl index 7a955d4530..26844af439 100755 --- a/git-cvsexportcommit.perl +++ b/git-cvsexportcommit.perl @@ -82,6 +82,7 @@ foreach my $line (@commit) { } } +my $noparent = "0000000000000000000000000000000000000000"; if ($parent) { my $found; # double check that it's a valid parent @@ -95,8 +96,10 @@ if ($parent) { } else { # we don't have a parent from the cmdline... if (@parents == 1) { # it's safe to get it from the commit $parent = $parents[0]; - } else { # or perhaps not! - die "This commit has more than one parent -- please name the parent you want to use explicitly"; + } elsif (@parents == 0) { # there is no parent + $parent = $noparent; + } else { # cannot choose automatically from multiple parents + die "This commit has more than one parent -- please name the parent you want to use explicitly"; } } @@ -116,7 +119,11 @@ if ($opt_a) { } close MSG; -`git-diff-tree --binary -p $parent $commit >.cvsexportcommit.diff`;# || die "Cannot diff"; +if ($parent eq $noparent) { + `git-diff-tree --binary -p --root $commit >.cvsexportcommit.diff`;# || die "Cannot diff"; +} else { + `git-diff-tree --binary -p $parent $commit >.cvsexportcommit.diff`;# || die "Cannot diff"; +} ## apply non-binary changes @@ -219,6 +226,17 @@ print "Applying\n"; print "Patch applied successfully. Adding new files and directories to CVS\n"; my $dirtypatch = 0; + +# +# We have to add the directories in order otherwise we will have +# problems when we try and add the sub-directory of a directory we +# have not added yet. +# +# Luckily this is easy to deal with by sorting the directories and +# dealing with the shortest ones first. 
+# +@dirs = sort { length $a <=> length $b} @dirs; + foreach my $d (@dirs) { if (system(@cvs,'add',$d)) { $dirtypatch = 1; diff --git a/git-cvsserver.perl b/git-cvsserver.perl index 13dbd27a80..0d55fec04f 100755 --- a/git-cvsserver.perl +++ b/git-cvsserver.perl @@ -145,8 +145,10 @@ if ($state->{method} eq 'pserver') { } my $request = $1; $line = <STDIN>; chomp $line; - req_Root('root', $line) # reuse Root - or die "E Invalid root $line \n"; + unless (req_Root('root', $line)) { # reuse Root + print "E Invalid root $line \n"; + exit 1; + } $line = <STDIN>; chomp $line; unless ($line eq 'anonymous') { print "E Only anonymous user allowed via pserver\n"; diff --git a/git-gui/git-gui.sh b/git-gui/git-gui.sh index f789e91b66..9335a9761b 100755 --- a/git-gui/git-gui.sh +++ b/git-gui/git-gui.sh @@ -305,7 +305,7 @@ proc _which {what} { global env _search_exe _search_path if {$_search_path eq {}} { - if {[is_Cygwin]} { + if {[is_Cygwin] && [regexp {^(/|\.:)} $env(PATH)]} { set _search_path [split [exec cygpath \ --windows \ --path \ @@ -498,7 +498,11 @@ proc rmsel_tag {text} { set _git [_which git] if {$_git eq {}} { catch {wm withdraw .} - error_popup "Cannot find git in PATH." + tk_messageBox \ + -icon error \ + -type ok \ + -title [mc "git-gui: fatal error"] \ + -message [mc "Cannot find git in PATH."] exit 1 } @@ -534,6 +538,7 @@ regsub -- {-dirty$} $_git_version {} _git_version regsub {\.[0-9]+\.g[0-9a-f]+$} $_git_version {} _git_version regsub {\.rc[0-9]+$} $_git_version {} _git_version regsub {\.GIT$} $_git_version {} _git_version +regsub {\.[a-zA-Z]+\.[0-9]+$} $_git_version {} _git_version if {![regexp {^[1-9]+(\.[0-9]+)+$} $_git_version]} { catch {wm withdraw .} @@ -903,6 +908,35 @@ proc rescan {after {honor_trustmtime 1}} { } } +if {[is_Cygwin]} { + set is_git_info_link {} + set is_git_info_exclude {} + proc have_info_exclude {} { + global is_git_info_link is_git_info_exclude + + if {$is_git_info_link eq {}} { + set is_git_info_link [file isfile [gitdir info.lnk]] + } + + if {$is_git_info_link} { + if {$is_git_info_exclude eq {}} { + if {[catch {exec test -f [gitdir info exclude]}]} { + set is_git_info_exclude 0 + } else { + set is_git_info_exclude 1 + } + } + return $is_git_info_exclude + } else { + return [file readable [gitdir info exclude]] + } + } +} else { + proc have_info_exclude {} { + return [file readable [gitdir info exclude]] + } +} + proc rescan_stage2 {fd after} { global rescan_active buf_rdi buf_rdf buf_rlo @@ -913,9 +947,8 @@ proc rescan_stage2 {fd after} { } set ls_others [list --exclude-per-directory=.gitignore] - set info_exclude [gitdir info exclude] - if {[file readable $info_exclude]} { - lappend ls_others "--exclude-from=$info_exclude" + if {[have_info_exclude]} { + lappend ls_others "--exclude-from=[gitdir info exclude]" } set user_exclude [get_config core.excludesfile] if {$user_exclude ne {} && [file readable $user_exclude]} { @@ -1093,11 +1126,17 @@ proc mapdesc {state path} { } proc ui_status {msg} { - $::main_status show $msg + global main_status + if {[info exists main_status]} { + $main_status show $msg + } } proc ui_ready {{test {}}} { - $::main_status show {Ready.} $test + global main_status + if {[info exists main_status]} { + $main_status show [mc "Ready."] $test + } } proc escape_path {path} { @@ -1436,7 +1475,27 @@ proc do_gitk {revs} { if {! 
[file exists $exe]} { error_popup "Unable to start gitk:\n\n$exe does not exist" } else { + global env + + if {[info exists env(GIT_DIR)]} { + set old_GIT_DIR $env(GIT_DIR) + } else { + set old_GIT_DIR {} + } + + set pwd [pwd] + cd [file dirname [gitdir]] + set env(GIT_DIR) [file tail [gitdir]] + eval exec $cmd $revs & + + if {$old_GIT_DIR eq {}} { + unset env(GIT_DIR) + } else { + set env(GIT_DIR) $old_GIT_DIR + } + cd $pwd + ui_status $::starting_gitk_msg after 10000 { ui_ready $starting_gitk_msg @@ -1648,7 +1707,7 @@ proc apply_config {} { set font [lindex $option 1] if {[catch { foreach {cn cv} $repo_config(gui.$name) { - font configure $font $cn $cv + font configure $font $cn $cv -weight normal } } err]} { error_popup "Invalid font specified in gui.$name:\n\n$err" diff --git a/git-gui/lib/commit.tcl b/git-gui/lib/commit.tcl index f857a2ff5b..57238129e4 100644 --- a/git-gui/lib/commit.tcl +++ b/git-gui/lib/commit.tcl @@ -253,7 +253,7 @@ proc commit_committree {fd_wt curHEAD msg} { global repo_config gets $fd_wt tree_id - if {$tree_id eq {} || [catch {close $fd_wt} err]} { + if {[catch {close $fd_wt} err]} { error_popup "write-tree failed:\n\n$err" ui_status {Commit failed.} unlock_index diff --git a/git-gui/lib/console.tcl b/git-gui/lib/console.tcl index 6f718fbac3..b038a78358 100644 --- a/git-gui/lib/console.tcl +++ b/git-gui/lib/console.tcl @@ -122,7 +122,7 @@ method _read {fd after} { } else { $w.m.t delete $console_cr end $w.m.t insert end "\n" - $w.m.t insert end [string range $buf $c $cr] + $w.m.t insert end [string range $buf $c [expr {$cr - 1}]] set c $cr incr c } diff --git a/git-gui/lib/status_bar.tcl b/git-gui/lib/status_bar.tcl index 72a8fe1fd3..3bf79eb6e0 100644 --- a/git-gui/lib/status_bar.tcl +++ b/git-gui/lib/status_bar.tcl @@ -69,7 +69,10 @@ method update_meter {buf} { set prior [string range $meter 0 $r] set meter [string range $meter [expr {$r + 1}] end] - if {[regexp "\\((\\d+)/(\\d+)\\)\\s+done\r\$" $prior _j a b]} { + set p "\\((\\d+)/(\\d+)\\)" + if {[regexp ":\\s*\\d+% $p\(?:, done.\\s*\n|\\s*\r)\$" $prior _j a b]} { + update $this $a $b + } elseif {[regexp "$p\\s+done\r\$" $prior _j a b]} { update $this $a $b } } diff --git a/git-ls-remote.sh b/git-ls-remote.sh index d56cf92ebf..fec70bbf88 100755 --- a/git-ls-remote.sh +++ b/git-ls-remote.sh @@ -54,9 +54,10 @@ tmpdir=$tmp-d case "$peek_repo" in http://* | https://* | ftp://* ) - if [ -n "$GIT_SSL_NO_VERIFY" ]; then - curl_extra_args="-k" - fi + if [ -n "$GIT_SSL_NO_VERIFY" -o \ + "`git config --bool http.sslVerify`" = false ]; then + curl_extra_args="-k" + fi if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \ "`git config --bool http.noEPSV`" = true ]; then curl_extra_args="${curl_extra_args} --disable-epsv" diff --git a/git-mergetool.sh b/git-mergetool.sh index 9f4f3134b6..a68b40386b 100755 --- a/git-mergetool.sh +++ b/git-mergetool.sh @@ -192,10 +192,10 @@ merge_file () { case "$merge_tool" in kdiff3) if base_present ; then - (kdiff3 --auto --L1 "$path (Base)" --L2 "$path (Local)" --L3 "$path (Remote)" \ + ("$merge_tool_path" --auto --L1 "$path (Base)" --L2 "$path (Local)" --L3 "$path (Remote)" \ -o "$path" -- "$BASE" "$LOCAL" "$REMOTE" > /dev/null 2>&1) else - (kdiff3 --auto --L1 "$path (Local)" --L2 "$path (Remote)" \ + ("$merge_tool_path" --auto --L1 "$path (Local)" --L2 "$path (Remote)" \ -o "$path" -- "$LOCAL" "$REMOTE" > /dev/null 2>&1) fi status=$? 
@@ -203,35 +203,35 @@ merge_file () { ;; tkdiff) if base_present ; then - tkdiff -a "$BASE" -o "$path" -- "$LOCAL" "$REMOTE" + "$merge_tool_path" -a "$BASE" -o "$path" -- "$LOCAL" "$REMOTE" else - tkdiff -o "$path" -- "$LOCAL" "$REMOTE" + "$merge_tool_path" -o "$path" -- "$LOCAL" "$REMOTE" fi status=$? save_backup ;; meld|vimdiff) touch "$BACKUP" - $merge_tool -- "$LOCAL" "$path" "$REMOTE" + "$merge_tool_path" -- "$LOCAL" "$path" "$REMOTE" check_unchanged save_backup ;; gvimdiff) touch "$BACKUP" - gvimdiff -f -- "$LOCAL" "$path" "$REMOTE" + "$merge_tool_path" -f -- "$LOCAL" "$path" "$REMOTE" check_unchanged save_backup ;; xxdiff) touch "$BACKUP" if base_present ; then - xxdiff -X --show-merged-pane \ + "$merge_tool_path" -X --show-merged-pane \ -R 'Accel.SaveAsMerged: "Ctrl-S"' \ -R 'Accel.Search: "Ctrl+F"' \ -R 'Accel.SearchForward: "Ctrl-G"' \ --merged-file "$path" -- "$LOCAL" "$BASE" "$REMOTE" else - xxdiff -X --show-merged-pane \ + "$merge_tool_path" -X --show-merged-pane \ -R 'Accel.SaveAsMerged: "Ctrl-S"' \ -R 'Accel.Search: "Ctrl+F"' \ -R 'Accel.SearchForward: "Ctrl-G"' \ @@ -243,18 +243,28 @@ merge_file () { opendiff) touch "$BACKUP" if base_present; then - opendiff "$LOCAL" "$REMOTE" -ancestor "$BASE" -merge "$path" | cat + "$merge_tool_path" "$LOCAL" "$REMOTE" -ancestor "$BASE" -merge "$path" | cat else - opendiff "$LOCAL" "$REMOTE" -merge "$path" | cat + "$merge_tool_path" "$LOCAL" "$REMOTE" -merge "$path" | cat + fi + check_unchanged + save_backup + ;; + ecmerge) + touch "$BACKUP" + if base_present; then + "$merge_tool_path" "$BASE" "$LOCAL" "$REMOTE" --mode=merge3 --to="$path" + else + "$merge_tool_path" "$LOCAL" "$REMOTE" --mode=merge2 --to="$path" fi check_unchanged save_backup ;; emerge) if base_present ; then - emacs -f emerge-files-with-ancestor-command "$LOCAL" "$REMOTE" "$BASE" "$(basename "$path")" + "$merge_tool_path" -f emerge-files-with-ancestor-command "$LOCAL" "$REMOTE" "$BASE" "$(basename "$path")" else - emacs -f emerge-files-command "$LOCAL" "$REMOTE" "$(basename "$path")" + "$merge_tool_path" -f emerge-files-command "$LOCAL" "$REMOTE" "$(basename "$path")" fi status=$? save_backup @@ -297,17 +307,38 @@ do shift done +valid_tool() { + case "$1" in + kdiff3 | tkdiff | xxdiff | meld | opendiff | emerge | vimdiff | gvimdiff | ecmerge) + ;; # happy + *) + return 1 + ;; + esac +} + +init_merge_tool_path() { + merge_tool_path=`git config mergetool.$1.path` + if test -z "$merge_tool_path" ; then + case "$1" in + emerge) + merge_tool_path=emacs + ;; + *) + merge_tool_path=$1 + ;; + esac + fi +} + + if test -z "$merge_tool"; then merge_tool=`git config merge.tool` - case "$merge_tool" in - kdiff3 | tkdiff | xxdiff | meld | opendiff | emerge | vimdiff | gvimdiff | "") - ;; # happy - *) + if test -n "$merge_tool" && ! valid_tool "$merge_tool"; then echo >&2 "git config option merge.tool set to unknown tool: $merge_tool" echo >&2 "Resetting to default..." unset merge_tool - ;; - esac + fi fi if test -z "$merge_tool" ; then @@ -329,40 +360,30 @@ if test -z "$merge_tool" ; then merge_tool_candidates="$merge_tool_candidates opendiff emerge vimdiff" echo "merge tool candidates: $merge_tool_candidates" for i in $merge_tool_candidates; do - if test $i = emerge ; then - cmd=emacs - else - cmd=$i - fi - if type $cmd > /dev/null 2>&1; then + init_merge_tool_path $i + if type "$merge_tool_path" > /dev/null 2>&1; then merge_tool=$i break fi done if test -z "$merge_tool" ; then - echo "No available merge resolution programs available." 
+ echo "No known merge resolution program available." exit 1 fi +else + if ! valid_tool "$merge_tool"; then + echo >&2 "Unknown merge_tool $merge_tool" + exit 1 + fi + + init_merge_tool_path "$merge_tool" + + if ! type "$merge_tool_path" > /dev/null 2>&1; then + echo "The merge tool $merge_tool is not available as '$merge_tool_path'" + exit 1 + fi fi -case "$merge_tool" in - kdiff3|tkdiff|meld|xxdiff|vimdiff|gvimdiff|opendiff) - if ! type "$merge_tool" > /dev/null 2>&1; then - echo "The merge tool $merge_tool is not available" - exit 1 - fi - ;; - emerge) - if ! type "emacs" > /dev/null 2>&1; then - echo "Emacs is not available" - exit 1 - fi - ;; - *) - echo "Unknown merge tool: $merge_tool" - exit 1 - ;; -esac if test $# -eq 0 ; then files=`git ls-files -u | sed -e 's/^[^ ]* //' | sort -u` diff --git a/git-pull.sh b/git-pull.sh index 74bfc16744..75ec011969 100755 --- a/git-pull.sh +++ b/git-pull.sh @@ -4,7 +4,7 @@ # # Fetch one or more remote refs and merge it/them into the current HEAD. -USAGE='[-n | --no-summary] [--no-commit] [-s strategy]... [<fetch-options>] <repo> <head>...' +USAGE='[-n | --no-summary] [--[no-]commit] [--[no-]squash] [--[no-]ff] [-s strategy]... [<fetch-options>] <repo> <head>...' LONG_USAGE='Fetch one or more remote refs and merge it/them into the current HEAD.' SUBDIRECTORY_OK=Yes . git-sh-setup @@ -15,7 +15,7 @@ cd_to_toplevel test -z "$(git ls-files -u)" || die "You are in the middle of a conflicted merge." -strategy_args= no_summary= no_commit= squash= +strategy_args= no_summary= no_commit= squash= no_ff= while : do case "$1" in @@ -27,8 +27,16 @@ do ;; --no-c|--no-co|--no-com|--no-comm|--no-commi|--no-commit) no_commit=--no-commit ;; + --c|--co|--com|--comm|--commi|--commit) + no_commit=--commit ;; --sq|--squ|--squa|--squas|--squash) squash=--squash ;; + --no-sq|--no-squ|--no-squa|--no-squas|--no-squash) + squash=--no-squash ;; + --ff) + no_ff=--ff ;; + --no-ff) + no_ff=--no-ff ;; -s=*|--s=*|--st=*|--str=*|--stra=*|--strat=*|--strate=*|\ --strateg=*|--strategy=*|\ -s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy) @@ -133,5 +141,5 @@ then fi merge_name=$(git fmt-merge-msg <"$GIT_DIR/FETCH_HEAD") || exit -exec git-merge $no_summary $no_commit $squash $strategy_args \ +exec git-merge $no_summary $no_commit $squash $no_ff $strategy_args \ "$merge_name" HEAD $merge_head diff --git a/git-rebase--interactive.sh b/git-rebase--interactive.sh index 0dd77b4005..76dc679e62 100755 --- a/git-rebase--interactive.sh +++ b/git-rebase--interactive.sh @@ -116,7 +116,7 @@ pick_one () { sha1=$(git rev-parse --short $sha1) output warn Fast forward to $sha1 else - output git cherry-pick $STRATEGY "$@" + output git cherry-pick "$@" fi } @@ -172,6 +172,8 @@ pick_one_preserving_merges () { author_script=$(get_author_ident_from_commit $sha1) eval "$author_script" msg="$(git cat-file commit $sha1 | sed -e '1,/^$/d')" + # No point in merging the first parent, that's HEAD + new_parents=${new_parents# $first_parent} # NEEDSWORK: give rerere a chance if ! 
GIT_AUTHOR_NAME="$GIT_AUTHOR_NAME" \ GIT_AUTHOR_EMAIL="$GIT_AUTHOR_EMAIL" \ @@ -184,7 +186,7 @@ pick_one_preserving_merges () { fi ;; *) - output git cherry-pick $STRATEGY "$@" || + output git cherry-pick "$@" || die_with_patch $sha1 "Could not pick $sha1" ;; esac @@ -387,7 +389,6 @@ do output git reset --hard && do_rest ;; -s|--strategy) - shift case "$#,$1" in *,*=*) STRATEGY="-s `expr "z$1" : 'z-[^=]*=\(.*\)'`" ;; diff --git a/git-rebase.sh b/git-rebase.sh index 1583402a06..224cca98ee 100755 --- a/git-rebase.sh +++ b/git-rebase.sh @@ -59,7 +59,7 @@ continue_merge () { die "$RESOLVEMSG" fi - cmt=`cat $dotest/current` + cmt=`cat "$dotest/current"` if ! git diff-index --quiet HEAD then if ! git-commit -C "$cmt" @@ -84,14 +84,14 @@ continue_merge () { } call_merge () { - cmt="$(cat $dotest/cmt.$1)" + cmt="$(cat "$dotest/cmt.$1")" echo "$cmt" > "$dotest/current" hd=$(git rev-parse --verify HEAD) cmt_name=$(git symbolic-ref HEAD) - msgnum=$(cat $dotest/msgnum) - end=$(cat $dotest/end) + msgnum=$(cat "$dotest/msgnum") + end=$(cat "$dotest/end") eval GITHEAD_$cmt='"${cmt_name##refs/heads/}~$(($end - $msgnum))"' - eval GITHEAD_$hd='"$(cat $dotest/onto_name)"' + eval GITHEAD_$hd='$(cat "$dotest/onto_name")' export GITHEAD_$cmt GITHEAD_$hd git-merge-$strategy "$cmt^" -- "$hd" "$cmt" rv=$? @@ -140,10 +140,10 @@ do } if test -d "$dotest" then - prev_head="`cat $dotest/prev_head`" - end="`cat $dotest/end`" - msgnum="`cat $dotest/msgnum`" - onto="`cat $dotest/onto`" + prev_head=$(cat "$dotest/prev_head") + end=$(cat "$dotest/end") + msgnum=$(cat "$dotest/msgnum") + onto=$(cat "$dotest/onto") continue_merge while test "$msgnum" -le "$end" do @@ -160,11 +160,11 @@ do if test -d "$dotest" then git rerere clear - prev_head="`cat $dotest/prev_head`" - end="`cat $dotest/end`" - msgnum="`cat $dotest/msgnum`" + prev_head=$(cat "$dotest/prev_head") + end=$(cat "$dotest/end") + msgnum=$(cat "$dotest/msgnum") msgnum=$(($msgnum + 1)) - onto="`cat $dotest/onto`" + onto=$(cat "$dotest/onto") while test "$msgnum" -le "$end" do call_merge "$msgnum" diff --git a/git-remote.perl b/git-remote.perl index 9ca3e7ef37..d13e4c1fea 100755 --- a/git-remote.perl +++ b/git-remote.perl @@ -244,7 +244,8 @@ sub show_remote { print "* remote $name\n"; print " URL: $info->{'URL'}\n"; for my $branchname (sort keys %$branch) { - next if ($branch->{$branchname}{'REMOTE'} ne $name); + next unless (defined $branch->{$branchname}{'REMOTE'} && + $branch->{$branchname}{'REMOTE'} eq $name); my @merged = map { s|^refs/heads/||; $_; diff --git a/git-send-email.perl b/git-send-email.perl index 62e1429733..96051bc01e 100755 --- a/git-send-email.perl +++ b/git-send-email.perl @@ -191,6 +191,7 @@ my %config_settings = ( "smtpserverport" => \$smtp_server_port, "smtpuser" => \$smtp_authuser, "smtppass" => \$smtp_authpass, + "to" => \@to, "cccmd" => \$cc_cmd, "aliasfiletype" => \$aliasfiletype, "bcc" => \@bcclist, diff --git a/git-sh-setup.sh b/git-sh-setup.sh index 3c325fd133..86d7d4c4e7 100755 --- a/git-sh-setup.sh +++ b/git-sh-setup.sh @@ -110,7 +110,7 @@ esac if [ -z "$SUBDIRECTORY_OK" ] then : ${GIT_DIR=.git} - GIT_DIR=$(GIT_DIR="$GIT_DIR" git rev-parse --git-dir) || { + test -z "$(git rev-parse --show-cdup)" || { exit=$? echo >&2 "You need to run this command from the toplevel of the working tree." 
exit $exit diff --git a/git-stash.sh b/git-stash.sh index 7ba61625ba..5bbda47b7b 100755 --- a/git-stash.sh +++ b/git-stash.sh @@ -139,7 +139,7 @@ apply_stash () { unstashed_index_tree= if test -n "$unstash_index" && test "$b_tree" != "$i_tree" then - git diff --binary $s^2^..$s^2 | git apply --cached + git diff-tree --binary $s^2^..$s^2 | git apply --cached test $? -ne 0 && die 'Conflicts in index. Try without --index.' unstashed_index_tree=$(git-write-tree) || @@ -162,7 +162,7 @@ apply_stash () { git read-tree "$unstashed_index_tree" else a="$TMP-added" && - git diff --cached --name-only --diff-filter=A $c_tree >"$a" && + git diff-index --cached --name-only --diff-filter=A $c_tree >"$a" && git read-tree --reset $c_tree && git update-index --add --stdin <"$a" || die "Cannot unstage modified files" @@ -6,28 +6,6 @@ const char git_usage_string[] = "git [--version] [--exec-path[=GIT_EXEC_PATH]] [-p|--paginate|--no-pager] [--bare] [--git-dir=GIT_DIR] [--work-tree=GIT_WORK_TREE] [--help] COMMAND [ARGS]"; -static void prepend_to_path(const char *dir, int len) -{ - const char *old_path = getenv("PATH"); - char *path; - int path_len = len; - - if (!old_path) - old_path = "/usr/local/bin:/usr/bin:/bin"; - - path_len = len + strlen(old_path) + 1; - - path = xmalloc(path_len + 1); - - memcpy(path, dir, len); - path[len] = ':'; - memcpy(path + len + 1, old_path, path_len - len); - - setenv("PATH", path, 1); - - free(path); -} - static int handle_options(const char*** argv, int* argc, int* envchanged) { int handled = 0; @@ -51,7 +29,7 @@ static int handle_options(const char*** argv, int* argc, int* envchanged) if (!prefixcmp(cmd, "--exec-path")) { cmd += 11; if (*cmd == '=') - git_set_exec_path(cmd + 1); + git_set_argv_exec_path(cmd + 1); else { puts(git_exec_path()); exit(0); @@ -328,6 +306,8 @@ static void handle_internal_command(int argc, const char **argv) { "diff-files", cmd_diff_files }, { "diff-index", cmd_diff_index, RUN_SETUP }, { "diff-tree", cmd_diff_tree, RUN_SETUP }, + { "fetch", cmd_fetch, RUN_SETUP }, + { "fetch-pack", cmd_fetch_pack, RUN_SETUP }, { "fetch--tool", cmd_fetch__tool, RUN_SETUP }, { "fmt-merge-msg", cmd_fmt_merge_msg, RUN_SETUP }, { "for-each-ref", cmd_for_each_ref, RUN_SETUP }, @@ -338,6 +318,9 @@ static void handle_internal_command(int argc, const char **argv) { "get-tar-commit-id", cmd_get_tar_commit_id }, { "grep", cmd_grep, RUN_SETUP | USE_PAGER }, { "help", cmd_help }, +#ifndef NO_CURL + { "http-fetch", cmd_http_fetch, RUN_SETUP }, +#endif { "init", cmd_init_db }, { "init-db", cmd_init_db }, { "log", cmd_log, RUN_SETUP | USE_PAGER }, @@ -403,19 +386,17 @@ int main(int argc, const char **argv) { const char *cmd = argv[0] ? argv[0] : "git-help"; char *slash = strrchr(cmd, '/'); - const char *exec_path = NULL; + const char *cmd_path = NULL; int done_alias = 0; /* * Take the basename of argv[0] as the command * name, and the dirname as the default exec_path - * if it's an absolute path and we don't have - * anything better. + * if we don't have anything better. 
*/ if (slash) { *slash++ = 0; - if (*cmd == '/') - exec_path = cmd; + cmd_path = cmd; cmd = slash; } @@ -444,23 +425,20 @@ int main(int argc, const char **argv) if (!prefixcmp(argv[0], "--")) argv[0] += 2; } else { - /* Default command: "help" */ - argv[0] = "help"; - argc = 1; + /* The user didn't specify a command; give them help */ + printf("usage: %s\n\n", git_usage_string); + list_common_cmds_help(); + exit(1); } cmd = argv[0]; /* - * We execute external git command via execv_git_cmd(), - * which looks at "--exec-path" option, GIT_EXEC_PATH - * environment, and $(gitexecdir) in Makefile while built, - * in this order. For scripted commands, we prepend - * the value of the exec_path variable to the PATH. + * We use PATH to find git commands, but we prepend some higher + * precidence paths: the "--exec-path" option, the GIT_EXEC_PATH + * environment, and the $(gitexecdir) from the Makefile at build + * time. */ - if (exec_path) - prepend_to_path(exec_path, strlen(exec_path)); - exec_path = git_exec_path(); - prepend_to_path(exec_path, strlen(exec_path)); + setup_path(cmd_path); while (1) { /* See if it's an internal command */ @@ -82,17 +82,20 @@ proc dorunq {} { proc start_rev_list {view} { global startmsecs global commfd leftover tclencoding datemode - global viewargs viewfiles commitidx - global lookingforhead showlocalchanges + global viewargs viewfiles commitidx viewcomplete vnextroot + global showlocalchanges commitinterest mainheadid + global progressdirn progresscoords proglastnc curview set startmsecs [clock clicks -milliseconds] set commitidx($view) 0 + set viewcomplete($view) 0 + set vnextroot($view) 0 set order "--topo-order" if {$datemode} { set order "--date-order" } if {[catch { - set fd [open [concat | git log -z --pretty=raw $order --parents \ + set fd [open [concat | git log --no-color -z --pretty=raw $order --parents \ --boundary $viewargs($view) "--" $viewfiles($view)] r] } err]} { error_popup "Error executing git rev-list: $err" @@ -100,13 +103,20 @@ proc start_rev_list {view} { } set commfd($view) $fd set leftover($view) {} - set lookingforhead $showlocalchanges + if {$showlocalchanges} { + lappend commitinterest($mainheadid) {dodiffindex} + } fconfigure $fd -blocking 0 -translation lf -eofchar {} if {$tclencoding != {}} { fconfigure $fd -encoding $tclencoding } filerun $fd [list getcommitlines $fd $view] - nowbusy $view + nowbusy $view "Reading" + if {$view == $curview} { + set progressdirn 1 + set progresscoords {0 0} + set proglastnc 0 + } } proc stop_rev_list {} { @@ -123,7 +133,7 @@ proc stop_rev_list {} { } proc getcommits {} { - global phase canv mainfont curview + global phase canv curview set phase getcommits initlayout @@ -131,12 +141,26 @@ proc getcommits {} { show_status "Reading commits..." } +# This makes a string representation of a positive integer which +# sorts as a string in numerical order +proc strrep {n} { + if {$n < 16} { + return [format "%x" $n] + } elseif {$n < 256} { + return [format "x%.2x" $n] + } elseif {$n < 65536} { + return [format "y%.4x" $n] + } + return [format "z%.8x" $n] +} + proc getcommitlines {fd view} { - global commitlisted + global commitlisted commitinterest global leftover commfd - global displayorder commitidx commitrow commitdata + global displayorder commitidx viewcomplete commitrow commitdata global parentlist children curview hlview global vparentlist vdisporder vcmitlisted + global ordertok vnextroot idpending set stuff [read $fd 500000] # git log doesn't terminate the last commit with a null... 
@@ -147,9 +171,29 @@ proc getcommitlines {fd view} { if {![eof $fd]} { return 1 } - global viewname + # Check if we have seen any ids listed as parents that haven't + # appeared in the list + foreach vid [array names idpending "$view,*"] { + # should only get here if git log is buggy + set id [lindex [split $vid ","] 1] + set commitrow($vid) $commitidx($view) + incr commitidx($view) + if {$view == $curview} { + lappend parentlist {} + lappend displayorder $id + lappend commitlisted 0 + } else { + lappend vparentlist($view) {} + lappend vdisporder($view) $id + lappend vcmitlisted($view) 0 + } + } + set viewcomplete($view) 1 + global viewname progresscoords unset commfd($view) notbusy $view + set progresscoords {0 0} + adjustprogress # set it blocking so we wait for the process to terminate fconfigure $fd -blocking 1 if {[catch {close $fd} err]} { @@ -221,14 +265,35 @@ proc getcommitlines {fd view} { exit 1 } set id [lindex $ids 0] + if {![info exists ordertok($view,$id)]} { + set otok "o[strrep $vnextroot($view)]" + incr vnextroot($view) + set ordertok($view,$id) $otok + } else { + set otok $ordertok($view,$id) + unset idpending($view,$id) + } if {$listed} { set olds [lrange $ids 1 end] - set i 0 - foreach p $olds { - if {$i == 0 || [lsearch -exact $olds $p] >= $i} { - lappend children($view,$p) $id + if {[llength $olds] == 1} { + set p [lindex $olds 0] + lappend children($view,$p) $id + if {![info exists ordertok($view,$p)]} { + set ordertok($view,$p) $ordertok($view,$id) + set idpending($view,$p) 1 + } + } else { + set i 0 + foreach p $olds { + if {$i == 0 || [lsearch -exact $olds $p] >= $i} { + lappend children($view,$p) $id + } + if {![info exists ordertok($view,$p)]} { + set ordertok($view,$p) "$otok[strrep $i]]" + set idpending($view,$p) 1 + } + incr i } - incr i } } else { set olds {} @@ -248,24 +313,54 @@ proc getcommitlines {fd view} { lappend vdisporder($view) $id lappend vcmitlisted($view) $listed } + if {[info exists commitinterest($id)]} { + foreach script $commitinterest($id) { + eval [string map [list "%I" $id] $script] + } + unset commitinterest($id) + } set gotsome 1 } if {$gotsome} { run chewcommits $view + if {$view == $curview} { + # update progress bar + global progressdirn progresscoords proglastnc + set inc [expr {($commitidx($view) - $proglastnc) * 0.0002}] + set proglastnc $commitidx($view) + set l [lindex $progresscoords 0] + set r [lindex $progresscoords 1] + if {$progressdirn} { + set r [expr {$r + $inc}] + if {$r >= 1.0} { + set r 1.0 + set progressdirn 0 + } + if {$r > 0.2} { + set l [expr {$r - 0.2}] + } + } else { + set l [expr {$l - $inc}] + if {$l <= 0.0} { + set l 0.0 + set progressdirn 1 + } + set r [expr {$l + 0.2}] + } + set progresscoords [list $l $r] + adjustprogress + } } return 2 } proc chewcommits {view} { - global curview hlview commfd + global curview hlview viewcomplete global selectedline pending_select - set more 0 if {$view == $curview} { - set allread [expr {![info exists commfd($view)]}] - set tlimit [expr {[clock clicks -milliseconds] + 50}] - set more [layoutmore $tlimit $allread] - if {$allread && !$more} { + layoutmore + if {$viewcomplete($view)} { global displayorder commitidx phase global numcommits startmsecs @@ -286,7 +381,7 @@ proc chewcommits {view} { if {[info exists hlview] && $view == $hlview} { vhighlightmore } - return $more + return 0 } proc readcommit {id} { @@ -295,7 +390,7 @@ proc readcommit {id} { } proc updatecommits {} { - global viewdata curview phase displayorder + global viewdata curview phase displayorder ordertok 
idpending global children commitrow selectedline thickerline showneartags if {$phase ne {}} { @@ -306,6 +401,10 @@ proc updatecommits {} { foreach id $displayorder { catch {unset children($n,$id)} catch {unset commitrow($n,$id)} + catch {unset ordertok($n,$id)} + } + foreach vid [array names idpending "$n,*"] { + unset idpending($vid) } set curview -1 catch {unset selectedline} @@ -516,7 +615,7 @@ proc confirm_popup msg { proc makewindow {} { global canv canv2 canv3 linespc charspc ctext cflist - global textfont mainfont uifont tabstop + global tabstop global findtype findtypemenu findloc findstring fstring geometry global entries sha1entry sha1string sha1but global diffcontextstring diffcontext @@ -525,23 +624,26 @@ proc makewindow {} { global highlight_files gdttype global searchstring sstring global bgcolor fgcolor bglist fglist diffcolors selectbgcolor - global headctxmenu + global headctxmenu progresscanv progressitem progresscoords statusw + global fprogitem fprogcoord lastprogupdate progupdatepending + global rprogitem rprogcoord + global have_tk85 menu .bar .bar add cascade -label "File" -menu .bar.file - .bar configure -font $uifont + .bar configure -font uifont menu .bar.file .bar.file add command -label "Update" -command updatecommits .bar.file add command -label "Reread references" -command rereadrefs .bar.file add command -label "List references" -command showrefs .bar.file add command -label "Quit" -command doquit - .bar.file configure -font $uifont + .bar.file configure -font uifont menu .bar.edit .bar add cascade -label "Edit" -menu .bar.edit .bar.edit add command -label "Preferences" -command doprefs - .bar.edit configure -font $uifont + .bar.edit configure -font uifont - menu .bar.view -font $uifont + menu .bar.view -font uifont .bar add cascade -label "View" -menu .bar.view .bar.view add command -label "New view..." -command {newview 0} .bar.view add command -label "Edit view..." -command editview \ @@ -555,7 +657,7 @@ proc makewindow {} { .bar add cascade -label "Help" -menu .bar.help .bar.help add command -label "About gitk" -command about .bar.help add command -label "Key bindings" -command keys - .bar.help configure -font $uifont + .bar.help configure -font uifont . configure -menu .bar # the gui has upper and lower half, parts of a paned window. 
@@ -612,10 +714,10 @@ proc makewindow {} { set entries $sha1entry set sha1but .tf.bar.sha1label button $sha1but -text "SHA1 ID: " -state disabled -relief flat \ - -command gotocommit -width 8 -font $uifont + -command gotocommit -width 8 -font uifont $sha1but conf -disabledforeground [$sha1but cget -foreground] pack .tf.bar.sha1label -side left - entry $sha1entry -width 40 -font $textfont -textvariable sha1string + entry $sha1entry -width 40 -font textfont -textvariable sha1string trace add variable sha1string write sha1change pack $sha1entry -side left -pady 2 @@ -642,62 +744,61 @@ proc makewindow {} { -state disabled -width 26 pack .tf.bar.rightbut -side left -fill y - button .tf.bar.findbut -text "Find" -command dofind -font $uifont - pack .tf.bar.findbut -side left + # Status label and progress bar + set statusw .tf.bar.status + label $statusw -width 15 -relief sunken -font uifont + pack $statusw -side left -padx 5 + set h [expr {[font metrics uifont -linespace] + 2}] + set progresscanv .tf.bar.progress + canvas $progresscanv -relief sunken -height $h -borderwidth 2 + set progressitem [$progresscanv create rect -1 0 0 $h -fill green] + set fprogitem [$progresscanv create rect -1 0 0 $h -fill yellow] + set rprogitem [$progresscanv create rect -1 0 0 $h -fill red] + pack $progresscanv -side right -expand 1 -fill x + set progresscoords {0 0} + set fprogcoord 0 + set rprogcoord 0 + bind $progresscanv <Configure> adjustprogress + set lastprogupdate [clock clicks -milliseconds] + set progupdatepending 0 + + # build up the bottom bar of upper window + label .tf.lbar.flabel -text "Find " -font uifont + button .tf.lbar.fnext -text "next" -command {dofind 1 1} -font uifont + button .tf.lbar.fprev -text "prev" -command {dofind -1 1} -font uifont + label .tf.lbar.flab2 -text " commit " -font uifont + pack .tf.lbar.flabel .tf.lbar.fnext .tf.lbar.fprev .tf.lbar.flab2 \ + -side left -fill y + set gdttype "containing:" + set gm [tk_optionMenu .tf.lbar.gdttype gdttype \ + "containing:" \ + "touching paths:" \ + "adding/removing string:"] + trace add variable gdttype write gdttype_change + $gm conf -font uifont + .tf.lbar.gdttype conf -font uifont + pack .tf.lbar.gdttype -side left -fill y + set findstring {} - set fstring .tf.bar.findstring + set fstring .tf.lbar.findstring lappend entries $fstring - entry $fstring -width 30 -font $textfont -textvariable findstring + entry $fstring -width 30 -font textfont -textvariable findstring trace add variable findstring write find_change - pack $fstring -side left -expand 1 -fill x -in .tf.bar set findtype Exact - set findtypemenu [tk_optionMenu .tf.bar.findtype \ + set findtypemenu [tk_optionMenu .tf.lbar.findtype \ findtype Exact IgnCase Regexp] - trace add variable findtype write find_change - .tf.bar.findtype configure -font $uifont - .tf.bar.findtype.menu configure -font $uifont + trace add variable findtype write findcom_change + .tf.lbar.findtype configure -font uifont + .tf.lbar.findtype.menu configure -font uifont set findloc "All fields" - tk_optionMenu .tf.bar.findloc findloc "All fields" Headline \ + tk_optionMenu .tf.lbar.findloc findloc "All fields" Headline \ Comments Author Committer trace add variable findloc write find_change - .tf.bar.findloc configure -font $uifont - .tf.bar.findloc.menu configure -font $uifont - pack .tf.bar.findloc -side right - pack .tf.bar.findtype -side right - - # build up the bottom bar of upper window - label .tf.lbar.flabel -text "Highlight: Commits " \ - -font $uifont - pack .tf.lbar.flabel -side left -fill y - set 
gdttype "touching paths:" - set gm [tk_optionMenu .tf.lbar.gdttype gdttype "touching paths:" \ - "adding/removing string:"] - trace add variable gdttype write hfiles_change - $gm conf -font $uifont - .tf.lbar.gdttype conf -font $uifont - pack .tf.lbar.gdttype -side left -fill y - entry .tf.lbar.fent -width 25 -font $textfont \ - -textvariable highlight_files - trace add variable highlight_files write hfiles_change - lappend entries .tf.lbar.fent - pack .tf.lbar.fent -side left -fill x -expand 1 - label .tf.lbar.vlabel -text " OR in view" -font $uifont - pack .tf.lbar.vlabel -side left -fill y - global viewhlmenu selectedhlview - set viewhlmenu [tk_optionMenu .tf.lbar.vhl selectedhlview None] - $viewhlmenu entryconf None -command delvhighlight - $viewhlmenu conf -font $uifont - .tf.lbar.vhl conf -font $uifont - pack .tf.lbar.vhl -side left -fill y - label .tf.lbar.rlabel -text " OR " -font $uifont - pack .tf.lbar.rlabel -side left -fill y - global highlight_related - set m [tk_optionMenu .tf.lbar.relm highlight_related None \ - "Descendent" "Not descendent" "Ancestor" "Not ancestor"] - $m conf -font $uifont - .tf.lbar.relm conf -font $uifont - trace add variable highlight_related write vrel_change - pack .tf.lbar.relm -side left -fill y + .tf.lbar.findloc configure -font uifont + .tf.lbar.findloc.menu configure -font uifont + pack .tf.lbar.findloc -side right + pack .tf.lbar.findtype -side right + pack $fstring -side left -expand 1 -fill x # Finish putting the upper half of the viewer together pack .tf.lbar -in .tf -side bottom -fill x @@ -722,23 +823,23 @@ proc makewindow {} { frame .bleft.mid button .bleft.top.search -text "Search" -command dosearch \ - -font $uifont + -font uifont pack .bleft.top.search -side left -padx 5 set sstring .bleft.top.sstring - entry $sstring -width 20 -font $textfont -textvariable searchstring + entry $sstring -width 20 -font textfont -textvariable searchstring lappend entries $sstring trace add variable searchstring write incrsearch pack $sstring -side left -expand 1 -fill x - radiobutton .bleft.mid.diff -text "Diff" \ + radiobutton .bleft.mid.diff -text "Diff" -font uifont \ -command changediffdisp -variable diffelide -value {0 0} - radiobutton .bleft.mid.old -text "Old version" \ + radiobutton .bleft.mid.old -text "Old version" -font uifont \ -command changediffdisp -variable diffelide -value {0 1} - radiobutton .bleft.mid.new -text "New version" \ + radiobutton .bleft.mid.new -text "New version" -font uifont \ -command changediffdisp -variable diffelide -value {1 0} label .bleft.mid.labeldiffcontext -text " Lines of context: " \ - -font $uifont + -font uifont pack .bleft.mid.diff .bleft.mid.old .bleft.mid.new -side left - spinbox .bleft.mid.diffcontext -width 5 -font $textfont \ + spinbox .bleft.mid.diffcontext -width 5 -font textfont \ -from 1 -increment 1 -to 10000000 \ -validate all -validatecommand "diffcontextvalidate %P" \ -textvariable diffcontextstring @@ -748,9 +849,11 @@ proc makewindow {} { pack .bleft.mid.labeldiffcontext .bleft.mid.diffcontext -side left set ctext .bleft.ctext text $ctext -background $bgcolor -foreground $fgcolor \ - -tabs "[expr {$tabstop * $charspc}]" \ - -state disabled -font $textfont \ + -state disabled -font textfont \ -yscrollcommand scrolltext -wrap none + if {$have_tk85} { + $ctext conf -tabstyle wordprocessor + } scrollbar .bleft.sb -command "$ctext yview" pack .bleft.top -side top -fill x pack .bleft.mid -side top -fill x @@ -760,7 +863,7 @@ proc makewindow {} { lappend fglist $ctext $ctext tag conf comment -wrap 
$wrapcomment - $ctext tag conf filesep -font [concat $textfont bold] -back "#aaaaaa" + $ctext tag conf filesep -font textfontbold -back "#aaaaaa" $ctext tag conf hunksep -fore [lindex $diffcolors 2] $ctext tag conf d0 -fore [lindex $diffcolors 0] $ctext tag conf d1 -fore [lindex $diffcolors 1] @@ -782,8 +885,8 @@ proc makewindow {} { $ctext tag conf m15 -fore "#ff70b0" $ctext tag conf mmax -fore darkgrey set mergemax 16 - $ctext tag conf mresult -font [concat $textfont bold] - $ctext tag conf msep -font [concat $textfont bold] + $ctext tag conf mresult -font textfontbold + $ctext tag conf msep -font textfontbold $ctext tag conf found -back yellow .pwbottom add .bleft @@ -794,18 +897,18 @@ proc makewindow {} { frame .bright.mode radiobutton .bright.mode.patch -text "Patch" \ -command reselectline -variable cmitmode -value "patch" - .bright.mode.patch configure -font $uifont + .bright.mode.patch configure -font uifont radiobutton .bright.mode.tree -text "Tree" \ -command reselectline -variable cmitmode -value "tree" - .bright.mode.tree configure -font $uifont + .bright.mode.tree configure -font uifont grid .bright.mode.patch .bright.mode.tree -sticky ew pack .bright.mode -side top -fill x set cflist .bright.cfiles - set indent [font measure $mainfont "nn"] + set indent [font measure mainfont "nn"] text $cflist \ -selectbackground $selectbgcolor \ -background $bgcolor -foreground $fgcolor \ - -font $mainfont \ + -font mainfont \ -tabs [list $indent [expr {2 * $indent}]] \ -yscrollcommand ".bright.sb set" \ -cursor [. cget -cursor] \ @@ -817,7 +920,7 @@ proc makewindow {} { pack $cflist -side left -fill both -expand 1 $cflist tag configure highlight \ -background [$cflist cget -selectbackground] - $cflist tag configure bold -font [concat $mainfont bold] + $cflist tag configure bold -font mainfontbold .pwbottom add .bright .ctop add .pwbottom @@ -843,6 +946,12 @@ proc makewindow {} { } else { bindall <ButtonRelease-4> "allcanvs yview scroll -5 units" bindall <ButtonRelease-5> "allcanvs yview scroll 5 units" + if {[tk windowingsystem] eq "aqua"} { + bindall <MouseWheel> { + set delta [expr {- (%D)}] + allcanvs yview scroll $delta units + } + } } bindall <2> "canvscan mark %W %x %y" bindall <B2-Motion> "canvscan dragto %W %x %y" @@ -850,8 +959,8 @@ proc makewindow {} { bindkey <End> sellastline bind . <Key-Up> "selnextline -1" bind . <Key-Down> "selnextline 1" - bind . <Shift-Key-Up> "next_highlight -1" - bind . <Shift-Key-Down> "next_highlight 1" + bind . <Shift-Key-Up> "dofind -1 0" + bind . <Shift-Key-Down> "dofind 1 0" bindkey <Key-Right> "goforw" bindkey <Key-Left> "goback" bind . <Key-Prior> "selnextpage -1" @@ -876,14 +985,14 @@ proc makewindow {} { bindkey b "$ctext yview scroll -1 pages" bindkey d "$ctext yview scroll 18 units" bindkey u "$ctext yview scroll -18 units" - bindkey / {findnext 1} - bindkey <Key-Return> {findnext 0} - bindkey ? findprev + bindkey / {dofind 1 1} + bindkey <Key-Return> {dofind 1 1} + bindkey ? {dofind -1 1} bindkey f nextfile bindkey <F5> updatecommits bind . <$M1B-q> doquit - bind . <$M1B-f> dofind - bind . <$M1B-g> {findnext 0} + bind . <$M1B-f> {dofind 1 1} + bind . <$M1B-g> {dofind 1 0} bind . <$M1B-r> dosearchback bind . <$M1B-s> dosearch bind . <$M1B-equal> {incrfont 1} @@ -892,7 +1001,7 @@ proc makewindow {} { bind . <$M1B-KP_Subtract> {incrfont -1} wm protocol . WM_DELETE_WINDOW doquit bind . 
<Button-1> "click %W" - bind $fstring <Key-Return> dofind + bind $fstring <Key-Return> {dofind 1 1} bind $sha1entry <Key-Return> gotocommit bind $sha1entry <<PasteSelection>> clearsha1 bind $cflist <1> {sel_flist %W %x %y; break} @@ -1008,12 +1117,45 @@ proc click {w} { focus . } +# Adjust the progress bar for a change in requested extent or canvas size +proc adjustprogress {} { + global progresscanv progressitem progresscoords + global fprogitem fprogcoord lastprogupdate progupdatepending + global rprogitem rprogcoord + + set w [expr {[winfo width $progresscanv] - 4}] + set x0 [expr {$w * [lindex $progresscoords 0]}] + set x1 [expr {$w * [lindex $progresscoords 1]}] + set h [winfo height $progresscanv] + $progresscanv coords $progressitem $x0 0 $x1 $h + $progresscanv coords $fprogitem 0 0 [expr {$w * $fprogcoord}] $h + $progresscanv coords $rprogitem 0 0 [expr {$w * $rprogcoord}] $h + set now [clock clicks -milliseconds] + if {$now >= $lastprogupdate + 100} { + set progupdatepending 0 + update + } elseif {!$progupdatepending} { + set progupdatepending 1 + after [expr {$lastprogupdate + 100 - $now}] doprogupdate + } +} + +proc doprogupdate {} { + global lastprogupdate progupdatepending + + if {$progupdatepending} { + set progupdatepending 0 + set lastprogupdate [clock clicks -milliseconds] + update + } +} + proc savestuff {w} { - global canv canv2 canv3 ctext cflist mainfont textfont uifont tabstop + global canv canv2 canv3 mainfont textfont uifont tabstop global stuffsaved findmergefiles maxgraphpct global maxwidth showneartags showlocalchanges global viewname viewfiles viewargs viewperm nextviewnum - global cmitmode wrapcomment datetimeformat + global cmitmode wrapcomment datetimeformat limitdiffs global colors bgcolor fgcolor diffcolors diffcontext selectbgcolor if {$stuffsaved} return @@ -1032,6 +1174,7 @@ proc savestuff {w} { puts $f [list set showneartags $showneartags] puts $f [list set showlocalchanges $showlocalchanges] puts $f [list set datetimeformat $datetimeformat] + puts $f [list set limitdiffs $limitdiffs] puts $f [list set bgcolor $bgcolor] puts $f [list set fgcolor $fgcolor] puts $f [list set colors $colors] @@ -1143,10 +1286,10 @@ Copyright © 2005-2006 Paul Mackerras Use and redistribute under the terms of the GNU General Public License} \ -justify center -aspect 400 -border 2 -bg white -relief groove pack $w.m -side top -fill x -padx 2 -pady 2 - $w.m configure -font $uifont + $w.m configure -font uifont button $w.ok -text Close -command "destroy $w" -default active pack $w.ok -side bottom - $w.ok configure -font $uifont + $w.ok configure -font uifont bind $w <Visibility> "focus $w.ok" bind $w <Key-Escape> "destroy $w" bind $w <Key-Return> "destroy $w" @@ -1184,8 +1327,8 @@ Gitk key bindings: <$M1T-Down> Scroll commit list down one line <$M1T-PageUp> Scroll commit list up one page <$M1T-PageDown> Scroll commit list down one page -<Shift-Up> Move to previous highlighted line -<Shift-Down> Move to next highlighted line +<Shift-Up> Find backwards (upwards, later commits) +<Shift-Down> Find forwards (downwards, earlier commits) <Delete>, b Scroll diff view up one page <Backspace> Scroll diff view up one page <Space> Scroll diff view down one page @@ -1207,10 +1350,10 @@ f Scroll diff view to next file " \ -justify left -bg white -border 2 -relief groove pack $w.m -side top -fill both -padx 2 -pady 2 - $w.m configure -font $uifont + $w.m configure -font uifont button $w.ok -text Close -command "destroy $w" -default active pack $w.ok -side bottom - $w.ok configure -font $uifont 
+ $w.ok configure -font uifont bind $w <Visibility> "focus $w.ok" bind $w <Key-Escape> "destroy $w" bind $w <Key-Return> "destroy $w" @@ -1583,6 +1726,7 @@ proc pop_flist_menu {w X Y x y} { global ctext cflist cmitmode flist_menu flist_menu_file global treediffs diffids + stopfinding set l [lindex [split [$w index "@$x,$y"] "."] 0] if {$l <= 1} return if {$cmitmode eq "tree"} { @@ -1596,14 +1740,15 @@ proc pop_flist_menu {w X Y x y} { } proc flist_hl {only} { - global flist_menu_file highlight_files + global flist_menu_file findstring gdttype set x [shellquote $flist_menu_file] - if {$only || $highlight_files eq {}} { - set highlight_files $x + if {$only || $findstring eq {} || $gdttype ne "touching paths:"} { + set findstring $x } else { - append highlight_files " " $x + append findstring " " $x } + set gdttype "touching paths:" } # Functions for adding and removing shell-type quoting @@ -1740,22 +1885,22 @@ proc vieweditor {top n title} { toplevel $top wm title $top $title - label $top.nl -text "Name" -font $uifont - entry $top.name -width 20 -textvariable newviewname($n) -font $uifont + label $top.nl -text "Name" -font uifont + entry $top.name -width 20 -textvariable newviewname($n) -font uifont grid $top.nl $top.name -sticky w -pady 5 checkbutton $top.perm -text "Remember this view" -variable newviewperm($n) \ - -font $uifont + -font uifont grid $top.perm - -pady 5 -sticky w - message $top.al -aspect 1000 -font $uifont \ + message $top.al -aspect 1000 -font uifont \ -text "Commits to include (arguments to git rev-list):" grid $top.al - -sticky w -pady 5 entry $top.args -width 50 -textvariable newviewargs($n) \ - -background white -font $uifont + -background white -font uifont grid $top.args - -sticky ew -padx 5 - message $top.l -aspect 1000 -font $uifont \ + message $top.l -aspect 1000 -font uifont \ -text "Enter files and directories to include, one per line:" grid $top.l - -sticky w - text $top.t -width 40 -height 10 -background white -font $uifont + text $top.t -width 40 -height 10 -background white -font uifont if {[info exists viewfiles($n)]} { foreach f $viewfiles($n) { $top.t insert end $f @@ -1767,9 +1912,9 @@ proc vieweditor {top n title} { grid $top.t - -sticky ew -padx 5 frame $top.buts button $top.buts.ok -text "OK" -command [list newviewok $top $n] \ - -font $uifont + -font uifont button $top.buts.can -text "Cancel" -command [list destroy $top] \ - -font $uifont + -font uifont grid $top.buts.ok $top.buts.can grid columnconfigure $top.buts 0 -weight 1 -uniform a grid columnconfigure $top.buts 1 -weight 1 -uniform a @@ -1788,10 +1933,10 @@ proc doviewmenu {m first cmd op argv} { } proc allviewmenus {n op args} { - global viewhlmenu + # global viewhlmenu doviewmenu .bar.view 5 [list showview $n] $op $args - doviewmenu $viewhlmenu 1 [list addvhighlight $n] $op $args + # doviewmenu $viewhlmenu 1 [list addvhighlight $n] $op $args } proc newviewok {top n} { @@ -1834,8 +1979,8 @@ proc newviewok {top n} { set viewname($n) $newviewname($n) doviewmenu .bar.view 5 [list showview $n] \ entryconf [list -label $viewname($n)] - doviewmenu $viewhlmenu 1 [list addvhighlight $n] \ - entryconf [list -label $viewname($n) -value $viewname($n)] + # doviewmenu $viewhlmenu 1 [list addvhighlight $n] \ + # entryconf [list -label $viewname($n) -value $viewname($n)] } if {$files ne $viewfiles($n) || $newargs ne $viewargs($n)} { set viewfiles($n) $files @@ -1867,8 +2012,8 @@ proc addviewmenu {n} { .bar.view add radiobutton -label $viewname($n) \ -command [list showview $n] -variable selectedview -value 
$n - $viewhlmenu add radiobutton -label $viewname($n) \ - -command [list addvhighlight $n] -variable selectedhlview + #$viewhlmenu add radiobutton -label $viewname($n) \ + # -command [list addvhighlight $n] -variable selectedhlview } proc flatten {var} { @@ -1892,17 +2037,17 @@ proc unflatten {var l} { proc showview {n} { global curview viewdata viewfiles - global displayorder parentlist rowidlist rowoffsets + global displayorder parentlist rowidlist rowisopt rowfinal global colormap rowtextx commitrow nextcolor canvxmax - global numcommits rowrangelist commitlisted idrowranges rowchk + global numcommits commitlisted global selectedline currentid canv canvy0 global treediffs global pending_select phase - global commitidx rowlaidout rowoptim + global commitidx global commfd global selectedview selectfirst global vparentlist vdisporder vcmitlisted - global hlview selectedhlview + global hlview selectedhlview commitinterest if {$n == $curview} return set selid {} @@ -1928,15 +2073,11 @@ proc showview {n} { set vparentlist($curview) $parentlist set vdisporder($curview) $displayorder set vcmitlisted($curview) $commitlisted - if {$phase ne {}} { - set viewdata($curview) \ - [list $phase $rowidlist $rowoffsets $rowrangelist \ - [flatten idrowranges] [flatten idinlist] \ - $rowlaidout $rowoptim $numcommits] - } elseif {![info exists viewdata($curview)] - || [lindex $viewdata($curview) 0] ne {}} { + if {$phase ne {} || + ![info exists viewdata($curview)] || + [lindex $viewdata($curview) 0] ne {}} { set viewdata($curview) \ - [list {} $rowidlist $rowoffsets $rowrangelist] + [list $phase $rowidlist $rowisopt $rowfinal] } } catch {unset treediffs} @@ -1945,12 +2086,14 @@ proc showview {n} { unset hlview set selectedhlview None } + catch {unset commitinterest} set curview $n set selectedview $n .bar.view entryconf Edit* -state [expr {$n == 0? "disabled": "normal"}] .bar.view entryconf Delete* -state [expr {$n == 0? 
"disabled": "normal"}] + run refill_reflist if {![info exists viewdata($n)]} { if {$selid ne {}} { set pending_select $selid @@ -1965,19 +2108,9 @@ proc showview {n} { set parentlist $vparentlist($n) set commitlisted $vcmitlisted($n) set rowidlist [lindex $v 1] - set rowoffsets [lindex $v 2] - set rowrangelist [lindex $v 3] - if {$phase eq {}} { - set numcommits [llength $displayorder] - catch {unset idrowranges} - } else { - unflatten idrowranges [lindex $v 4] - unflatten idinlist [lindex $v 5] - set rowlaidout [lindex $v 6] - set rowoptim [lindex $v 7] - set numcommits [lindex $v 8] - catch {unset rowchk} - } + set rowisopt [lindex $v 2] + set rowfinal [lindex $v 3] + set numcommits $commitidx($n) catch {unset colormap} catch {unset rowtextx} @@ -2021,7 +2154,6 @@ proc showview {n} { } elseif {$numcommits == 0} { show_status "No commits selected" } - run refill_reflist } # Stuff relating to the highlighting facility @@ -2073,12 +2205,12 @@ proc bolden_name {row font} { } proc unbolden {} { - global mainfont boldrows + global boldrows set stillbold {} foreach row $boldrows { if {![ishighlighted $row]} { - bolden $row $mainfont + bolden $row mainfont } else { lappend stillbold $row } @@ -2094,7 +2226,7 @@ proc addvhighlight {n} { } set hlview $n if {$n != $curview && ![info exists viewdata($n)]} { - set viewdata($n) [list getcommits {{}} {{}} {} {} {} 0 0 0 {}] + set viewdata($n) [list getcommits {{}} 0 0 0] set vparentlist($n) {} set vdisporder($n) {} set vcmitlisted($n) {} @@ -2117,9 +2249,8 @@ proc delvhighlight {} { proc vhighlightmore {} { global hlview vhl_done commitidx vhighlights - global displayorder vdisporder curview mainfont + global displayorder vdisporder curview - set font [concat $mainfont bold] set max $commitidx($hlview) if {$hlview == $curview} { set disp $displayorder @@ -2135,7 +2266,7 @@ proc vhighlightmore {} { set row $commitrow($curview,$id) if {$r0 <= $row && $row <= $r1} { if {![highlighted $row]} { - bolden $row $font + bolden $row mainfontbold } set vhighlights($row) 1 } @@ -2145,11 +2276,11 @@ proc vhighlightmore {} { } proc askvhighlight {row id} { - global hlview vhighlights commitrow iddrawn mainfont + global hlview vhighlights commitrow iddrawn if {[info exists commitrow($hlview,$id)]} { if {[info exists iddrawn($id)] && ![ishighlighted $row]} { - bolden $row [concat $mainfont bold] + bolden $row mainfontbold } set vhighlights($row) 1 } else { @@ -2157,9 +2288,9 @@ proc askvhighlight {row id} { } } -proc hfiles_change {name ix op} { +proc hfiles_change {} { global highlight_files filehighlight fhighlights fh_serial - global mainfont highlight_paths + global highlight_paths gdttype if {[info exists filehighlight]} { # delete previous highlights @@ -2177,6 +2308,69 @@ proc hfiles_change {name ix op} { } } +proc gdttype_change {name ix op} { + global gdttype highlight_files findstring findpattern + + stopfinding + if {$findstring ne {}} { + if {$gdttype eq "containing:"} { + if {$highlight_files ne {}} { + set highlight_files {} + hfiles_change + } + findcom_change + } else { + if {$findpattern ne {}} { + set findpattern {} + findcom_change + } + set highlight_files $findstring + hfiles_change + } + drawvisible + } + # enable/disable findtype/findloc menus too +} + +proc find_change {name ix op} { + global gdttype findstring highlight_files + + stopfinding + if {$gdttype eq "containing:"} { + findcom_change + } else { + if {$highlight_files ne $findstring} { + set highlight_files $findstring + hfiles_change + } + } + drawvisible +} + +proc findcom_change 
args { + global nhighlights boldnamerows + global findpattern findtype findstring gdttype + + stopfinding + # delete previous highlights, if any + foreach row $boldnamerows { + bolden_name $row mainfont + } + set boldnamerows {} + catch {unset nhighlights} + unbolden + unmarkmatches + if {$gdttype ne "containing:" || $findstring eq {}} { + set findpattern {} + } elseif {$findtype eq "Regexp"} { + set findpattern $findstring + } else { + set e [string map {"*" "\\*" "?" "\\?" "\[" "\\\[" "\\" "\\\\"} \ + $findstring] + set findpattern "*$e*" + } +} + proc makepatterns {l} { set ret {} foreach e $l { @@ -2199,8 +2393,11 @@ proc do_file_hl {serial} { set highlight_paths [makepatterns $paths] highlight_filelist set gdtargs [concat -- $paths] - } else { + } elseif {$gdttype eq "adding/removing string:"} { set gdtargs [list "-S$highlight_files"] + } else { + # must be "containing:", i.e. we're searching commit info + return } set cmd [concat | git diff-tree -r -s --stdin $gdtargs] set filehighlight [open $cmd r+] @@ -2230,8 +2427,8 @@ proc askfilehighlight {row id} { } proc readfhighlight {} { - global filehighlight fhighlights commitrow curview mainfont iddrawn - global fhl_list + global filehighlight fhighlights commitrow curview iddrawn + global fhl_list find_dirn if {![info exists filehighlight]} { return 0 @@ -2252,7 +2449,7 @@ proc readfhighlight {} { if {![info exists commitrow($curview,$line)]} continue set row $commitrow($curview,$line) if {[info exists iddrawn($line)] && ![ishighlighted $row]} { - bolden $row [concat $mainfont bold] + bolden $row mainfontbold } set fhighlights($row) 1 } @@ -2263,35 +2460,17 @@ proc readfhighlight {} { unset filehighlight return 0 } - next_hlcont - return 1 -} - -proc find_change {name ix op} { - global nhighlights mainfont boldnamerows - global findstring findpattern findtype - - # delete previous highlights, if any - foreach row $boldnamerows { - bolden_name $row $mainfont - } - set boldnamerows {} - catch {unset nhighlights} - unbolden - unmarkmatches - if {$findtype ne "Regexp"} { - set e [string map {"*" "\\*" "?" "\\?" 
"\[" "\\\[" "\\" "\\\\"} \ - $findstring] - set findpattern "*$e*" + if {[info exists find_dirn]} { + run findmore } - drawvisible + return 1 } proc doesmatch {f} { - global findtype findstring findpattern + global findtype findpattern if {$findtype eq "Regexp"} { - return [regexp $findstring $f] + return [regexp $findpattern $f] } elseif {$findtype eq "IgnCase"} { return [string match -nocase $findpattern $f] } else { @@ -2300,7 +2479,7 @@ proc doesmatch {f} { } proc askfindhighlight {row id} { - global nhighlights commitinfo iddrawn mainfont + global nhighlights commitinfo iddrawn global findloc global markingmatches @@ -2321,11 +2500,10 @@ proc askfindhighlight {row id} { } } if {$isbold && [info exists iddrawn($id)]} { - set f [concat $mainfont bold] if {![ishighlighted $row]} { - bolden $row $f + bolden $row mainfontbold if {$isbold > 1} { - bolden_name $row $f + bolden_name $row mainfontbold } } if {$markingmatches} { @@ -2454,7 +2632,7 @@ proc is_ancestor {a} { } proc askrelhighlight {row id} { - global descendent highlight_related iddrawn mainfont rhighlights + global descendent highlight_related iddrawn rhighlights global selectedline ancestor if {![info exists selectedline]} return @@ -2478,87 +2656,12 @@ proc askrelhighlight {row id} { } if {[info exists iddrawn($id)]} { if {$isbold && ![ishighlighted $row]} { - bolden $row [concat $mainfont bold] + bolden $row mainfontbold } } set rhighlights($row) $isbold } -proc next_hlcont {} { - global fhl_row fhl_dirn displayorder numcommits - global vhighlights fhighlights nhighlights rhighlights - global hlview filehighlight findstring highlight_related - - if {![info exists fhl_dirn] || $fhl_dirn == 0} return - set row $fhl_row - while {1} { - if {$row < 0 || $row >= $numcommits} { - bell - set fhl_dirn 0 - return - } - set id [lindex $displayorder $row] - if {[info exists hlview]} { - if {![info exists vhighlights($row)]} { - askvhighlight $row $id - } - if {$vhighlights($row) > 0} break - } - if {$findstring ne {}} { - if {![info exists nhighlights($row)]} { - askfindhighlight $row $id - } - if {$nhighlights($row) > 0} break - } - if {$highlight_related ne "None"} { - if {![info exists rhighlights($row)]} { - askrelhighlight $row $id - } - if {$rhighlights($row) > 0} break - } - if {[info exists filehighlight]} { - if {![info exists fhighlights($row)]} { - # ask for a few more while we're at it... 
- set r $row - for {set n 0} {$n < 100} {incr n} { - if {![info exists fhighlights($r)]} { - askfilehighlight $r [lindex $displayorder $r] - } - incr r $fhl_dirn - if {$r < 0 || $r >= $numcommits} break - } - flushhighlights - } - if {$fhighlights($row) < 0} { - set fhl_row $row - return - } - if {$fhighlights($row) > 0} break - } - incr row $fhl_dirn - } - set fhl_dirn 0 - selectline $row 1 -} - -proc next_highlight {dirn} { - global selectedline fhl_row fhl_dirn - global hlview filehighlight findstring highlight_related - - if {![info exists selectedline]} return - if {!([info exists hlview] || $findstring ne {} || - $highlight_related ne "None" || [info exists filehighlight])} return - set fhl_row [expr {$selectedline + $dirn}] - set fhl_dirn $dirn - next_hlcont -} - -proc cancel_next_highlight {} { - global fhl_dirn - - set fhl_dirn 0 -} - # Graph layout functions proc shortids {ids} { @@ -2575,108 +2678,43 @@ proc shortids {ids} { return $res } -proc incrange {l x o} { - set n [llength $l] - while {$x < $n} { - set e [lindex $l $x] - if {$e ne {}} { - lset l $x [expr {$e + $o}] - } - incr x - } - return $l -} - proc ntimes {n o} { set ret {} - for {} {$n > 0} {incr n -1} { - lappend ret $o - } - return $ret -} - -proc usedinrange {id l1 l2} { - global children commitrow curview - - if {[info exists commitrow($curview,$id)]} { - set r $commitrow($curview,$id) - if {$l1 <= $r && $r <= $l2} { - return [expr {$r - $l1 + 1}] - } - } - set kids $children($curview,$id) - foreach c $kids { - set r $commitrow($curview,$c) - if {$l1 <= $r && $r <= $l2} { - return [expr {$r - $l1 + 1}] + set o [list $o] + for {set mask 1} {$mask <= $n} {incr mask $mask} { + if {($n & $mask) != 0} { + set ret [concat $ret $o] } + set o [concat $o $o] } - return 0 + return $ret } -proc sanity {row {full 0}} { - global rowidlist rowoffsets +# Work out where id should go in idlist so that order-token +# values increase from left to right +proc idcol {idlist id {i 0}} { + global ordertok curview - set col -1 - set ids [lindex $rowidlist $row] - foreach id $ids { - incr col - if {$id eq {}} continue - if {$col < [llength $ids] - 1 && - [lsearch -exact -start [expr {$col+1}] $ids $id] >= 0} { - puts "oops: [shortids $id] repeated in row $row col $col: {[shortids [lindex $rowidlist $row]]}" - } - set o [lindex $rowoffsets $row $col] - set y $row - set x $col - while {$o ne {}} { - incr y -1 - incr x $o - if {[lindex $rowidlist $y $x] != $id} { - puts "oops: rowoffsets wrong at row [expr {$y+1}] col [expr {$x-$o}]" - puts " id=[shortids $id] check started at row $row" - for {set i $row} {$i >= $y} {incr i -1} { - puts " row $i ids={[shortids [lindex $rowidlist $i]]} offs={[lindex $rowoffsets $i]}" - } - break - } - if {!$full} break - set o [lindex $rowoffsets $y $x] + set t $ordertok($curview,$id) + if {$i >= [llength $idlist] || + $t < $ordertok($curview,[lindex $idlist $i])} { + if {$i > [llength $idlist]} { + set i [llength $idlist] } - } -} - -proc makeuparrow {oid x y z} { - global rowidlist rowoffsets uparrowlen idrowranges displayorder - - for {set i 1} {$i < $uparrowlen && $y > 1} {incr i} { - incr y -1 - incr x $z - set off0 [lindex $rowoffsets $y] - for {set x0 $x} {1} {incr x0} { - if {$x0 >= [llength $off0]} { - set x0 [llength [lindex $rowoffsets [expr {$y-1}]]] - break - } - set z [lindex $off0 $x0] - if {$z ne {}} { - incr x0 $z - break - } + while {[incr i -1] >= 0 && + $t < $ordertok($curview,[lindex $idlist $i])} {} + incr i + } else { + if {$t > $ordertok($curview,[lindex $idlist $i])} { + while 
{[incr i] < [llength $idlist] && + $t >= $ordertok($curview,[lindex $idlist $i])} {} } - set z [expr {$x0 - $x}] - lset rowidlist $y [linsert [lindex $rowidlist $y] $x $oid] - lset rowoffsets $y [linsert [lindex $rowoffsets $y] $x $z] } - set tmp [lreplace [lindex $rowoffsets $y] $x $x {}] - lset rowoffsets $y [incrange $tmp [expr {$x+1}] -1] - lappend idrowranges($oid) [lindex $displayorder $y] + return $i } proc initlayout {} { - global rowidlist rowoffsets displayorder commitlisted - global rowlaidout rowoptim - global idinlist rowchk rowrangelist idrowranges + global rowidlist rowisopt rowfinal displayorder commitlisted global numcommits canvxmax canv global nextcolor global parentlist @@ -2687,18 +2725,13 @@ proc initlayout {} { set displayorder {} set commitlisted {} set parentlist {} - set rowrangelist {} set nextcolor 0 - set rowidlist {{}} - set rowoffsets {{}} - catch {unset idinlist} - catch {unset rowchk} - set rowlaidout 0 - set rowoptim 0 + set rowidlist {} + set rowisopt {} + set rowfinal {} set canvxmax [$canv cget -width] catch {unset colormap} catch {unset rowtextx} - catch {unset idrowranges} set selectfirst 1 } @@ -2730,61 +2763,19 @@ proc visiblerows {} { return [list $r0 $r1] } -proc layoutmore {tmax allread} { - global rowlaidout rowoptim commitidx numcommits optim_delay - global uparrowlen curview rowidlist idinlist +proc layoutmore {} { + global commitidx viewcomplete numcommits + global uparrowlen downarrowlen mingaplen curview - set showlast 0 - set showdelay $optim_delay - set optdelay [expr {$uparrowlen + 1}] - while {1} { - if {$rowoptim - $showdelay > $numcommits} { - showstuff [expr {$rowoptim - $showdelay}] $showlast - } elseif {$rowlaidout - $optdelay > $rowoptim} { - set nr [expr {$rowlaidout - $optdelay - $rowoptim}] - if {$nr > 100} { - set nr 100 - } - optimize_rows $rowoptim 0 [expr {$rowoptim + $nr}] - incr rowoptim $nr - } elseif {$commitidx($curview) > $rowlaidout} { - set nr [expr {$commitidx($curview) - $rowlaidout}] - # may need to increase this threshold if uparrowlen or - # mingaplen are increased... 
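The rewritten ntimes a little further up builds its result by repeated doubling rather than by n separate lappends, so a large pad list takes only log-many concat calls. It is copied here with a small usage check, in the spirit of how insert_pad and layoutrows call it:

    proc ntimes {n o} {
        set ret {}
        set o [list $o]
        for {set mask 1} {$mask <= $n} {incr mask $mask} {
            if {($n & $mask) != 0} {
                set ret [concat $ret $o]
            }
            set o [concat $o $o]
        }
        return $ret
    }
    puts [llength [ntimes 5 {}]]   ;# -> 5 (five empty elements)
    puts [ntimes 3 0]              ;# -> 0 0 0
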
- if {$nr > 150} { - set nr 150 - } - set row $rowlaidout - set rowlaidout [layoutrows $row [expr {$row + $nr}] $allread] - if {$rowlaidout == $row} { - return 0 - } - } elseif {$allread} { - set optdelay 0 - set nrows $commitidx($curview) - if {[lindex $rowidlist $nrows] ne {} || - [array names idinlist] ne {}} { - layouttail - set rowlaidout $commitidx($curview) - } elseif {$rowoptim == $nrows} { - set showdelay 0 - set showlast 1 - if {$numcommits == $nrows} { - return 0 - } - } - } else { - return 0 - } - if {$tmax ne {} && [clock clicks -milliseconds] >= $tmax} { - return 1 - } + set show $commitidx($curview) + if {$show > $numcommits || $viewcomplete($curview)} { + showstuff $show $viewcomplete($curview) } } proc showstuff {canshow last} { global numcommits commitrow pending_select selectedline curview - global lookingforhead mainheadid displayorder selectfirst + global mainheadid displayorder selectfirst global lastscrollset commitinterest if {$numcommits == 0} { @@ -2792,15 +2783,6 @@ proc showstuff {canshow last} { set phase "incrdraw" allcanvs delete all } - for {set l $numcommits} {$l < $canshow} {incr l} { - set id [lindex $displayorder $l] - if {[info exists commitinterest($id)]} { - foreach script $commitinterest($id) { - eval [string map [list "%I" $id] $script] - } - unset commitinterest($id) - } - } set r0 $numcommits set prev $numcommits set numcommits $canshow @@ -2831,28 +2813,22 @@ proc showstuff {canshow last} { set selectfirst 0 } } - if {$lookingforhead && [info exists commitrow($curview,$mainheadid)] - && ($last || $commitrow($curview,$mainheadid) < $numcommits - 1)} { - set lookingforhead 0 - dodiffindex - } } proc doshowlocalchanges {} { - global lookingforhead curview mainheadid phase commitrow + global curview mainheadid phase commitrow if {[info exists commitrow($curview,$mainheadid)] && ($phase eq {} || $commitrow($curview,$mainheadid) < $numcommits - 1)} { dodiffindex } elseif {$phase ne {}} { - set lookingforhead 1 + lappend commitinterest($mainheadid) {} } } proc dohidelocalchanges {} { - global lookingforhead localfrow localirow lserial + global localfrow localirow lserial - set lookingforhead 0 if {$localfrow >= 0} { removerow $localfrow set localfrow -1 @@ -2869,8 +2845,9 @@ proc dohidelocalchanges {} { # spawn off a process to do git diff-index --cached HEAD proc dodiffindex {} { - global localirow localfrow lserial + global localirow localfrow lserial showlocalchanges + if {!$showlocalchanges} return incr lserial set localfrow -1 set localirow -1 @@ -2941,207 +2918,325 @@ proc readdifffiles {fd serial} { return 0 } -proc layoutrows {row endrow last} { - global rowidlist rowoffsets displayorder - global uparrowlen downarrowlen maxwidth mingaplen - global children parentlist - global idrowranges - global commitidx curview - global idinlist rowchk rowrangelist +proc nextuse {id row} { + global commitrow curview children - set idlist [lindex $rowidlist $row] - set offs [lindex $rowoffsets $row] - while {$row < $endrow} { - set id [lindex $displayorder $row] - set nev [expr {[llength $idlist] - $maxwidth + 1}] - foreach p [lindex $parentlist $row] { - if {![info exists idinlist($p)] || !$idinlist($p)} { - incr nev - } - } - if {$nev > 0} { - if {!$last && - $row + $uparrowlen + $mingaplen >= $commitidx($curview)} break - for {set x [llength $idlist]} {[incr x -1] >= 0} {} { - set i [lindex $idlist $x] - if {![info exists rowchk($i)] || $row >= $rowchk($i)} { - set r [usedinrange $i [expr {$row - $downarrowlen}] \ - [expr {$row + $uparrowlen + $mingaplen}]] 
- if {$r == 0} { - set idlist [lreplace $idlist $x $x] - set offs [lreplace $offs $x $x] - set offs [incrange $offs $x 1] - set idinlist($i) 0 - set rm1 [expr {$row - 1}] - lappend idrowranges($i) [lindex $displayorder $rm1] - if {[incr nev -1] <= 0} break - continue - } - set rowchk($i) [expr {$row + $r}] - } + if {[info exists children($curview,$id)]} { + foreach kid $children($curview,$id) { + if {![info exists commitrow($curview,$kid)]} { + return -1 + } + if {$commitrow($curview,$kid) > $row} { + return $commitrow($curview,$kid) } - lset rowidlist $row $idlist - lset rowoffsets $row $offs } - set oldolds {} - set newolds {} - foreach p [lindex $parentlist $row] { - if {![info exists idinlist($p)]} { - lappend newolds $p - } elseif {!$idinlist($p)} { - lappend oldolds $p + } + if {[info exists commitrow($curview,$id)]} { + return $commitrow($curview,$id) + } + return -1 +} + +proc prevuse {id row} { + global commitrow curview children + + set ret -1 + if {[info exists children($curview,$id)]} { + foreach kid $children($curview,$id) { + if {![info exists commitrow($curview,$kid)]} break + if {$commitrow($curview,$kid) < $row} { + set ret $commitrow($curview,$kid) } - set idinlist($p) 1 } - set col [lsearch -exact $idlist $id] - if {$col < 0} { - set col [llength $idlist] - lappend idlist $id - lset rowidlist $row $idlist - set z {} - if {$children($curview,$id) ne {}} { - set z [expr {[llength [lindex $rowidlist [expr {$row-1}]]] - $col}] - unset idinlist($id) - } - lappend offs $z - lset rowoffsets $row $offs - if {$z ne {}} { - makeuparrow $id $col $row $z + } + return $ret +} + +proc make_idlist {row} { + global displayorder parentlist uparrowlen downarrowlen mingaplen + global commitidx curview ordertok children commitrow + + set r [expr {$row - $mingaplen - $downarrowlen - 1}] + if {$r < 0} { + set r 0 + } + set ra [expr {$row - $downarrowlen}] + if {$ra < 0} { + set ra 0 + } + set rb [expr {$row + $uparrowlen}] + if {$rb > $commitidx($curview)} { + set rb $commitidx($curview) + } + set ids {} + for {} {$r < $ra} {incr r} { + set nextid [lindex $displayorder [expr {$r + 1}]] + foreach p [lindex $parentlist $r] { + if {$p eq $nextid} continue + set rn [nextuse $p $r] + if {$rn >= $row && + $rn <= $r + $downarrowlen + $mingaplen + $uparrowlen} { + lappend ids [list $ordertok($curview,$p) $p] } - } else { - unset idinlist($id) - } - set ranges {} - if {[info exists idrowranges($id)]} { - set ranges $idrowranges($id) - lappend ranges $id - unset idrowranges($id) - } - lappend rowrangelist $ranges - incr row - set offs [ntimes [llength $idlist] 0] - set l [llength $newolds] - set idlist [eval lreplace \$idlist $col $col $newolds] - set o 0 - if {$l != 1} { - set offs [lrange $offs 0 [expr {$col - 1}]] - foreach x $newolds { - lappend offs {} - incr o -1 - } - incr o - set tmp [expr {[llength $idlist] - [llength $offs]}] - if {$tmp > 0} { - set offs [concat $offs [ntimes $tmp $o]] + } + } + for {} {$r < $row} {incr r} { + set nextid [lindex $displayorder [expr {$r + 1}]] + foreach p [lindex $parentlist $r] { + if {$p eq $nextid} continue + set rn [nextuse $p $r] + if {$rn < 0 || $rn >= $row} { + lappend ids [list $ordertok($curview,$p) $p] } - } else { - lset offs $col {} } - foreach i $newolds { - set idrowranges($i) $id + } + set id [lindex $displayorder $row] + lappend ids [list $ordertok($curview,$id) $id] + while {$r < $rb} { + foreach p [lindex $parentlist $r] { + set firstkid [lindex $children($curview,$p) 0] + if {$commitrow($curview,$firstkid) < $row} { + lappend ids [list 
$ordertok($curview,$p) $p] + } } - incr col $l - foreach oid $oldolds { - set idlist [linsert $idlist $col $oid] - set offs [linsert $offs $col $o] - makeuparrow $oid $col $row $o - incr col + incr r + set id [lindex $displayorder $r] + if {$id ne {}} { + set firstkid [lindex $children($curview,$id) 0] + if {$firstkid ne {} && $commitrow($curview,$firstkid) < $row} { + lappend ids [list $ordertok($curview,$id) $id] + } } - lappend rowidlist $idlist - lappend rowoffsets $offs } - return $row + set idlist {} + foreach idx [lsort -unique $ids] { + lappend idlist [lindex $idx 1] + } + return $idlist } -proc addextraid {id row} { - global displayorder commitrow commitinfo - global commitidx commitlisted - global parentlist children curview +proc rowsequal {a b} { + while {[set i [lsearch -exact $a {}]] >= 0} { + set a [lreplace $a $i $i] + } + while {[set i [lsearch -exact $b {}]] >= 0} { + set b [lreplace $b $i $i] + } + return [expr {$a eq $b}] +} - incr commitidx($curview) - lappend displayorder $id - lappend commitlisted 0 - lappend parentlist {} - set commitrow($curview,$id) $row - readcommit $id - if {![info exists commitinfo($id)]} { - set commitinfo($id) {"No commit information available"} +proc makeupline {id row rend col} { + global rowidlist uparrowlen downarrowlen mingaplen + + for {set r $rend} {1} {set r $rstart} { + set rstart [prevuse $id $r] + if {$rstart < 0} return + if {$rstart < $row} break } - if {![info exists children($curview,$id)]} { - set children($curview,$id) {} + if {$rstart + $uparrowlen + $mingaplen + $downarrowlen < $rend} { + set rstart [expr {$rend - $uparrowlen - 1}] + } + for {set r $rstart} {[incr r] <= $row} {} { + set idlist [lindex $rowidlist $r] + if {$idlist ne {} && [lsearch -exact $idlist $id] < 0} { + set col [idcol $idlist $id $col] + lset rowidlist $r [linsert $idlist $col $id] + changedrow $r + } } } -proc layouttail {} { - global rowidlist rowoffsets idinlist commitidx curview - global idrowranges rowrangelist +proc layoutrows {row endrow} { + global rowidlist rowisopt rowfinal displayorder + global uparrowlen downarrowlen maxwidth mingaplen + global children parentlist + global commitidx viewcomplete curview commitrow - set row $commitidx($curview) - set idlist [lindex $rowidlist $row] - while {$idlist ne {}} { - set col [expr {[llength $idlist] - 1}] - set id [lindex $idlist $col] - addextraid $id $row - catch {unset idinlist($id)} - lappend idrowranges($id) $id - lappend rowrangelist $idrowranges($id) - unset idrowranges($id) - incr row - set offs [ntimes $col 0] - set idlist [lreplace $idlist $col $col] - lappend rowidlist $idlist - lappend rowoffsets $offs - } - - foreach id [array names idinlist] { - unset idinlist($id) - addextraid $id $row - lset rowidlist $row [list $id] - lset rowoffsets $row 0 - makeuparrow $id 0 $row 0 - lappend idrowranges($id) $id - lappend rowrangelist $idrowranges($id) - unset idrowranges($id) - incr row - lappend rowidlist {} - lappend rowoffsets {} + set idlist {} + if {$row > 0} { + set rm1 [expr {$row - 1}] + foreach id [lindex $rowidlist $rm1] { + if {$id ne {}} { + lappend idlist $id + } + } + set final [lindex $rowfinal $rm1] + } + for {} {$row < $endrow} {incr row} { + set rm1 [expr {$row - 1}] + if {$rm1 < 0 || $idlist eq {}} { + set idlist [make_idlist $row] + set final 1 + } else { + set id [lindex $displayorder $rm1] + set col [lsearch -exact $idlist $id] + set idlist [lreplace $idlist $col $col] + foreach p [lindex $parentlist $rm1] { + if {[lsearch -exact $idlist $p] < 0} { + set col [idcol $idlist 
$p $col] + set idlist [linsert $idlist $col $p] + # if not the first child, we have to insert a line going up + if {$id ne [lindex $children($curview,$p) 0]} { + makeupline $p $rm1 $row $col + } + } + } + set id [lindex $displayorder $row] + if {$row > $downarrowlen} { + set termrow [expr {$row - $downarrowlen - 1}] + foreach p [lindex $parentlist $termrow] { + set i [lsearch -exact $idlist $p] + if {$i < 0} continue + set nr [nextuse $p $termrow] + if {$nr < 0 || $nr >= $row + $mingaplen + $uparrowlen} { + set idlist [lreplace $idlist $i $i] + } + } + } + set col [lsearch -exact $idlist $id] + if {$col < 0} { + set col [idcol $idlist $id] + set idlist [linsert $idlist $col $id] + if {$children($curview,$id) ne {}} { + makeupline $id $rm1 $row $col + } + } + set r [expr {$row + $uparrowlen - 1}] + if {$r < $commitidx($curview)} { + set x $col + foreach p [lindex $parentlist $r] { + if {[lsearch -exact $idlist $p] >= 0} continue + set fk [lindex $children($curview,$p) 0] + if {$commitrow($curview,$fk) < $row} { + set x [idcol $idlist $p $x] + set idlist [linsert $idlist $x $p] + } + } + if {[incr r] < $commitidx($curview)} { + set p [lindex $displayorder $r] + if {[lsearch -exact $idlist $p] < 0} { + set fk [lindex $children($curview,$p) 0] + if {$fk ne {} && $commitrow($curview,$fk) < $row} { + set x [idcol $idlist $p $x] + set idlist [linsert $idlist $x $p] + } + } + } + } + } + if {$final && !$viewcomplete($curview) && + $row + $uparrowlen + $mingaplen + $downarrowlen + >= $commitidx($curview)} { + set final 0 + } + set l [llength $rowidlist] + if {$row == $l} { + lappend rowidlist $idlist + lappend rowisopt 0 + lappend rowfinal $final + } elseif {$row < $l} { + if {![rowsequal $idlist [lindex $rowidlist $row]]} { + lset rowidlist $row $idlist + changedrow $row + } + lset rowfinal $row $final + } else { + set pad [ntimes [expr {$row - $l}] {}] + set rowidlist [concat $rowidlist $pad] + lappend rowidlist $idlist + set rowfinal [concat $rowfinal $pad] + lappend rowfinal $final + set rowisopt [concat $rowisopt [ntimes [expr {$row - $l + 1}] 0]] + } + } + return $row +} + +proc changedrow {row} { + global displayorder iddrawn rowisopt need_redisplay + + set l [llength $rowisopt] + if {$row < $l} { + lset rowisopt $row 0 + if {$row + 1 < $l} { + lset rowisopt [expr {$row + 1}] 0 + if {$row + 2 < $l} { + lset rowisopt [expr {$row + 2}] 0 + } + } + } + set id [lindex $displayorder $row] + if {[info exists iddrawn($id)]} { + set need_redisplay 1 } } proc insert_pad {row col npad} { - global rowidlist rowoffsets + global rowidlist set pad [ntimes $npad {}] - lset rowidlist $row [eval linsert [list [lindex $rowidlist $row]] $col $pad] - set tmp [eval linsert [list [lindex $rowoffsets $row]] $col $pad] - lset rowoffsets $row [incrange $tmp [expr {$col + $npad}] [expr {-$npad}]] + set idlist [lindex $rowidlist $row] + set bef [lrange $idlist 0 [expr {$col - 1}]] + set aft [lrange $idlist $col end] + set i [lsearch -exact $aft {}] + if {$i > 0} { + set aft [lreplace $aft $i $i] + } + lset rowidlist $row [concat $bef $pad $aft] + changedrow $row } proc optimize_rows {row col endrow} { - global rowidlist rowoffsets displayorder + global rowidlist rowisopt displayorder curview children - for {} {$row < $endrow} {incr row} { - set idlist [lindex $rowidlist $row] - set offs [lindex $rowoffsets $row] + if {$row < 1} { + set row 1 + } + for {} {$row < $endrow} {incr row; set col 0} { + if {[lindex $rowisopt $row]} continue set haspad 0 - for {} {$col < [llength $offs]} {incr col} { - if {[lindex $idlist 
$col] eq {}} { + set y0 [expr {$row - 1}] + set ym [expr {$row - 2}] + set idlist [lindex $rowidlist $row] + set previdlist [lindex $rowidlist $y0] + if {$idlist eq {} || $previdlist eq {}} continue + if {$ym >= 0} { + set pprevidlist [lindex $rowidlist $ym] + if {$pprevidlist eq {}} continue + } else { + set pprevidlist {} + } + set x0 -1 + set xm -1 + for {} {$col < [llength $idlist]} {incr col} { + set id [lindex $idlist $col] + if {[lindex $previdlist $col] eq $id} continue + if {$id eq {}} { set haspad 1 continue } - set z [lindex $offs $col] - if {$z eq {}} continue + set x0 [lsearch -exact $previdlist $id] + if {$x0 < 0} continue + set z [expr {$x0 - $col}] set isarrow 0 - set x0 [expr {$col + $z}] - set y0 [expr {$row - 1}] - set z0 [lindex $rowoffsets $y0 $x0] + set z0 {} + if {$ym >= 0} { + set xm [lsearch -exact $pprevidlist $id] + if {$xm >= 0} { + set z0 [expr {$xm - $x0}] + } + } if {$z0 eq {}} { - set id [lindex $idlist $col] - set ranges [rowranges $id] - if {$ranges ne {} && $y0 > [lindex $ranges 0]} { + # if row y0 is the first child of $id then it's not an arrow + if {[lindex $children($curview,$id) 0] ne + [lindex $displayorder $y0]} { set isarrow 1 } } + if {!$isarrow && $id ne [lindex $displayorder $row] && + [lsearch -exact [lindex $rowidlist [expr {$row+1}]] $id] < 0} { + set isarrow 1 + } # Looking at lines from this row to the previous row, # make them go straight up if they end in an arrow on # the previous row; otherwise make them go straight up @@ -3150,43 +3245,32 @@ proc optimize_rows {row col endrow} { # Line currently goes left too much; # insert pads in the previous row, then optimize it set npad [expr {-1 - $z + $isarrow}] - set offs [incrange $offs $col $npad] insert_pad $y0 $x0 $npad if {$y0 > 0} { optimize_rows $y0 $x0 $row } - set z [lindex $offs $col] - set x0 [expr {$col + $z}] - set z0 [lindex $rowoffsets $y0 $x0] + set previdlist [lindex $rowidlist $y0] + set x0 [lsearch -exact $previdlist $id] + set z [expr {$x0 - $col}] + if {$z0 ne {}} { + set pprevidlist [lindex $rowidlist $ym] + set xm [lsearch -exact $pprevidlist $id] + set z0 [expr {$xm - $x0}] + } } elseif {$z > 1 || ($z > 0 && $isarrow)} { # Line currently goes right too much; - # insert pads in this line and adjust the next's rowoffsets + # insert pads in this line set npad [expr {$z - 1 + $isarrow}] - set y1 [expr {$row + 1}] - set offs2 [lindex $rowoffsets $y1] - set x1 -1 - foreach z $offs2 { - incr x1 - if {$z eq {} || $x1 + $z < $col} continue - if {$x1 + $z > $col} { - incr npad - } - lset rowoffsets $y1 [incrange $offs2 $x1 $npad] - break - } - set pad [ntimes $npad {}] - set idlist [eval linsert \$idlist $col $pad] - set tmp [eval linsert \$offs $col $pad] + insert_pad $row $col $npad + set idlist [lindex $rowidlist $row] incr col $npad - set offs [incrange $tmp $col [expr {-$npad}]] - set z [lindex $offs $col] + set z [expr {$x0 - $col}] set haspad 1 } - if {$z0 eq {} && !$isarrow} { + if {$z0 eq {} && !$isarrow && $ym >= 0} { # this line links to its first child on row $row-2 - set rm2 [expr {$row - 2}] - set id [lindex $displayorder $rm2] - set xc [lsearch -exact [lindex $rowidlist $rm2] $id] + set id [lindex $displayorder $ym] + set xc [lsearch -exact $pprevidlist $id] if {$xc >= 0} { set z0 [expr {$xc - $x0}] } @@ -3194,52 +3278,35 @@ proc optimize_rows {row col endrow} { # avoid lines jigging left then immediately right if {$z0 ne {} && $z < 0 && $z0 > 0} { insert_pad $y0 $x0 1 - set offs [incrange $offs $col 1] - optimize_rows $y0 [expr {$x0 + 1}] $row + incr x0 + 
optimize_rows $y0 $x0 $row + set previdlist [lindex $rowidlist $y0] } } if {!$haspad} { - set o {} # Find the first column that doesn't have a line going right for {set col [llength $idlist]} {[incr col -1] >= 0} {} { - set o [lindex $offs $col] - if {$o eq {}} { + set id [lindex $idlist $col] + if {$id eq {}} break + set x0 [lsearch -exact $previdlist $id] + if {$x0 < 0} { # check if this is the link to the first child - set id [lindex $idlist $col] - set ranges [rowranges $id] - if {$ranges ne {} && $row == [lindex $ranges 0]} { + set kid [lindex $displayorder $y0] + if {[lindex $children($curview,$id) 0] eq $kid} { # it is, work out offset to child - set y0 [expr {$row - 1}] - set id [lindex $displayorder $y0] - set x0 [lsearch -exact [lindex $rowidlist $y0] $id] - if {$x0 >= 0} { - set o [expr {$x0 - $col}] - } + set x0 [lsearch -exact $previdlist $kid] } } - if {$o eq {} || $o <= 0} break + if {$x0 <= $col} break } # Insert a pad at that column as long as it has a line and - # isn't the last column, and adjust the next row' offsets - if {$o ne {} && [incr col] < [llength $idlist]} { - set y1 [expr {$row + 1}] - set offs2 [lindex $rowoffsets $y1] - set x1 -1 - foreach z $offs2 { - incr x1 - if {$z eq {} || $x1 + $z < $col} continue - lset rowoffsets $y1 [incrange $offs2 $x1 1] - break - } + # isn't the last column + if {$x0 >= 0 && [incr col] < [llength $idlist]} { set idlist [linsert $idlist $col {}] - set tmp [linsert $offs $col {}] - incr col - set offs [incrange $tmp $col -1] + lset rowidlist $row $idlist + changedrow $row } } - lset rowidlist $row $idlist - lset rowoffsets $row $offs - set col 0 } } @@ -3264,51 +3331,64 @@ proc linewidth {id} { } proc rowranges {id} { - global phase idrowranges commitrow rowlaidout rowrangelist curview - - set ranges {} - if {$phase eq {} || - ([info exists commitrow($curview,$id)] - && $commitrow($curview,$id) < $rowlaidout)} { - set ranges [lindex $rowrangelist $commitrow($curview,$id)] - } elseif {[info exists idrowranges($id)]} { - set ranges $idrowranges($id) - } - set linenos {} - foreach rid $ranges { - lappend linenos $commitrow($curview,$rid) - } - if {$linenos ne {}} { - lset linenos 0 [expr {[lindex $linenos 0] + 1}] - } - return $linenos -} - -# work around tk8.4 refusal to draw arrows on diagonal segments -proc adjarrowhigh {coords} { - global linespc - - set x0 [lindex $coords 0] - set x1 [lindex $coords 2] - if {$x0 != $x1} { - set y0 [lindex $coords 1] - set y1 [lindex $coords 3] - if {$y0 - $y1 <= 2 * $linespc && $x1 == [lindex $coords 4]} { - # we have a nearby vertical segment, just trim off the diag bit - set coords [lrange $coords 2 end] + global commitrow curview children uparrowlen downarrowlen + global rowidlist + + set kids $children($curview,$id) + if {$kids eq {}} { + return {} + } + set ret {} + lappend kids $id + foreach child $kids { + if {![info exists commitrow($curview,$child)]} break + set row $commitrow($curview,$child) + if {![info exists prev]} { + lappend ret [expr {$row + 1}] } else { - set slope [expr {($x0 - $x1) / ($y0 - $y1)}] - set xi [expr {$x0 - $slope * $linespc / 2}] - set yi [expr {$y0 - $linespc / 2}] - set coords [lreplace $coords 0 1 $xi $y0 $xi $yi] + if {$row <= $prevrow} { + puts "oops children out of order [shortids $id] $row < [shortids $prev] $prevrow" + } + # see if the line extends the whole way from prevrow to row + if {$row > $prevrow + $uparrowlen + $downarrowlen && + [lsearch -exact [lindex $rowidlist \ + [expr {int(($row + $prevrow) / 2)}]] $id] < 0} { + # it doesn't, see where it 
ends + set r [expr {$prevrow + $downarrowlen}] + if {[lsearch -exact [lindex $rowidlist $r] $id] < 0} { + while {[incr r -1] > $prevrow && + [lsearch -exact [lindex $rowidlist $r] $id] < 0} {} + } else { + while {[incr r] <= $row && + [lsearch -exact [lindex $rowidlist $r] $id] >= 0} {} + incr r -1 + } + lappend ret $r + # see where it starts up again + set r [expr {$row - $uparrowlen}] + if {[lsearch -exact [lindex $rowidlist $r] $id] < 0} { + while {[incr r] < $row && + [lsearch -exact [lindex $rowidlist $r] $id] < 0} {} + } else { + while {[incr r -1] >= $prevrow && + [lsearch -exact [lindex $rowidlist $r] $id] >= 0} {} + incr r + } + lappend ret $r + } + } + if {$child eq $id} { + lappend ret $row } + set prev $id + set prevrow $row } - return $coords + return $ret } proc drawlineseg {id row endrow arrowlow} { global rowidlist displayorder iddrawn linesegs - global canv colormap linespc curview maxlinelen + global canv colormap linespc curview maxlinelen parentlist set cols [list [lsearch -exact [lindex $rowidlist $row] $id]] set le [expr {$row + 1}] @@ -3383,9 +3463,11 @@ proc drawlineseg {id row endrow arrowlow} { set itl [lindex $lines [expr {$i-1}] 2] set al [$canv itemcget $itl -arrow] set arrowlow [expr {$al eq "last" || $al eq "both"}] - } elseif {$arrowlow && - [lsearch -exact [lindex $rowidlist [expr {$row-1}]] $id] >= 0} { - set arrowlow 0 + } elseif {$arrowlow} { + if {[lsearch -exact [lindex $rowidlist [expr {$row-1}]] $id] >= 0 || + [lsearch -exact [lindex $parentlist [expr {$row-1}]] $id] >= 0} { + set arrowlow 0 + } } set arrow [lindex {none first last both} [expr {$arrowhigh + 2*$arrowlow}]] for {set y $le} {[incr y -1] > $row} {} { @@ -3404,8 +3486,19 @@ proc drawlineseg {id row endrow arrowlow} { set xc [lsearch -exact [lindex $rowidlist $row] $ch] if {$xc < 0} { puts "oops: drawlineseg: child $ch not on row $row" - } else { - if {$xc < $x - 1} { + } elseif {$xc != $x} { + if {($arrowhigh && $le == $row + 1) || $dir == 0} { + set d [expr {int(0.5 * $linespc)}] + set x1 [xc $row $x] + if {$xc < $x} { + set x2 [expr {$x1 - $d}] + } else { + set x2 [expr {$x1 + $d}] + } + set y2 [yc $row] + set y1 [expr {$y2 + $d}] + lappend coords $x1 $y1 $x2 $y2 + } elseif {$xc < $x - 1} { lappend coords [xc $row [expr {$x-1}]] [yc $row] } elseif {$xc > $x + 1} { lappend coords [xc $row [expr {$x+1}]] [yc $row] @@ -3416,23 +3509,9 @@ proc drawlineseg {id row endrow arrowlow} { } else { set xn [xc $row $xp] set yn [yc $row] - # work around tk8.4 refusal to draw arrows on diagonal segments - if {$arrowlow && $xn != [lindex $coords end-1]} { - if {[llength $coords] < 4 || - [lindex $coords end-3] != [lindex $coords end-1] || - [lindex $coords end] - $yn > 2 * $linespc} { - set xn [xc $row [expr {$xp - 0.5 * $dir}]] - set yo [yc [expr {$row + 0.5}]] - lappend coords $xn $yo $xn $yn - } - } else { - lappend coords $xn $yn - } + lappend coords $xn $yn } if {!$joinhigh} { - if {$arrowhigh} { - set coords [adjarrowhigh $coords] - } assigncolor $id set t [$canv create line $coords -width [linewidth $id] \ -fill $colormap($id) -tags lines.$id -arrow $arrow] @@ -3456,9 +3535,6 @@ proc drawlineseg {id row endrow arrowlow} { set coords [concat $coords $clow] if {!$joinhigh} { lset lines [expr {$i-1}] 1 $le - if {$arrowhigh} { - set coords [adjarrowhigh $coords] - } } else { # coalesce two pieces $canv delete $ith @@ -3478,7 +3554,7 @@ proc drawlineseg {id row endrow arrowlow} { proc drawparentlinks {id row} { global rowidlist canv colormap curview parentlist - global idpos + global idpos linespc 
set rowids [lindex $rowidlist $row] set col [lsearch -exact $rowids $id] @@ -3488,6 +3564,8 @@ proc drawparentlinks {id row} { set x [xc $row $col] set y [yc $row] set y2 [yc $row2] + set d [expr {int(0.5 * $linespc)}] + set ymid [expr {$y + $d}] set ids [lindex $rowidlist $row2] # rmx = right-most X coord used set rmx 0 @@ -3501,19 +3579,37 @@ proc drawparentlinks {id row} { if {$x2 > $rmx} { set rmx $x2 } - if {[lsearch -exact $rowids $p] < 0} { + set j [lsearch -exact $rowids $p] + if {$j < 0} { # drawlineseg will do this one for us continue } assigncolor $p # should handle duplicated parents here... set coords [list $x $y] - if {$i < $col - 1} { - lappend coords [xc $row [expr {$i + 1}]] $y - } elseif {$i > $col + 1} { - lappend coords [xc $row [expr {$i - 1}]] $y + if {$i != $col} { + # if attaching to a vertical segment, draw a smaller + # slant for visual distinctness + if {$i == $j} { + if {$i < $col} { + lappend coords [expr {$x2 + $d}] $y $x2 $ymid + } else { + lappend coords [expr {$x2 - $d}] $y $x2 $ymid + } + } elseif {$i < $col && $i < $j} { + # segment slants towards us already + lappend coords [xc $row $j] $y + } else { + if {$i < $col - 1} { + lappend coords [expr {$x2 + $linespc}] $y + } elseif {$i > $col + 1} { + lappend coords [expr {$x2 - $linespc}] $y + } + lappend coords $x2 $y2 + } + } else { + lappend coords $x2 $y2 } - lappend coords $x2 $y2 set t [$canv create line $coords -width [linewidth $p] \ -fill $colormap($p) -tags lines.$p] $canv lower $t @@ -3535,8 +3631,8 @@ proc drawcmittext {id row col} { global linespc canv canv2 canv3 canvy0 fgcolor curview global commitlisted commitinfo rowidlist parentlist global rowtextx idpos idtags idheads idotherrefs - global linehtag linentag linedtag - global mainfont canvxmax boldrows boldnamerows fgcolor nullid nullid2 + global linehtag linentag linedtag selectedline + global canvxmax boldrows boldnamerows fgcolor nullid nullid2 # listed is 0 for boundary, 1 for normal, 2 for left, 3 for right set listed [lindex $commitlisted $row] @@ -3593,15 +3689,15 @@ proc drawcmittext {id row col} { set name [lindex $commitinfo($id) 1] set date [lindex $commitinfo($id) 2] set date [formatdate $date] - set font $mainfont - set nfont $mainfont + set font mainfont + set nfont mainfont set isbold [ishighlighted $row] if {$isbold > 0} { lappend boldrows $row - lappend font bold + set font mainfontbold if {$isbold > 1} { lappend boldnamerows $row - lappend nfont bold + set nfont mainfontbold } } set linehtag($row) [$canv create text $xt $y -anchor w -fill $fgcolor \ @@ -3610,8 +3706,11 @@ proc drawcmittext {id row col} { set linentag($row) [$canv2 create text 3 $y -anchor w -fill $fgcolor \ -text $name -font $nfont -tags text] set linedtag($row) [$canv3 create text 3 $y -anchor w -fill $fgcolor \ - -text $date -font $mainfont -tags text] - set xr [expr {$xt + [font measure $mainfont $headline]}] + -text $date -font mainfont -tags text] + if {[info exists selectedline] && $selectedline == $row} { + make_secsel $row + } + set xr [expr {$xt + [font measure $font $headline]}] if {$xr > $canvxmax} { set canvxmax $xr setcanvscroll @@ -3619,10 +3718,10 @@ proc drawcmittext {id row col} { } proc drawcmitrow {row} { - global displayorder rowidlist + global displayorder rowidlist nrows_drawn global iddrawn markingmatches global commitinfo parentlist numcommits - global filehighlight fhighlights findstring nhighlights + global filehighlight fhighlights findpattern nhighlights global hlview vhighlights global highlight_related rhighlights @@ -3635,7 
+3734,7 @@ proc drawcmitrow {row} { if {[info exists filehighlight] && ![info exists fhighlights($row)]} { askfilehighlight $row $id } - if {$findstring ne {} && ![info exists nhighlights($row)]} { + if {$findpattern ne {} && ![info exists nhighlights($row)]} { askfindhighlight $row $id } if {$highlight_related ne "None" && ![info exists rhighlights($row)]} { @@ -3653,6 +3752,7 @@ proc drawcmitrow {row} { assigncolor $id drawcmittext $id $row $col set iddrawn($id) 1 + incr nrows_drawn } if {$markingmatches} { markrowmatches $row $id @@ -3660,8 +3760,8 @@ proc drawcmitrow {row} { } proc drawcommits {row {endrow {}}} { - global numcommits iddrawn displayorder curview - global parentlist rowidlist + global numcommits iddrawn displayorder curview need_redisplay + global parentlist rowidlist rowfinal uparrowlen downarrowlen nrows_drawn if {$row < 0} { set row 0 @@ -3673,6 +3773,35 @@ proc drawcommits {row {endrow {}}} { set endrow [expr {$numcommits - 1}] } + set rl1 [expr {$row - $downarrowlen - 3}] + if {$rl1 < 0} { + set rl1 0 + } + set ro1 [expr {$row - 3}] + if {$ro1 < 0} { + set ro1 0 + } + set r2 [expr {$endrow + $uparrowlen + 3}] + if {$r2 > $numcommits} { + set r2 $numcommits + } + for {set r $rl1} {$r < $r2} {incr r} { + if {[lindex $rowidlist $r] ne {} && [lindex $rowfinal $r]} { + if {$rl1 < $r} { + layoutrows $rl1 $r + } + set rl1 [expr {$r + 1}] + } + } + if {$rl1 < $r} { + layoutrows $rl1 $r + } + optimize_rows $ro1 0 $r2 + if {$need_redisplay || $nrows_drawn > 2000} { + clear_display + drawvisible + } + # make the lines join to already-drawn rows either side set r [expr {$row - 1}] if {$r < 0 || ![info exists iddrawn([lindex $displayorder $r])]} { @@ -3689,34 +3818,23 @@ proc drawcommits {row {endrow {}}} { drawcmitrow $r if {$r == $er} break set nextid [lindex $displayorder [expr {$r + 1}]] - if {$wasdrawn && [info exists iddrawn($nextid)]} { - catch {unset prevlines} - continue - } + if {$wasdrawn && [info exists iddrawn($nextid)]} continue drawparentlinks $id $r - if {[info exists lineends($r)]} { - foreach lid $lineends($r) { - unset prevlines($lid) - } - } set rowids [lindex $rowidlist $r] foreach lid $rowids { if {$lid eq {}} continue + if {[info exists lineend($lid)] && $lineend($lid) > $r} continue if {$lid eq $id} { # see if this is the first child of any of its parents foreach p [lindex $parentlist $r] { if {[lsearch -exact $rowids $p] < 0} { # make this line extend up to the child - set le [drawlineseg $p $r $er 0] - lappend lineends($le) $p - set prevlines($p) 1 + set lineend($p) [drawlineseg $p $r $er 0] } } - } elseif {![info exists prevlines($lid)]} { - set le [drawlineseg $lid $r $er 1] - lappend lineends($le) $lid - set prevlines($lid) 1 + } else { + set lineend($lid) [drawlineseg $lid $r $er 1] } } } @@ -3740,7 +3858,7 @@ proc drawvisible {} { } proc clear_display {} { - global iddrawn linesegs + global iddrawn linesegs need_redisplay nrows_drawn global vhighlights fhighlights nhighlights rhighlights allcanvs delete all @@ -3750,10 +3868,12 @@ proc clear_display {} { catch {unset fhighlights} catch {unset nhighlights} catch {unset rhighlights} + set need_redisplay 0 + set nrows_drawn 0 } proc findcrossings {id} { - global rowidlist parentlist numcommits rowoffsets displayorder + global rowidlist parentlist numcommits displayorder set cross {} set ccross {} @@ -3762,12 +3882,9 @@ proc findcrossings {id} { set e [expr {$numcommits - 1}] } if {$e <= $s} continue - set x [lsearch -exact [lindex $rowidlist $e] $id] - if {$x < 0} { - puts "findcrossings: oops, no 
[shortids $id] in row $e" - continue - } for {set row $e} {[incr row -1] >= $s} {} { + set x [lsearch -exact [lindex $rowidlist $row] $id] + if {$x < 0} break set olds [lindex $parentlist $row] set kid [lindex $displayorder $row] set kidx [lsearch -exact [lindex $rowidlist $row] $kid] @@ -3785,9 +3902,6 @@ proc findcrossings {id} { } } } - set inc [lindex $rowoffsets $row $x] - if {$inc eq {}} break - incr x $inc } } return [concat $ccross {{}} $cross] @@ -3868,7 +3982,7 @@ proc bindline {t id} { proc drawtags {id x xt y1} { global idtags idheads idotherrefs mainhead global linespc lthickness - global canv mainfont commitrow rowtextx curview fgcolor bgcolor + global canv commitrow rowtextx curview fgcolor bgcolor set marks {} set ntags 0 @@ -3897,9 +4011,9 @@ proc drawtags {id x xt y1} { foreach tag $marks { incr i if {$i >= $ntags && $i < $ntags + $nheads && $tag eq $mainhead} { - set wid [font measure [concat $mainfont bold] $tag] + set wid [font measure mainfontbold $tag] } else { - set wid [font measure $mainfont $tag] + set wid [font measure mainfont $tag] } lappend xvals $xt lappend wvals $wid @@ -3911,7 +4025,7 @@ proc drawtags {id x xt y1} { foreach tag $marks x $xvals wid $wvals { set xl [expr {$x + $delta}] set xr [expr {$x + $delta + $wid + $lthickness}] - set font $mainfont + set font mainfont if {[incr ntags -1] >= 0} { # draw a tag set t [$canv create polygon $x [expr {$yt + $delta}] $xl $yt \ @@ -3924,7 +4038,7 @@ proc drawtags {id x xt y1} { if {[incr nheads -1] >= 0} { set col green if {$tag eq $mainhead} { - lappend font bold + set font mainfontbold } } else { set col "#ddddff" @@ -3933,7 +4047,7 @@ proc drawtags {id x xt y1} { $canv create polygon $x $yt $xr $yt $xr $yb $x $yb \ -width 1 -outline black -fill $col -tags tag.$id if {[regexp {^(remotes/.*/|remotes/)} $tag match remoteprefix]} { - set rwid [font measure $mainfont $remoteprefix] + set rwid [font measure mainfont $remoteprefix] set xi [expr {$x + 1}] set yti [expr {$yt + 1}] set xri [expr {$x + $rwid}] @@ -3965,10 +4079,10 @@ proc xcoord {i level ln} { } proc show_status {msg} { - global canv mainfont fgcolor + global canv fgcolor clear_display - $canv create text 3 3 -anchor nw -text $msg -font $mainfont \ + $canv create text 3 3 -anchor nw -text $msg -font mainfont \ -tags text -fill $fgcolor } @@ -3977,9 +4091,9 @@ proc show_status {msg} { # on that row and below will move down one row. 
proc insertrow {row newcmit} { global displayorder parentlist commitlisted children - global commitrow curview rowidlist rowoffsets numcommits - global rowrangelist rowlaidout rowoptim numcommits - global selectedline rowchk commitidx + global commitrow curview rowidlist rowisopt rowfinal numcommits + global numcommits + global selectedline commitidx ordertok if {$row >= $numcommits} { puts "oops, inserting new row $row but only have $numcommits rows" @@ -3999,45 +4113,24 @@ proc insertrow {row newcmit} { set commitrow($curview,$id) $r } incr commitidx($curview) + set ordertok($curview,$newcmit) $ordertok($curview,$p) - set idlist [lindex $rowidlist $row] - set offs [lindex $rowoffsets $row] - set newoffs {} - foreach x $idlist { - if {$x eq {} || ($x eq $p && [llength $kids] == 1)} { - lappend newoffs {} - } else { - lappend newoffs 0 - } - } - if {[llength $kids] == 1} { - set col [lsearch -exact $idlist $p] - lset idlist $col $newcmit - } else { - set col [llength $idlist] - lappend idlist $newcmit - lappend offs {} - lset rowoffsets $row $offs - } - set rowidlist [linsert $rowidlist $row $idlist] - set rowoffsets [linsert $rowoffsets [expr {$row+1}] $newoffs] - - set rowrangelist [linsert $rowrangelist $row {}] - if {[llength $kids] > 1} { - set rp1 [expr {$row + 1}] - set ranges [lindex $rowrangelist $rp1] - if {$ranges eq {}} { - set ranges [list $newcmit $p] - } elseif {[lindex $ranges end-1] eq $p} { - lset ranges end-1 $newcmit + if {$row < [llength $rowidlist]} { + set idlist [lindex $rowidlist $row] + if {$idlist ne {}} { + if {[llength $kids] == 1} { + set col [lsearch -exact $idlist $p] + lset idlist $col $newcmit + } else { + set col [llength $idlist] + lappend idlist $newcmit + } } - lset rowrangelist $rp1 $ranges + set rowidlist [linsert $rowidlist $row $idlist] + set rowisopt [linsert $rowisopt $row 0] + set rowfinal [linsert $rowfinal $row [lindex $rowfinal $row]] } - catch {unset rowchk} - - incr rowlaidout - incr rowoptim incr numcommits if {[info exists selectedline] && $selectedline >= $row} { @@ -4049,9 +4142,9 @@ proc insertrow {row newcmit} { # Remove a commit that was inserted with insertrow on row $row. 
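insertrow above and the removerow that follows splice entries in and out of the per-row lists (rowidlist, rowisopt, rowfinal) with linsert and lreplace. As a quick reminder of that Tcl idiom, using nothing beyond core Tcl:

    set rows {r0 r1 r2 r3}
    set rows [linsert $rows 2 rNew]   ;# -> r0 r1 rNew r2 r3  (later entries shift down)
    set rows [lreplace $rows 2 2]     ;# -> r0 r1 r2 r3       (delete index 2, shift back up)
    puts $rows
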
proc removerow {row} { global displayorder parentlist commitlisted children - global commitrow curview rowidlist rowoffsets numcommits - global rowrangelist idrowranges rowlaidout rowoptim numcommits - global linesegends selectedline rowchk commitidx + global commitrow curview rowidlist rowisopt rowfinal numcommits + global numcommits + global linesegends selectedline commitidx if {$row >= $numcommits} { puts "oops, removing row $row but only have $numcommits rows" @@ -4076,27 +4169,12 @@ proc removerow {row} { } incr commitidx($curview) -1 - set rowidlist [lreplace $rowidlist $row $row] - set rowoffsets [lreplace $rowoffsets $rp1 $rp1] - if {$kids ne {}} { - set offs [lindex $rowoffsets $row] - set offs [lreplace $offs end end] - lset rowoffsets $row $offs - } - - set rowrangelist [lreplace $rowrangelist $row $row] - if {[llength $kids] > 0} { - set ranges [lindex $rowrangelist $row] - if {[lindex $ranges end-1] eq $id} { - set ranges [lreplace $ranges end-1 end] - lset rowrangelist $row $ranges - } + if {$row < [llength $rowidlist]} { + set rowidlist [lreplace $rowidlist $row $row] + set rowisopt [lreplace $rowisopt $row $row] + set rowfinal [lreplace $rowfinal $row $row] } - catch {unset rowchk} - - incr rowlaidout -1 - incr rowoptim -1 incr numcommits -1 if {[info exists selectedline] && $selectedline > $row} { @@ -4116,20 +4194,30 @@ proc settextcursor {c} { set curtextcursor $c } -proc nowbusy {what} { - global isbusy +proc nowbusy {what {name {}}} { + global isbusy busyname statusw if {[array names isbusy] eq {}} { . config -cursor watch settextcursor watch } set isbusy($what) 1 + set busyname($what) $name + if {$name ne {}} { + $statusw conf -text $name + } } proc notbusy {what} { - global isbusy maincursor textcursor + global isbusy maincursor textcursor busyname statusw - catch {unset isbusy($what)} + catch { + unset isbusy($what) + if {$busyname($what) ne {} && + [$statusw cget -text] eq $busyname($what)} { + $statusw conf -text {} + } + } if {[array names isbusy] eq {}} { . config -cursor $maincursor settextcursor $textcursor @@ -4157,148 +4245,149 @@ proc findmatches {f} { return $matches } -proc dofind {{rev 0}} { +proc dofind {{dirn 1} {wrap 1}} { global findstring findstartline findcurline selectedline numcommits + global gdttype filehighlight fh_serial find_dirn findallowwrap - unmarkmatches - cancel_next_highlight + if {[info exists find_dirn]} { + if {$find_dirn == $dirn} return + stopfinding + } focus . 
if {$findstring eq {} || $numcommits == 0} return if {![info exists selectedline]} { - set findstartline [lindex [visiblerows] $rev] + set findstartline [lindex [visiblerows] [expr {$dirn < 0}]] } else { set findstartline $selectedline } set findcurline $findstartline - nowbusy finding - if {!$rev} { - run findmore - } else { - if {$findcurline == 0} { - set findcurline $numcommits - } - incr findcurline -1 - run findmorerev + nowbusy finding "Searching" + if {$gdttype ne "containing:" && ![info exists filehighlight]} { + after cancel do_file_hl $fh_serial + do_file_hl $fh_serial } + set find_dirn $dirn + set findallowwrap $wrap + run findmore } -proc findnext {restart} { - global findcurline - if {![info exists findcurline]} { - if {$restart} { - dofind - } else { - bell - } - } else { - run findmore - nowbusy finding - } -} +proc stopfinding {} { + global find_dirn findcurline fprogcoord -proc findprev {} { - global findcurline - if {![info exists findcurline]} { - dofind 1 - } else { - run findmorerev - nowbusy finding + if {[info exists find_dirn]} { + unset find_dirn + unset findcurline + notbusy finding + set fprogcoord 0 + adjustprogress } } proc findmore {} { - global commitdata commitinfo numcommits findstring findpattern findloc + global commitdata commitinfo numcommits findpattern findloc global findstartline findcurline displayorder + global find_dirn gdttype fhighlights fprogcoord + global findallowwrap - set fldtypes {Headline Author Date Committer CDate Comments} - set l [expr {$findcurline + 1}] - if {$l >= $numcommits} { - set l 0 - } - if {$l <= $findstartline} { - set lim [expr {$findstartline + 1}] - } else { - set lim $numcommits - } - if {$lim - $l > 500} { - set lim [expr {$l + 500}] - } - set last 0 - for {} {$l < $lim} {incr l} { - set id [lindex $displayorder $l] - # shouldn't happen unless git log doesn't give all the commits... 
- if {![info exists commitdata($id)]} continue - if {![doesmatch $commitdata($id)]} continue - if {![info exists commitinfo($id)]} { - getcommit $id - } - set info $commitinfo($id) - foreach f $info ty $fldtypes { - if {($findloc eq "All fields" || $findloc eq $ty) && - [doesmatch $f]} { - findselectline $l - notbusy finding - return 0 - } - } - } - if {$l == $findstartline + 1} { - bell - unset findcurline - notbusy finding + if {![info exists find_dirn]} { return 0 } - set findcurline [expr {$l - 1}] - return 1 -} - -proc findmorerev {} { - global commitdata commitinfo numcommits findstring findpattern findloc - global findstartline findcurline displayorder - set fldtypes {Headline Author Date Committer CDate Comments} set l $findcurline - if {$l == 0} { - set l $numcommits - } - incr l -1 - if {$l >= $findstartline} { - set lim [expr {$findstartline - 1}] + set moretodo 0 + if {$find_dirn > 0} { + incr l + if {$l >= $numcommits} { + set l 0 + } + if {$l <= $findstartline} { + set lim [expr {$findstartline + 1}] + } else { + set lim $numcommits + set moretodo $findallowwrap + } } else { - set lim -1 - } - if {$l - $lim > 500} { - set lim [expr {$l - 500}] - } - set last 0 - for {} {$l > $lim} {incr l -1} { - set id [lindex $displayorder $l] - if {![doesmatch $commitdata($id)]} continue - if {![info exists commitinfo($id)]} { - getcommit $id + if {$l == 0} { + set l $numcommits + } + incr l -1 + if {$l >= $findstartline} { + set lim [expr {$findstartline - 1}] + } else { + set lim -1 + set moretodo $findallowwrap + } + } + set n [expr {($lim - $l) * $find_dirn}] + if {$n > 500} { + set n 500 + set moretodo 1 + } + set found 0 + set domore 1 + if {$gdttype eq "containing:"} { + for {} {$n > 0} {incr n -1; incr l $find_dirn} { + set id [lindex $displayorder $l] + # shouldn't happen unless git log doesn't give all the commits... 
+ if {![info exists commitdata($id)]} continue + if {![doesmatch $commitdata($id)]} continue + if {![info exists commitinfo($id)]} { + getcommit $id + } + set info $commitinfo($id) + foreach f $info ty $fldtypes { + if {($findloc eq "All fields" || $findloc eq $ty) && + [doesmatch $f]} { + set found 1 + break + } + } + if {$found} break } - set info $commitinfo($id) - foreach f $info ty $fldtypes { - if {($findloc eq "All fields" || $findloc eq $ty) && - [doesmatch $f]} { - findselectline $l - notbusy finding - return 0 + } else { + for {} {$n > 0} {incr n -1; incr l $find_dirn} { + set id [lindex $displayorder $l] + if {![info exists fhighlights($l)]} { + askfilehighlight $l $id + if {$domore} { + set domore 0 + set findcurline [expr {$l - $find_dirn}] + } + } elseif {$fhighlights($l)} { + set found $domore + break } } } - if {$l == -1} { - bell + if {$found || ($domore && !$moretodo)} { unset findcurline + unset find_dirn notbusy finding + set fprogcoord 0 + adjustprogress + if {$found} { + findselectline $l + } else { + bell + } return 0 } - set findcurline [expr {$l + 1}] - return 1 + if {!$domore} { + flushhighlights + } else { + set findcurline [expr {$l - $find_dirn}] + } + set n [expr {($findcurline - $findstartline) * $find_dirn - 1}] + if {$n < 0} { + incr n $numcommits + } + set fprogcoord [expr {$n * 1.0 / $numcommits}] + adjustprogress + return $domore } proc findselectline {l} { - global findloc commentend ctext findcurline markingmatches + global findloc commentend ctext findcurline markingmatches gdttype set markingmatches 1 set findcurline $l @@ -4341,12 +4430,11 @@ proc markmatches {canv l str tag matches font row} { } proc unmarkmatches {} { - global findids markingmatches findcurline + global markingmatches allcanvs delete matches - catch {unset findids} set markingmatches 0 - catch {unset findcurline} + stopfinding } proc selcanvline {w x y} { @@ -4382,7 +4470,7 @@ proc commit_descriptor {p} { # append some text to the ctext widget, and make any SHA1 ID # that we know about be a clickable link. 
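The reworked findmore above scans at most 500 rows per call and returns nonzero while it still has rows to examine (and zero once it has finished, found a match, or needs to wait for file-highlight results). gitk drives it with its own run helper (seen as "run findmore" in dofind above), which is defined outside this section; purely as an illustration of the pattern, a minimal stand-in driver could look like:

    # Simplified stand-in for gitk's scheduler, for illustration only:
    # keep re-running a chunked worker while it reports more work to do.
    proc run_chunked {cmd} {
        if {[uplevel #0 $cmd]} {
            after idle [list run_chunked $cmd]
        }
    }
    # e.g. run_chunked findmore
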
proc appendwithlinks {text tags} { - global ctext commitrow linknum curview + global ctext commitrow linknum curview pendinglinks set start [$ctext index "end - 1c"] $ctext insert end $text $tags @@ -4391,17 +4479,49 @@ proc appendwithlinks {text tags} { set s [lindex $l 0] set e [lindex $l 1] set linkid [string range $text $s $e] - if {![info exists commitrow($curview,$linkid)]} continue incr e - $ctext tag add link "$start + $s c" "$start + $e c" + $ctext tag delete link$linknum $ctext tag add link$linknum "$start + $s c" "$start + $e c" - $ctext tag bind link$linknum <1> \ - [list selectline $commitrow($curview,$linkid) 1] + setlink $linkid link$linknum incr linknum } - $ctext tag conf link -foreground blue -underline 1 - $ctext tag bind link <Enter> { %W configure -cursor hand2 } - $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor } +} + +proc setlink {id lk} { + global curview commitrow ctext pendinglinks commitinterest + + if {[info exists commitrow($curview,$id)]} { + $ctext tag conf $lk -foreground blue -underline 1 + $ctext tag bind $lk <1> [list selectline $commitrow($curview,$id) 1] + $ctext tag bind $lk <Enter> {linkcursor %W 1} + $ctext tag bind $lk <Leave> {linkcursor %W -1} + } else { + lappend pendinglinks($id) $lk + lappend commitinterest($id) {makelink %I} + } +} + +proc makelink {id} { + global pendinglinks + + if {![info exists pendinglinks($id)]} return + foreach lk $pendinglinks($id) { + setlink $id $lk + } + unset pendinglinks($id) +} + +proc linkcursor {w inc} { + global linkentercount curtextcursor + + if {[incr linkentercount $inc] > 0} { + $w configure -cursor hand2 + } else { + $w configure -cursor $curtextcursor + if {$linkentercount < 0} { + set linkentercount 0 + } + } } proc viewnextline {dir} { @@ -4448,15 +4568,7 @@ proc appendrefs {pos ids var} { $ctext tag delete $lk $ctext insert $pos $sep $ctext insert $pos [lindex $ti 0] $lk - if {[info exists commitrow($curview,$id)]} { - $ctext tag conf $lk -foreground blue - $ctext tag bind $lk <1> \ - [list selectline $commitrow($curview,$id) 1] - $ctext tag conf $lk -underline 1 - $ctext tag bind $lk <Enter> { %W configure -cursor hand2 } - $ctext tag bind $lk <Leave> \ - { %W configure -cursor $curtextcursor } - } + setlink $id $lk set sep ", " } } @@ -4514,9 +4626,27 @@ proc dispnexttag {} { } } +proc make_secsel {l} { + global linehtag linentag linedtag canv canv2 canv3 + + if {![info exists linehtag($l)]} return + $canv delete secsel + set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \ + -tags secsel -fill [$canv cget -selectbackground]] + $canv lower $t + $canv2 delete secsel + set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \ + -tags secsel -fill [$canv2 cget -selectbackground]] + $canv2 lower $t + $canv3 delete secsel + set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \ + -tags secsel -fill [$canv3 cget -selectbackground]] + $canv3 lower $t +} + proc selectline {l isnew} { - global canv canv2 canv3 ctext commitinfo selectedline - global displayorder linehtag linentag linedtag + global canv ctext commitinfo selectedline + global displayorder global canvy0 linespc parentlist children curview global currentid sha1entry global commentend idtags linknum @@ -4526,8 +4656,8 @@ proc selectline {l isnew} { catch {unset pending_select} $canv delete hover normalline - cancel_next_highlight unsel_reflist + stopfinding if {$l < 0 || $l >= $numcommits} return set y [expr {$canvy0 + $l * $linespc}] set ymax [lindex [$canv cget 
-scrollregion] 3] @@ -4565,19 +4695,7 @@ proc selectline {l isnew} { drawvisible } - if {![info exists linehtag($l)]} return - $canv delete secsel - set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \ - -tags secsel -fill [$canv cget -selectbackground]] - $canv lower $t - $canv2 delete secsel - set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \ - -tags secsel -fill [$canv2 cget -selectbackground]] - $canv2 lower $t - $canv3 delete secsel - set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \ - -tags secsel -fill [$canv3 cget -selectbackground]] - $canv3 lower $t + make_secsel $l if {$isnew} { addtohistory [list selectline $l 0] @@ -4720,7 +4838,6 @@ proc unselectline {} { catch {unset currentid} allcanvs delete secsel rhighlight_none - cancel_next_highlight } proc reselectline {} { @@ -4889,6 +5006,7 @@ proc showfile {f} { $ctext insert end "$f\n" filesep $ctext config -state disabled $ctext yview $commentend + settabs 0 } proc getblobline {bf id} { @@ -4914,15 +5032,18 @@ proc getblobline {bf id} { } proc mergediff {id l} { - global diffmergeid diffopts mdifffd + global diffmergeid mdifffd global diffids global parentlist + global limitdiffs viewfiles curview set diffmergeid $id set diffids $id # this doesn't seem to actually affect anything... - set env(GIT_DIFF_OPTS) $diffopts set cmd [concat | git diff-tree --no-commit-id --cc $id] + if {$limitdiffs && $viewfiles($curview) ne {}} { + set cmd [concat $cmd -- $viewfiles($curview)] + } if {[catch {set mdf [open $cmd r]} err]} { error_popup "Error getting merge diffs: $err" return @@ -4930,6 +5051,7 @@ proc mergediff {id l} { fconfigure $mdf -blocking 0 set mdifffd($id) $mdf set np [llength [lindex $parentlist $l]] + settabs $np filerun $mdf [list getmergediffline $mdf $id $np] } @@ -5007,6 +5129,7 @@ proc getmergediffline {mdf id np} { proc startdiff {ids} { global treediffs diffids treepending diffmergeid nullid nullid2 + settabs 1 set diffids $ids catch {unset diffmergeid} if {![info exists treediffs($ids)] || @@ -5020,8 +5143,27 @@ proc startdiff {ids} { } } +proc path_filter {filter name} { + foreach p $filter { + set l [string length $p] + if {[string index $p end] eq "/"} { + if {[string compare -length $l $p $name] == 0} { + return 1 + } + } else { + if {[string compare -length $l $p $name] == 0 && + ([string length $name] == $l || + [string index $name $l] eq "/")} { + return 1 + } + } + } + return 0 +} + proc addtocflist {ids} { - global treediffs cflist + global treediffs + add_flist $treediffs($ids) getblobdiffs $ids } @@ -5078,7 +5220,7 @@ proc gettreediffs {ids} { proc gettreediffline {gdtf ids} { global treediff treediffs treepending diffids diffmergeid - global cmitmode + global cmitmode viewfiles curview limitdiffs set nr 0 while {[incr nr] <= 1000 && [gets $gdtf line] >= 0} { @@ -5095,7 +5237,17 @@ proc gettreediffline {gdtf ids} { return [expr {$nr >= 1000? 
2: 1}] } close $gdtf - set treediffs($ids) $treediff + if {$limitdiffs && $viewfiles($curview) ne {}} { + set flist {} + foreach f $treediff { + if {[path_filter $viewfiles($curview) $f]} { + lappend flist $f + } + } + set treediffs($ids) $flist + } else { + set treediffs($ids) $treediff + } unset treepending if {$cmitmode eq "tree"} { gettree $diffids @@ -5126,12 +5278,16 @@ proc diffcontextchange {n1 n2 op} { } proc getblobdiffs {ids} { - global diffopts blobdifffd diffids env + global blobdifffd diffids env global diffinhdr treediffs global diffcontext + global limitdiffs viewfiles curview - set env(GIT_DIFF_OPTS) $diffopts - if {[catch {set bdf [open [diffcmd $ids "-p -C --no-commit-id -U$diffcontext"] r]} err]} { + set cmd [diffcmd $ids "-p -C --no-commit-id -U$diffcontext"] + if {$limitdiffs && $viewfiles($curview) ne {}} { + set cmd [concat $cmd -- $viewfiles($curview)] + } + if {[catch {set bdf [open $cmd r]} err]} { puts "error getting diffs: $err" return } @@ -5215,8 +5371,7 @@ proc getblobdiffline {bdf ids} { set diffinhdr 0 } elseif {$diffinhdr} { - if {![string compare -length 12 "rename from " $line] || - ![string compare -length 10 "copy from " $line]} { + if {![string compare -length 12 "rename from " $line]} { set fname [string range $line [expr 6 + [string first " from " $line] ] end] if {[string index $fname 0] eq "\""} { set fname [lindex $fname 0] @@ -5297,6 +5452,7 @@ proc nextfile {} { proc clear_ctext {{first 1.0}} { global ctext smarktop smarkbot + global pendinglinks set l [lindex [split $first .] 0] if {![info exists smarktop] || [$ctext compare $first < $smarktop.0]} { @@ -5306,6 +5462,26 @@ proc clear_ctext {{first 1.0}} { set smarkbot $l } $ctext delete $first end + if {$first eq "1.0"} { + catch {unset pendinglinks} + } +} + +proc settabs {{firstab {}}} { + global firsttabstop tabstop ctext have_tk85 + + if {$firstab ne {} && $have_tk85} { + set firsttabstop $firstab + } + set w [font measure textfont "0"] + if {$firsttabstop != 0} { + $ctext conf -tabs [list [expr {($firsttabstop + $tabstop) * $w}] \ + [expr {($firsttabstop + 2 * $tabstop) * $w}]] + } elseif {$have_tk85 || $tabstop != 8} { + $ctext conf -tabs [expr {$tabstop * $w}] + } else { + $ctext conf -tabs {} + } } proc incrsearch {name ix op} { @@ -5428,11 +5604,11 @@ proc scrolltext {f0 f1} { } proc setcoords {} { - global linespc charspc canvx0 canvy0 mainfont + global linespc charspc canvx0 canvy0 global xspc1 xspc2 lthickness - set linespc [font metrics $mainfont -linespace] - set charspc [font measure $mainfont "m"] + set linespc [font metrics mainfont -linespace] + set charspc [font measure mainfont "m"] set canvy0 [expr {int(3 + 0.5 * $linespc)}] set canvx0 [expr {int(3 + 0.5 * $linespc)}] set lthickness [expr {int($linespc / 9) + 1}] @@ -5457,26 +5633,75 @@ proc redisplay {} { } } +proc parsefont {f n} { + global fontattr + + set fontattr($f,family) [lindex $n 0] + set s [lindex $n 1] + if {$s eq {} || $s == 0} { + set s 10 + } elseif {$s < 0} { + set s [expr {int(-$s / [winfo fpixels . 1p] + 0.5)}] + } + set fontattr($f,size) $s + set fontattr($f,weight) normal + set fontattr($f,slant) roman + foreach style [lrange $n 2 end] { + switch -- $style { + "normal" - + "bold" {set fontattr($f,weight) $style} + "roman" - + "italic" {set fontattr($f,slant) $style} + } + } +} + +proc fontflags {f {isbold 0}} { + global fontattr + + return [list -family $fontattr($f,family) -size $fontattr($f,size) \ + -weight [expr {$isbold? 
"bold": $fontattr($f,weight)}] \ + -slant $fontattr($f,slant)] +} + +proc fontname {f} { + global fontattr + + set n [list $fontattr($f,family) $fontattr($f,size)] + if {$fontattr($f,weight) eq "bold"} { + lappend n "bold" + } + if {$fontattr($f,slant) eq "italic"} { + lappend n "italic" + } + return $n +} + proc incrfont {inc} { global mainfont textfont ctext canv phase cflist showrefstop - global charspc tabstop - global stopped entries + global stopped entries fontattr + unmarkmatches - set mainfont [lreplace $mainfont 1 1 [expr {[lindex $mainfont 1] + $inc}]] - set textfont [lreplace $textfont 1 1 [expr {[lindex $textfont 1] + $inc}]] + set s $fontattr(mainfont,size) + incr s $inc + if {$s < 1} { + set s 1 + } + set fontattr(mainfont,size) $s + font config mainfont -size $s + font config mainfontbold -size $s + set mainfont [fontname mainfont] + set s $fontattr(textfont,size) + incr s $inc + if {$s < 1} { + set s 1 + } + set fontattr(textfont,size) $s + font config textfont -size $s + font config textfontbold -size $s + set textfont [fontname textfont] setcoords - $ctext conf -font $textfont -tabs "[expr {$tabstop * $charspc}]" - $cflist conf -font $textfont - $ctext tag conf filesep -font [concat $textfont bold] - foreach e $entries { - $e conf -font $mainfont - } - if {$phase eq "getcommits"} { - $canv itemconf textitems -font $mainfont - } - if {[info exists showrefstop] && [winfo exists $showrefstop]} { - $showrefstop.list conf -font $mainfont - } + settabs redisplay } @@ -5587,7 +5812,7 @@ proc lineleave {id} { proc linehover {} { global hoverx hovery hoverid hovertimer global canv linespc lthickness - global commitinfo mainfont + global commitinfo set text [lindex $commitinfo($hoverid) 0] set ymax [lindex [$canv cget -scrollregion] 3] @@ -5597,13 +5822,13 @@ proc linehover {} { set y [expr {$hovery + $yfrac * $ymax - $linespc / 2}] set x0 [expr {$x - 2 * $lthickness}] set y0 [expr {$y - 2 * $lthickness}] - set x1 [expr {$x + [font measure $mainfont $text] + 2 * $lthickness}] + set x1 [expr {$x + [font measure mainfont $text] + 2 * $lthickness}] set y1 [expr {$y + $linespc + 2 * $lthickness}] set t [$canv create rectangle $x0 $y0 $x1 $y1 \ -fill \#ffff80 -outline black -width 1 -tags hover] $canv raise $t set t [$canv create text $x $y -anchor nw -text $text -tags hover \ - -font $mainfont] + -font mainfont] $canv raise $t } @@ -5641,7 +5866,7 @@ proc arrowjump {id n y} { } proc lineclick {x y id isnew} { - global ctext commitinfo children canv thickerline curview + global ctext commitinfo children canv thickerline curview commitrow if {![info exists commitinfo($id)] && ![getcommit $id]} return unmarkmatches @@ -5669,12 +5894,10 @@ proc lineclick {x y id isnew} { # fill the details pane with info about this line $ctext conf -state normal clear_ctext - $ctext tag conf link -foreground blue -underline 1 - $ctext tag bind link <Enter> { %W configure -cursor hand2 } - $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor } + settabs 0 $ctext insert end "Parent:\t" - $ctext insert end $id [list link link0] - $ctext tag bind link0 <1> [list selbyid $id] + $ctext insert end $id link0 + setlink $id link0 set info $commitinfo($id) $ctext insert end "\n\t[lindex $info 0]\n" $ctext insert end "\tAuthor:\t[lindex $info 1]\n" @@ -5689,8 +5912,8 @@ proc lineclick {x y id isnew} { if {![info exists commitinfo($child)] && ![getcommit $child]} continue set info $commitinfo($child) $ctext insert end "\n\t" - $ctext insert end $child [list link link$i] - $ctext tag bind link$i <1> [list 
selbyid $child] + $ctext insert end $child link$i + setlink $child link$i $ctext insert end "\n\t[lindex $info 0]" $ctext insert end "\n\tAuthor:\t[lindex $info 1]" set date [formatdate [lindex $info 2]] @@ -5729,6 +5952,7 @@ proc rowmenu {x y id} { global rowctxmenu commitrow selectedline rowmenuid curview global nullid nullid2 fakerowmenu mainhead + stopfinding set rowmenuid $id if {![info exists selectedline] || $commitrow($curview,$id) eq $selectedline} { @@ -5771,16 +5995,13 @@ proc doseldiff {oldid newid} { clear_ctext init_flist "Top" $ctext insert end "From " - $ctext tag conf link -foreground blue -underline 1 - $ctext tag bind link <Enter> { %W configure -cursor hand2 } - $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor } - $ctext tag bind link0 <1> [list selbyid $oldid] - $ctext insert end $oldid [list link link0] + $ctext insert end $oldid link0 + setlink $oldid link0 $ctext insert end "\n " $ctext insert end [lindex $commitinfo($oldid) 0] $ctext insert end "\n\nTo " - $ctext tag bind link1 <1> [list selbyid $newid] - $ctext insert end $newid [list link link1] + $ctext insert end $newid link1 + setlink $newid link1 $ctext insert end "\n " $ctext insert end [lindex $commitinfo($newid) 0] $ctext insert end "\n" @@ -5861,6 +6082,8 @@ proc mkpatchgo {} { set newid [$patchtop.tosha1 get] set fname [$patchtop.fname get] set cmd [diffcmd [list $oldid $newid] -p] + # trim off the initial "|" + set cmd [lrange $cmd 1 end] lappend cmd >$fname & if {[catch {eval exec $cmd} err]} { error_popup "Error creating patch: $err" @@ -5941,7 +6164,7 @@ proc domktag {} { proc redrawtags {id} { global canv linehtag commitrow idpos selectedline curview - global mainfont canvxmax iddrawn + global canvxmax iddrawn if {![info exists commitrow($curview,$id)]} return if {![info exists iddrawn($id)]} return @@ -5950,7 +6173,7 @@ proc redrawtags {id} { set xt [eval drawtags $id $idpos($id)] $canv coords $linehtag($commitrow($curview,$id)) $xt [lindex $idpos($id) 2] set text [$canv itemcget $linehtag($commitrow($curview,$id)) -text] - set xr [expr {$xt + [font measure $mainfont $text]}] + set xr [expr {$xt + [font measure mainfont $text]}] if {$xr > $canvxmax} { set canvxmax $xr setcanvscroll @@ -6093,7 +6316,7 @@ proc cherrypick {} { included in branch $mainhead -- really re-apply it?"] if {!$ok} return } - nowbusy cherrypick + nowbusy cherrypick "Cherry-picking" update # Unfortunately git-cherry-pick writes stuff to stderr even when # no error occurs, and exec takes that as an indication of error... @@ -6123,7 +6346,6 @@ proc cherrypick {} { proc resethead {} { global mainheadid mainhead rowmenuid confirm_ok resettype - global showlocalchanges set confirm_ok 0 set w ".confirmreset" @@ -6160,32 +6382,23 @@ proc resethead {} { error_popup $err } else { dohidelocalchanges - set w ".resetprogress" - filerun $fd [list readresetstat $fd $w] - toplevel $w - wm transient $w - wm title $w "Reset progress" - message $w.m -text "Reset in progress, please wait..." 
\ - -justify center -aspect 1000 - pack $w.m -side top -fill x -padx 20 -pady 5 - canvas $w.c -width 150 -height 20 -bg white - $w.c create rect 0 0 0 20 -fill green -tags rect - pack $w.c -side top -fill x -padx 20 -pady 5 -expand 1 - nowbusy reset + filerun $fd [list readresetstat $fd] + nowbusy reset "Resetting" } } -proc readresetstat {fd w} { - global mainhead mainheadid showlocalchanges +proc readresetstat {fd} { + global mainhead mainheadid showlocalchanges rprogcoord if {[gets $fd line] >= 0} { if {[regexp {([0-9]+)% \(([0-9]+)/([0-9]+)\)} $line match p m n]} { - set x [expr {($m * 150) / $n}] - $w.c coords rect 0 0 $x 20 + set rprogcoord [expr {1.0 * $m / $n}] + adjustprogress } return 1 } - destroy $w + set rprogcoord 0 + adjustprogress notbusy reset if {[catch {close $fd} err]} { error_popup $err @@ -6209,6 +6422,7 @@ proc readresetstat {fd w} { proc headmenu {x y id head} { global headmenuid headmenuhead headctxmenu mainhead + stopfinding set headmenuid $id set headmenuhead $head set state normal @@ -6226,7 +6440,7 @@ proc cobranch {} { # check the tree is clean first?? set oldmainhead $mainhead - nowbusy checkout + nowbusy checkout "Checking out" update dohidelocalchanges if {[catch { @@ -6282,8 +6496,8 @@ proc rmbranch {} { # Display a list of tags and heads proc showrefs {} { - global showrefstop bgcolor fgcolor selectbgcolor mainfont - global bglist fglist uifont reflistfilter reflist maincursor + global showrefstop bgcolor fgcolor selectbgcolor + global bglist fglist reflistfilter reflist maincursor set top .showrefs set showrefstop $top @@ -6295,7 +6509,7 @@ proc showrefs {} { toplevel $top wm title $top "Tags and heads: [file tail [pwd]]" text $top.list -background $bgcolor -foreground $fgcolor \ - -selectbackground $selectbgcolor -font $mainfont \ + -selectbackground $selectbgcolor -font mainfont \ -xscrollcommand "$top.xsb set" -yscrollcommand "$top.ysb set" \ -width 30 -height 20 -cursor $maincursor \ -spacing1 1 -spacing3 1 -state disabled @@ -6307,15 +6521,15 @@ proc showrefs {} { grid $top.list $top.ysb -sticky nsew grid $top.xsb x -sticky ew frame $top.f - label $top.f.l -text "Filter: " -font $uifont - entry $top.f.e -width 20 -textvariable reflistfilter -font $uifont + label $top.f.l -text "Filter: " -font uifont + entry $top.f.e -width 20 -textvariable reflistfilter -font uifont set reflistfilter "*" trace add variable reflistfilter write reflistfilter_change pack $top.f.e -side right -fill x -expand 1 pack $top.f.l -side left grid $top.f - -sticky ew -pady 2 button $top.close -command [list destroy $top] -text "Close" \ - -font $uifont + -font uifont grid $top.close - grid columnconfigure $top 0 -weight 1 grid rowconfigure $top 0 -weight 1 @@ -6438,25 +6652,59 @@ proc refill_reflist {} { # Stuff for finding nearby tags proc getallcommits {} { - global allcommits allids nbmp nextarc seeds + global allcommits nextarc seeds allccache allcwait cachedarcs allcupdate + global idheads idtags idotherrefs allparents tagobjid if {![info exists allcommits]} { - set allids {} - set nbmp 0 set nextarc 0 set allcommits 0 set seeds {} + set allcwait 0 + set cachedarcs 0 + set allccache [file join [gitdir] "gitk.cache"] + if {![catch { + set f [open $allccache r] + set allcwait 1 + getcache $f + }]} return } - set cmd [concat | git rev-list --all --parents] - foreach id $seeds { - lappend cmd "^$id" + if {$allcwait} { + return + } + set cmd [list | git rev-list --parents] + set allcupdate [expr {$seeds ne {}}] + if {!$allcupdate} { + set ids "--all" + } else { + set refs 
[concat [array names idheads] [array names idtags] \ + [array names idotherrefs]] + set ids {} + set tagobjs {} + foreach name [array names tagobjid] { + lappend tagobjs $tagobjid($name) + } + foreach id [lsort -unique $refs] { + if {![info exists allparents($id)] && + [lsearch -exact $tagobjs $id] < 0} { + lappend ids $id + } + } + if {$ids ne {}} { + foreach id $seeds { + lappend ids "^$id" + } + } + } + if {$ids ne {}} { + set fd [open [concat $cmd $ids] r] + fconfigure $fd -blocking 0 + incr allcommits + nowbusy allcommits + filerun $fd [list getallclines $fd] + } else { + dispneartags 0 } - set fd [open $cmd r] - fconfigure $fd -blocking 0 - incr allcommits - nowbusy allcommits - filerun $fd [list getallclines $fd] } # Since most commits have 1 parent and 1 child, we group strings of @@ -6475,10 +6723,10 @@ proc getallcommits {} { # coming from descendents, and "outgoing" means going towards ancestors. proc getallclines {fd} { - global allids allparents allchildren idtags idheads nextarc nbmp + global allparents allchildren idtags idheads nextarc global arcnos arcids arctags arcout arcend arcstart archeads growing - global seeds allcommits - + global seeds allcommits cachedarcs allcupdate + set nid 0 while {[incr nid] <= 1000 && [gets $fd line] >= 0} { set id [lindex $line 0] @@ -6486,7 +6734,7 @@ proc getallclines {fd} { # seen it already continue } - lappend allids $id + set cachedarcs 0 set olds [lrange $line 1 end] set allparents($id) $olds if {![info exists allchildren($id)]} { @@ -6517,7 +6765,6 @@ proc getallclines {fd} { continue } } - incr nbmp foreach a $arcnos($id) { lappend arcids($a) $id set arcend($a) $id @@ -6557,9 +6804,28 @@ proc getallclines {fd} { if {![eof $fd]} { return [expr {$nid >= 1000? 2: 1}] } - close $fd + set cacheok 1 + if {[catch { + fconfigure $fd -blocking 1 + close $fd + } err]} { + # got an error reading the list of commits + # if we were updating, try rereading the whole thing again + if {$allcupdate} { + incr allcommits -1 + dropcache $err + return + } + error_popup "Error reading commit topology information;\ + branch and preceding/following tag information\ + will be incomplete.\n($err)" + set cacheok 0 + } if {[incr allcommits -1] == 0} { notbusy allcommits + if {$cacheok} { + run savecache + } } dispneartags 0 return 0 @@ -6583,7 +6849,7 @@ proc recalcarc {a} { } proc splitarc {p} { - global arcnos arcids nextarc nbmp arctags archeads idtags idheads + global arcnos arcids nextarc arctags archeads idtags idheads global arcstart arcend arcout allparents growing set a $arcnos($p) @@ -6615,7 +6881,6 @@ proc splitarc {p} { set growing($na) 1 unset growing($a) } - incr nbmp foreach id $tail { if {[llength $arcnos($id)] == 1} { @@ -6639,17 +6904,15 @@ proc splitarc {p} { # Update things for a new commit added that is a child of one # existing commit. Used when cherry-picking. proc addnewchild {id p} { - global allids allparents allchildren idtags nextarc nbmp + global allparents allchildren idtags nextarc global arcnos arcids arctags arcout arcend arcstart archeads growing global seeds allcommits - if {![info exists allcommits]} return - lappend allids $id + if {![info exists allcommits] || ![info exists arcnos($p)]} return set allparents($id) [list $p] set allchildren($id) {} set arcnos($id) {} lappend seeds $id - incr nbmp lappend allchildren($p) $id set a [incr nextarc] set arcstart($a) $id @@ -6664,6 +6927,172 @@ proc addnewchild {id p} { set arcout($id) [list $a] } +# This implements a cache for the topology information. 
+# The cache saves, for each arc, the start and end of the arc, +# the ids on the arc, and the outgoing arcs from the end. +proc readcache {f} { + global arcnos arcids arcout arcstart arcend arctags archeads nextarc + global idtags idheads allparents cachedarcs possible_seeds seeds growing + global allcwait + + set a $nextarc + set lim $cachedarcs + if {$lim - $a > 500} { + set lim [expr {$a + 500}] + } + if {[catch { + if {$a == $lim} { + # finish reading the cache and setting up arctags, etc. + set line [gets $f] + if {$line ne "1"} {error "bad final version"} + close $f + foreach id [array names idtags] { + if {[info exists arcnos($id)] && [llength $arcnos($id)] == 1 && + [llength $allparents($id)] == 1} { + set a [lindex $arcnos($id) 0] + if {$arctags($a) eq {}} { + recalcarc $a + } + } + } + foreach id [array names idheads] { + if {[info exists arcnos($id)] && [llength $arcnos($id)] == 1 && + [llength $allparents($id)] == 1} { + set a [lindex $arcnos($id) 0] + if {$archeads($a) eq {}} { + recalcarc $a + } + } + } + foreach id [lsort -unique $possible_seeds] { + if {$arcnos($id) eq {}} { + lappend seeds $id + } + } + set allcwait 0 + } else { + while {[incr a] <= $lim} { + set line [gets $f] + if {[llength $line] != 3} {error "bad line"} + set s [lindex $line 0] + set arcstart($a) $s + lappend arcout($s) $a + if {![info exists arcnos($s)]} { + lappend possible_seeds $s + set arcnos($s) {} + } + set e [lindex $line 1] + if {$e eq {}} { + set growing($a) 1 + } else { + set arcend($a) $e + if {![info exists arcout($e)]} { + set arcout($e) {} + } + } + set arcids($a) [lindex $line 2] + foreach id $arcids($a) { + lappend allparents($s) $id + set s $id + lappend arcnos($id) $a + } + if {![info exists allparents($s)]} { + set allparents($s) {} + } + set arctags($a) {} + set archeads($a) {} + } + set nextarc [expr {$a - 1}] + } + } err]} { + dropcache $err + return 0 + } + if {!$allcwait} { + getallcommits + } + return $allcwait +} + +proc getcache {f} { + global nextarc cachedarcs possible_seeds + + if {[catch { + set line [gets $f] + if {[llength $line] != 2 || [lindex $line 0] ne "1"} {error "bad version"} + # make sure it's an integer + set cachedarcs [expr {int([lindex $line 1])}] + if {$cachedarcs < 0} {error "bad number of arcs"} + set nextarc 0 + set possible_seeds {} + run readcache $f + } err]} { + dropcache $err + } + return 0 +} + +proc dropcache {err} { + global allcwait nextarc cachedarcs seeds + + #puts "dropping cache ($err)" + foreach v {arcnos arcout arcids arcstart arcend growing \ + arctags archeads allparents allchildren} { + global $v + catch {unset $v} + } + set allcwait 0 + set nextarc 0 + set cachedarcs 0 + set seeds {} + getallcommits +} + +proc writecache {f} { + global cachearc cachedarcs allccache + global arcstart arcend arcnos arcids arcout + + set a $cachearc + set lim $cachedarcs + if {$lim - $a > 1000} { + set lim [expr {$a + 1000}] + } + if {[catch { + while {[incr a] <= $lim} { + if {[info exists arcend($a)]} { + puts $f [list $arcstart($a) $arcend($a) $arcids($a)] + } else { + puts $f [list $arcstart($a) {} $arcids($a)] + } + } + } err]} { + catch {close $f} + catch {file delete $allccache} + #puts "writing cache failed ($err)" + return 0 + } + set cachearc [expr {$a - 1}] + if {$a > $cachedarcs} { + puts $f "1" + close $f + return 0 + } + return 1 +} + +proc savecache {} { + global nextarc cachedarcs cachearc allccache + + if {$nextarc == $cachedarcs} return + set cachearc 0 + set cachedarcs $nextarc + catch { + set f [open $allccache w] + puts $f [list 1 
$cachedarcs] + run writecache $f + } +} + # Returns 1 if a is an ancestor of b, -1 if b is an ancestor of a, # or 0 if neither is true. proc anc_or_desc {a b} { @@ -7361,6 +7790,7 @@ proc showtag {tag isnew} { } $ctext conf -state normal clear_ctext + settabs 0 set linknum 0 if {![info exists tagcontents($tag)]} { catch { @@ -7384,11 +7814,135 @@ proc doquit {} { destroy . } +proc mkfontdisp {font top which} { + global fontattr fontpref $font + + set fontpref($font) [set $font] + button $top.${font}but -text $which -font optionfont \ + -command [list choosefont $font $which] + label $top.$font -relief flat -font $font \ + -text $fontattr($font,family) -justify left + grid x $top.${font}but $top.$font -sticky w +} + +proc choosefont {font which} { + global fontparam fontlist fonttop fontattr + + set fontparam(which) $which + set fontparam(font) $font + set fontparam(family) [font actual $font -family] + set fontparam(size) $fontattr($font,size) + set fontparam(weight) $fontattr($font,weight) + set fontparam(slant) $fontattr($font,slant) + set top .gitkfont + set fonttop $top + if {![winfo exists $top]} { + font create sample + eval font config sample [font actual $font] + toplevel $top + wm title $top "Gitk font chooser" + label $top.l -textvariable fontparam(which) -font uifont + pack $top.l -side top + set fontlist [lsort [font families]] + frame $top.f + listbox $top.f.fam -listvariable fontlist \ + -yscrollcommand [list $top.f.sb set] + bind $top.f.fam <<ListboxSelect>> selfontfam + scrollbar $top.f.sb -command [list $top.f.fam yview] + pack $top.f.sb -side right -fill y + pack $top.f.fam -side left -fill both -expand 1 + pack $top.f -side top -fill both -expand 1 + frame $top.g + spinbox $top.g.size -from 4 -to 40 -width 4 \ + -textvariable fontparam(size) \ + -validatecommand {string is integer -strict %s} + checkbutton $top.g.bold -padx 5 \ + -font {{Times New Roman} 12 bold} -text "B" -indicatoron 0 \ + -variable fontparam(weight) -onvalue bold -offvalue normal + checkbutton $top.g.ital -padx 5 \ + -font {{Times New Roman} 12 italic} -text "I" -indicatoron 0 \ + -variable fontparam(slant) -onvalue italic -offvalue roman + pack $top.g.size $top.g.bold $top.g.ital -side left + pack $top.g -side top + canvas $top.c -width 150 -height 50 -border 2 -relief sunk \ + -background white + $top.c create text 100 25 -anchor center -text $which -font sample \ + -fill black -tags text + bind $top.c <Configure> [list centertext $top.c] + pack $top.c -side top -fill x + frame $top.buts + button $top.buts.ok -text "OK" -command fontok -default active \ + -font uifont + button $top.buts.can -text "Cancel" -command fontcan -default normal \ + -font uifont + grid $top.buts.ok $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + pack $top.buts -side bottom -fill x + trace add variable fontparam write chg_fontparam + } else { + raise $top + $top.c itemconf text -text $which + } + set i [lsearch -exact $fontlist $fontparam(family)] + if {$i >= 0} { + $top.f.fam selection set $i + $top.f.fam see $i + } +} + +proc centertext {w} { + $w coords text [expr {[winfo width $w] / 2}] [expr {[winfo height $w] / 2}] +} + +proc fontok {} { + global fontparam fontpref prefstop + + set f $fontparam(font) + set fontpref($f) [list $fontparam(family) $fontparam(size)] + if {$fontparam(weight) eq "bold"} { + lappend fontpref($f) "bold" + } + if {$fontparam(slant) eq "italic"} { + lappend fontpref($f) "italic" + } + set w $prefstop.$f + $w conf 
-text $fontparam(family) -font $fontpref($f) + + fontcan +} + +proc fontcan {} { + global fonttop fontparam + + if {[info exists fonttop]} { + catch {destroy $fonttop} + catch {font delete sample} + unset fonttop + unset fontparam + } +} + +proc selfontfam {} { + global fonttop fontparam + + set i [$fonttop.f.fam curselection] + if {$i ne {}} { + set fontparam(family) [$fonttop.f.fam get $i] + } +} + +proc chg_fontparam {v sub op} { + global fontparam + + font config sample -$sub $fontparam($sub) +} + proc doprefs {} { - global maxwidth maxgraphpct diffopts + global maxwidth maxgraphpct global oldprefs prefstop showneartags showlocalchanges global bgcolor fgcolor ctext diffcolors selectbgcolor - global uifont tabstop + global uifont tabstop limitdiffs set top .gitkprefs set prefstop $top @@ -7396,13 +7950,14 @@ proc doprefs {} { raise $top return } - foreach v {maxwidth maxgraphpct diffopts showneartags showlocalchanges} { + foreach v {maxwidth maxgraphpct showneartags showlocalchanges \ + limitdiffs tabstop} { set oldprefs($v) [set $v] } toplevel $top wm title $top "Gitk preferences" label $top.ldisp -text "Commit list display options" - $top.ldisp configure -font $uifont + $top.ldisp configure -font uifont grid $top.ldisp - -sticky w -pady 10 label $top.spacer -text " " label $top.maxwidthl -text "Maximum graph width (lines)" \ @@ -7420,23 +7975,24 @@ proc doprefs {} { grid x $top.showlocal -sticky w label $top.ddisp -text "Diff display options" - $top.ddisp configure -font $uifont + $top.ddisp configure -font uifont grid $top.ddisp - -sticky w -pady 10 - label $top.diffoptl -text "Options for diff program" \ - -font optionfont - entry $top.diffopt -width 20 -textvariable diffopts - grid x $top.diffoptl $top.diffopt -sticky w + label $top.tabstopl -text "Tab spacing" -font optionfont + spinbox $top.tabstop -from 1 -to 20 -width 4 -textvariable tabstop + grid x $top.tabstopl $top.tabstop -sticky w frame $top.ntag label $top.ntag.l -text "Display nearby tags" -font optionfont checkbutton $top.ntag.b -variable showneartags pack $top.ntag.b $top.ntag.l -side left grid x $top.ntag -sticky w - label $top.tabstopl -text "tabstop" -font optionfont - spinbox $top.tabstop -from 1 -to 20 -width 4 -textvariable tabstop - grid x $top.tabstopl $top.tabstop -sticky w + frame $top.ldiff + label $top.ldiff.l -text "Limit diffs to listed paths" -font optionfont + checkbutton $top.ldiff.b -variable limitdiffs + pack $top.ldiff.b $top.ldiff.l -side left + grid x $top.ldiff -sticky w label $top.cdisp -text "Colors: press to choose" - $top.cdisp configure -font $uifont + $top.cdisp configure -font uifont grid $top.cdisp - -sticky w -pady 10 label $top.bg -padx 40 -relief sunk -background $bgcolor button $top.bgbut -text "Background" -font optionfont \ @@ -7467,11 +8023,18 @@ proc doprefs {} { -command [list choosecolor selectbgcolor 0 $top.selbgsep background setselbg] grid x $top.selbgbut $top.selbgsep -sticky w + label $top.cfont -text "Fonts: press to choose" + $top.cfont configure -font uifont + grid $top.cfont - -sticky w -pady 10 + mkfontdisp mainfont $top "Main font" + mkfontdisp textfont $top "Diff display font" + mkfontdisp uifont $top "User interface font" + frame $top.buts button $top.buts.ok -text "OK" -command prefsok -default active - $top.buts.ok configure -font $uifont + $top.buts.ok configure -font uifont button $top.buts.can -text "Cancel" -command prefscan -default normal - $top.buts.can configure -font $uifont + $top.buts.can configure -font uifont grid $top.buts.ok $top.buts.can grid 
columnconfigure $top.buts 0 -weight 1 -uniform a grid columnconfigure $top.buts 1 -weight 1 -uniform a @@ -7519,24 +8082,48 @@ proc setfg {c} { } proc prefscan {} { - global maxwidth maxgraphpct diffopts - global oldprefs prefstop showneartags showlocalchanges + global oldprefs prefstop - foreach v {maxwidth maxgraphpct diffopts showneartags showlocalchanges} { + foreach v {maxwidth maxgraphpct showneartags showlocalchanges \ + limitdiffs tabstop} { + global $v set $v $oldprefs($v) } catch {destroy $prefstop} unset prefstop + fontcan } proc prefsok {} { global maxwidth maxgraphpct global oldprefs prefstop showneartags showlocalchanges - global charspc ctext tabstop + global fontpref mainfont textfont uifont + global limitdiffs treediffs catch {destroy $prefstop} unset prefstop - $ctext configure -tabs "[expr {$tabstop * $charspc}]" + fontcan + set fontchanged 0 + if {$mainfont ne $fontpref(mainfont)} { + set mainfont $fontpref(mainfont) + parsefont mainfont $mainfont + eval font configure mainfont [fontflags mainfont] + eval font configure mainfontbold [fontflags mainfont 1] + setcoords + set fontchanged 1 + } + if {$textfont ne $fontpref(textfont)} { + set textfont $fontpref(textfont) + parsefont textfont $textfont + eval font configure textfont [fontflags textfont] + eval font configure textfontbold [fontflags textfont 1] + } + if {$uifont ne $fontpref(uifont)} { + set uifont $fontpref(uifont) + parsefont uifont $uifont + eval font configure uifont [fontflags uifont] + } + settabs if {$showlocalchanges != $oldprefs(showlocalchanges)} { if {$showlocalchanges} { doshowlocalchanges @@ -7544,10 +8131,15 @@ proc prefsok {} { dohidelocalchanges } } - if {$maxwidth != $oldprefs(maxwidth) + if {$limitdiffs != $oldprefs(limitdiffs)} { + # treediffs elements are limited by path + catch {unset treediffs} + } + if {$fontchanged || $maxwidth != $oldprefs(maxwidth) || $maxgraphpct != $oldprefs(maxgraphpct)} { redisplay - } elseif {$showneartags != $oldprefs(showneartags)} { + } elseif {$showneartags != $oldprefs(showneartags) || + $limitdiffs != $oldprefs(limitdiffs)} { reselectline } } @@ -7833,9 +8425,15 @@ proc tcl_encoding {enc} { return {} } +# First check that Tcl/Tk is recent enough +if {[catch {package require Tk 8.4} err]} { + show_error {} . "Sorry, gitk cannot run with this version of Tcl/Tk.\n\ + Gitk requires at least Tcl/Tk 8.4." + exit 1 +} + # defaults... set datemode 0 -set diffopts "-U 5 -p" set wrcomcmd "git diff-tree --stdin -p --pretty" set gitencoding {} @@ -7859,15 +8457,16 @@ set maxgraphpct 50 set maxwidth 16 set revlistorder 0 set fastdate 0 -set uparrowlen 7 -set downarrowlen 7 -set mingaplen 30 +set uparrowlen 5 +set downarrowlen 5 +set mingaplen 100 set cmitmode "patch" set wrapcomment "none" set showneartags 1 set maxrefs 20 set maxlinelen 200 set showlocalchanges 1 +set limitdiffs 1 set datetimeformat "%Y-%m-%d %H:%M:%S" set colors {green red blue magenta darkgrey brown orange} @@ -7881,6 +8480,17 @@ catch {source ~/.gitk} font create optionfont -family sans-serif -size -12 +parsefont mainfont $mainfont +eval font create mainfont [fontflags mainfont] +eval font create mainfontbold [fontflags mainfont 1] + +parsefont textfont $textfont +eval font create textfont [fontflags textfont] +eval font create textfontbold [fontflags textfont 1] + +parsefont uifont $uifont +eval font create uifont [fontflags uifont] + # check that we can find a .git directory somewhere... if {[catch {set gitdir [gitdir]}]} { show_error {} . "Cannot find a git repository here." 
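The getallcommits/getallclines code above now feeds gitk's topology engine from "git rev-list --parents", whose output is one commit per line: the 40-hex commit id followed by the ids of its parents, space-separated. A minimal standalone sketch of reading that format (the function and variable names are made up for the example; this is not gitk code):

#include <stdio.h>
#include <string.h>

#define HEX_SZ 40

/* Split one rev-list --parents line into the commit id and its parents.
 * Returns the number of parents found, or -1 on a malformed line. */
static int parse_parents_line(char *line, char *id,
			      char parents[][HEX_SZ + 1], int max_parents)
{
	int n = 0;
	char *tok = strtok(line, " \n");

	if (!tok || strlen(tok) != HEX_SZ)
		return -1;
	strcpy(id, tok);
	while ((tok = strtok(NULL, " \n")) != NULL && n < max_parents) {
		if (strlen(tok) != HEX_SZ)
			return -1;
		strcpy(parents[n++], tok);
	}
	return n;
}

int main(void)
{
	char line[4096], id[HEX_SZ + 1], parents[16][HEX_SZ + 1];

	/* e.g. feed it:  git rev-list --parents --all | ./a.out */
	while (fgets(line, sizeof(line), stdin)) {
		int n = parse_parents_line(line, id, parents, 16);
		if (n >= 0)
			printf("%s has %d parent(s)\n", id, n);
	}
	return 0;
}

Piping "git rev-list --parents --all" into such a reader yields, per line, the same id/parents split that getallclines performs with lindex and lrange.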
@@ -7891,6 +8501,7 @@ if {![file isdirectory $gitdir]} { exit 1 } +set mergeonly 0 set revtreeargs {} set cmdline_files {} set i 0 @@ -7898,6 +8509,10 @@ foreach arg $argv { switch -- $arg { "" { } "-d" { set datemode 1 } + "--merge" { + set mergeonly 1 + lappend revtreeargs $arg + } "--" { set cmdline_files [lrange $argv [expr {$i + 1}] end] break @@ -7938,9 +8553,44 @@ if {$i >= [llength $argv] && $revtreeargs ne {}} { } } +if {$mergeonly} { + # find the list of unmerged files + set mlist {} + set nr_unmerged 0 + if {[catch { + set fd [open "| git ls-files -u" r] + } err]} { + show_error {} . "Couldn't get list of unmerged files: $err" + exit 1 + } + while {[gets $fd line] >= 0} { + set i [string first "\t" $line] + if {$i < 0} continue + set fname [string range $line [expr {$i+1}] end] + if {[lsearch -exact $mlist $fname] >= 0} continue + incr nr_unmerged + if {$cmdline_files eq {} || [path_filter $cmdline_files $fname]} { + lappend mlist $fname + } + } + catch {close $fd} + if {$mlist eq {}} { + if {$nr_unmerged == 0} { + show_error {} . "No files selected: --merge specified but\ + no files are unmerged." + } else { + show_error {} . "No files selected: --merge specified but\ + no unmerged files are within file limit." + } + exit 1 + } + set cmdline_files $mlist +} + set nullid "0000000000000000000000000000000000000000" set nullid2 "0000000000000000000000000000000000000001" +set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}] set runq {} set history {} @@ -7948,18 +8598,23 @@ set historyindex 0 set fh_serial 0 set nhl_names {} set highlight_paths {} +set findpattern {} set searchdirn -forwards set boldrows {} set boldnamerows {} set diffelide {0 0} set markingmatches 0 - -set optim_delay 16 +set linkentercount 0 +set need_redisplay 0 +set nrows_drawn 0 +set firsttabstop 0 set nextviewnum 1 set curview 0 set selectedview 0 set selectedhlview None +set highlight_related None +set highlight_files {} set viewfiles(0) {} set viewperm(0) 0 set viewargs(0) {} @@ -7968,7 +8623,6 @@ set cmdlineok 0 set stopped 0 set stuffsaved 0 set patchnum 0 -set lookingforhead 0 set localirow -1 set localfrow -1 set lserial 0 diff --git a/gitweb/INSTALL b/gitweb/INSTALL index 6328e26f56..9cd5b0a2b1 100644 --- a/gitweb/INSTALL +++ b/gitweb/INSTALL @@ -116,7 +116,7 @@ GITWEB_CONFIG file: $feature{'pickaxe'}{'default'} = [1]; $feature{'pickaxe'}{'override'} = 1; - $feature{'snapshot'}{'default'} = ['x-gzip', 'gz', 'gzip']; + $feature{'snapshot'}{'default'} = ['zip', 'tgz']; $feature{'snapshot'}{'override'} = 1; diff --git a/gitweb/gitweb.perl b/gitweb/gitweb.perl index 48e21dad6c..2e00756276 100755 --- a/gitweb/gitweb.perl +++ b/gitweb/gitweb.perl @@ -846,6 +846,23 @@ sub chop_str { return "$body$tail"; } +# takes the same arguments as chop_str, but also wraps a <span> around the +# result with a title attribute if it does get chopped. Additionally, the +# string is HTML-escaped. +sub chop_and_escape_str { + my $str = shift; + my $len = shift; + my $add_len = shift || 10; + + my $chopped = chop_str($str, $len, $add_len); + if ($chopped eq $str) { + return esc_html($chopped); + } else { + return qq{<span title="} . esc_html($str) . qq{">} . + esc_html($chopped) . qq{</span>}; + } +} + ## ---------------------------------------------------------------------- ## functions returning short strings @@ -2000,6 +2017,19 @@ sub parse_difftree_raw_line { return wantarray ? 
%res : \%res; } +# wrapper: return parsed line of git-diff-tree "raw" output +# (the argument might be raw line, or parsed info) +sub parsed_difftree_line { + my $line_or_ref = shift; + + if (ref($line_or_ref) eq "HASH") { + # pre-parsed (or generated by hand) + return $line_or_ref; + } else { + return parse_difftree_raw_line($line_or_ref); + } +} + # parse line of git-ls-tree output sub parse_ls_tree_line ($;%) { my $line = shift; @@ -2043,6 +2073,7 @@ sub parse_from_to_diffinfo { } } } else { + # ordinary (not combined) diff $from->{'file'} = $diffinfo->{'from_file'} || $diffinfo->{'file'}; if ($diffinfo->{'status'} ne "A") { # not new (added) file $from->{'href'} = href(action=>"blob", hash_base=>$hash_parent, @@ -2766,6 +2797,7 @@ sub git_print_tree_entry { ## ...................................................................... ## functions printing large fragments of HTML +# get pre-image filenames for merge (combined) diff sub fill_from_file_info { my ($diff, @parents) = @_; @@ -2782,28 +2814,25 @@ sub fill_from_file_info { return $diff; } -# parameters can be strings, or references to arrays of strings -sub from_ids_eq { - my ($a, $b) = @_; - - if (ref($a) eq "ARRAY" && ref($b) eq "ARRAY" && @$a == @$b) { - for (my $i = 0; $i < @$a; ++$i) { - return 0 unless ($a->[$i] eq $b->[$i]); - } - return 1; - } elsif (!ref($a) && !ref($b)) { - return $a eq $b; - } else { - return 0; - } -} - +# is current raw difftree line of file deletion sub is_deleted { my $diffinfo = shift; return $diffinfo->{'to_id'} eq ('0' x 40); } +# does patch correspond to [previous] difftree raw line +# $diffinfo - hashref of parsed raw diff format +# $patchinfo - hashref of parsed patch diff format +# (the same keys as in $diffinfo) +sub is_patch_split { + my ($diffinfo, $patchinfo) = @_; + + return defined $diffinfo && defined $patchinfo + && ($diffinfo->{'to_file'} || $diffinfo->{'file'}) eq $patchinfo->{'to_file'}; +} + + sub git_difftree_body { my ($difftree, $hash, @parents) = @_; my ($parent) = $parents[0]; @@ -2840,13 +2869,7 @@ sub git_difftree_body { my $alternate = 1; my $patchno = 0; foreach my $line (@{$difftree}) { - my $diff; - if (ref($line) eq "HASH") { - # pre-parsed (or generated by hand) - $diff = $line; - } else { - $diff = parse_difftree_raw_line($line); - } + my $diff = parsed_difftree_line($line); if ($alternate) { print "<tr class=\"dark\">\n"; @@ -3117,10 +3140,12 @@ sub git_patchset_body { my ($fd, $difftree, $hash, @hash_parents) = @_; my ($hash_parent) = $hash_parents[0]; + my $is_combined = (@hash_parents > 1); my $patch_idx = 0; my $patch_number = 0; my $patch_line; my $diffinfo; + my $to_name; my (%from, %to); print "<div class=\"patchset\">\n"; @@ -3134,73 +3159,46 @@ sub git_patchset_body { PATCH: while ($patch_line) { - my @diff_header; - my ($from_id, $to_id); - - # git diff header - #assert($patch_line =~ m/^diff /) if DEBUG; - #assert($patch_line !~ m!$/$!) 
if DEBUG; # is chomp-ed - $patch_number++; - push @diff_header, $patch_line; - - # extended diff header - EXTENDED_HEADER: - while ($patch_line = <$fd>) { - chomp $patch_line; - - last EXTENDED_HEADER if ($patch_line =~ m/^--- |^diff /); - - if ($patch_line =~ m/^index ([0-9a-fA-F]{40})..([0-9a-fA-F]{40})/) { - $from_id = $1; - $to_id = $2; - } elsif ($patch_line =~ m/^index ((?:[0-9a-fA-F]{40},)+[0-9a-fA-F]{40})..([0-9a-fA-F]{40})/) { - $from_id = [ split(',', $1) ]; - $to_id = $2; - } - push @diff_header, $patch_line; + # parse "git diff" header line + if ($patch_line =~ m/^diff --git (\"(?:[^\\\"]*(?:\\.[^\\\"]*)*)\"|[^ "]*) (.*)$/) { + # $1 is from_name, which we do not use + $to_name = unquote($2); + $to_name =~ s!^b/!!; + } elsif ($patch_line =~ m/^diff --(cc|combined) ("?.*"?)$/) { + # $1 is 'cc' or 'combined', which we do not use + $to_name = unquote($2); + } else { + $to_name = undef; } - my $last_patch_line = $patch_line; # check if current patch belong to current raw line # and parse raw git-diff line if needed - if (defined $diffinfo && - defined $from_id && defined $to_id && - from_ids_eq($diffinfo->{'from_id'}, $from_id) && - $diffinfo->{'to_id'} eq $to_id) { + if (is_patch_split($diffinfo, { 'to_file' => $to_name })) { # this is continuation of a split patch print "<div class=\"patch cont\">\n"; } else { # advance raw git-diff output if needed $patch_idx++ if defined $diffinfo; - # compact combined diff output can have some patches skipped - # find which patch (using pathname of result) we are at now - my $to_name; - if ($diff_header[0] =~ m!^diff --cc "?(.*)"?$!) { - $to_name = $1; - } - - do { - # read and prepare patch information - if (ref($difftree->[$patch_idx]) eq "HASH") { - # pre-parsed (or generated by hand) - $diffinfo = $difftree->[$patch_idx]; - } else { - $diffinfo = parse_difftree_raw_line($difftree->[$patch_idx]); - } + # read and prepare patch information + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); - # check if current raw line has no patch (it got simplified) - if (defined $to_name && $to_name ne $diffinfo->{'to_file'}) { + # compact combined diff output can have some patches skipped + # find which patch (using pathname of result) we are at now; + if ($is_combined) { + while ($to_name ne $diffinfo->{'to_file'}) { print "<div class=\"patch\" id=\"patch". ($patch_idx+1) ."\">\n" . format_diff_cc_simplified($diffinfo, @hash_parents) . "</div>\n"; # class="patch" $patch_idx++; $patch_number++; + + last if $patch_idx > $#$difftree; + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); } - } until (!defined $to_name || $to_name eq $diffinfo->{'to_file'} || - $patch_idx > $#$difftree); + } # modifies %from, %to hashes parse_from_to_diffinfo($diffinfo, \%from, \%to, @hash_parents); @@ -3210,30 +3208,36 @@ sub git_patchset_body { print "<div class=\"patch\" id=\"patch". ($patch_idx+1) ."\">\n"; } + # git diff header + #assert($patch_line =~ m/^diff /) if DEBUG; + #assert($patch_line !~ m!$/$!) 
if DEBUG; # is chomp-ed + $patch_number++; # print "git diff" header - $patch_line = shift @diff_header; print format_git_diff_header_line($patch_line, $diffinfo, \%from, \%to); # print extended diff header - print "<div class=\"diff extended_header\">\n" if (@diff_header > 0); + print "<div class=\"diff extended_header\">\n"; EXTENDED_HEADER: - foreach $patch_line (@diff_header) { + while ($patch_line = <$fd>) { + chomp $patch_line; + + last EXTENDED_HEADER if ($patch_line =~ m/^--- |^diff /); + print format_extended_diff_header_line($patch_line, $diffinfo, \%from, \%to); } - print "</div>\n" if (@diff_header > 0); # class="diff extended_header" + print "</div>\n"; # class="diff extended_header" # from-file/to-file diff header - $patch_line = $last_patch_line; if (! $patch_line) { print "</div>\n"; # class="patch" last PATCH; } next PATCH if ($patch_line =~ m/^diff /); #assert($patch_line =~ m/^---/) if DEBUG; - #assert($patch_line eq $last_patch_line) if DEBUG; + my $last_patch_line = $patch_line; $patch_line = <$fd>; chomp $patch_line; #assert($patch_line =~ m/^\+\+\+/) if DEBUG; @@ -3258,16 +3262,11 @@ sub git_patchset_body { # for compact combined (--cc) format, with chunk and patch simpliciaction # patchset might be empty, but there might be unprocessed raw lines - for ($patch_idx++ if $patch_number > 0; + for (++$patch_idx if $patch_number > 0; $patch_idx < @$difftree; - $patch_idx++) { + ++$patch_idx) { # read and prepare patch information - if (ref($difftree->[$patch_idx]) eq "HASH") { - # pre-parsed (or generated by hand) - $diffinfo = $difftree->[$patch_idx]; - } else { - $diffinfo = parse_difftree_raw_line($difftree->[$patch_idx]); - } + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); # generate anchor for "patch" links in difftree / whatchanged part print "<div class=\"patch\" id=\"patch". ($patch_idx+1) ."\">\n" . @@ -3395,7 +3394,7 @@ sub git_project_list_body { "<td>" . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary"), -class => "list", -title => $pr->{'descr_long'}}, esc_html($pr->{'descr'})) . "</td>\n" . - "<td><i>" . esc_html(chop_str($pr->{'owner'}, 15)) . "</i></td>\n"; + "<td><i>" . chop_and_escape_str($pr->{'owner'}, 15) . "</i></td>\n"; print "<td class=\"". age_class($pr->{'age'}) . "\">" . (defined $pr->{'age_string'} ? $pr->{'age_string'} : "No commits") . "</td>\n" . "<td class=\"link\">" . @@ -3437,9 +3436,10 @@ sub git_shortlog_body { print "<tr class=\"light\">\n"; } $alternate ^= 1; + my $author = chop_and_escape_str($co{'author_name'}, 10); # git_summary() used print "<td><i>$co{'age_string'}</i></td>\n" . print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" . - "<td><i>" . esc_html(chop_str($co{'author_name'}, 10)) . "</i></td>\n" . + "<td><i>" . $author . "</i></td>\n" . "<td>"; print format_subject_html($co{'title'}, $co{'title_short'}, href(action=>"commit", hash=>$commit), $ref); @@ -3487,9 +3487,10 @@ sub git_history_body { print "<tr class=\"light\">\n"; } $alternate ^= 1; + # shortlog uses chop_str($co{'author_name'}, 10) + my $author = chop_and_escape_str($co{'author_name'}, 15, 3); print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" . - # shortlog uses chop_str($co{'author_name'}, 10) - "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 3)) . "</i></td>\n" . + "<td><i>" . $author . "</i></td>\n" . 
"<td>"; # originally git_history used chop_str($co{'title'}, 50) print format_subject_html($co{'title'}, $co{'title_short'}, @@ -3643,11 +3644,12 @@ sub git_search_grep_body { print "<tr class=\"light\">\n"; } $alternate ^= 1; + my $author = chop_and_escape_str($co{'author_name'}, 15, 5); print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" . - "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" . + "<td><i>" . $author . "</i></td>\n" . "<td>" . $cgi->a({-href => href(action=>"commit", hash=>$co{'id'}), -class => "list subject"}, - esc_html(chop_str($co{'title'}, 50)) . "<br/>"); + chop_and_escape_str($co{'title'}, 50) . "<br/>"); my $comment = $co{'comment'}; foreach my $line (@$comment) { if ($line =~ m/^(.*)($search_regexp)(.*)$/i) { @@ -5157,12 +5159,13 @@ sub git_search { print "<tr class=\"light\">\n"; } $alternate ^= 1; + my $author = chop_and_escape_str($co{'author_name'}, 15, 5); print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" . - "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" . + "<td><i>" . $author . "</i></td>\n" . "<td>" . $cgi->a({-href => href(action=>"commit", hash=>$co{'id'}), -class => "list subject"}, - esc_html(chop_str($co{'title'}, 50)) . "<br/>"); + chop_and_escape_str($co{'title'}, 50) . "<br/>"); while (my $setref = shift @files) { my %set = %$setref; print $cgi->a({-href => href(action=>"blob", hash_base=>$co{'id'}, diff --git a/hash.c b/hash.c new file mode 100644 index 0000000000..7b492d4fc0 --- /dev/null +++ b/hash.c @@ -0,0 +1,110 @@ +/* + * Some generic hashing helpers. + */ +#include "cache.h" +#include "hash.h" + +/* + * Look up a hash entry in the hash table. Return the pointer to + * the existing entry, or the empty slot if none existed. The caller + * can then look at the (*ptr) to see whether it existed or not. + */ +static struct hash_table_entry *lookup_hash_entry(unsigned int hash, struct hash_table *table) +{ + unsigned int size = table->size, nr = hash % size; + struct hash_table_entry *array = table->array; + + while (array[nr].ptr) { + if (array[nr].hash == hash) + break; + nr++; + if (nr >= size) + nr = 0; + } + return array + nr; +} + + +/* + * Insert a new hash entry pointer into the table. + * + * If that hash entry already existed, return the pointer to + * the existing entry (and the caller can create a list of the + * pointers or do anything else). If it didn't exist, return + * NULL (and the caller knows the pointer has been inserted). 
+ */ +static void **insert_hash_entry(unsigned int hash, void *ptr, struct hash_table *table) +{ + struct hash_table_entry *entry = lookup_hash_entry(hash, table); + + if (!entry->ptr) { + entry->ptr = ptr; + entry->hash = hash; + table->nr++; + return NULL; + } + return &entry->ptr; +} + +static void grow_hash_table(struct hash_table *table) +{ + unsigned int i; + unsigned int old_size = table->size, new_size; + struct hash_table_entry *old_array = table->array, *new_array; + + new_size = alloc_nr(old_size); + new_array = xcalloc(sizeof(struct hash_table_entry), new_size); + table->size = new_size; + table->array = new_array; + table->nr = 0; + for (i = 0; i < old_size; i++) { + unsigned int hash = old_array[i].hash; + void *ptr = old_array[i].ptr; + if (ptr) + insert_hash_entry(hash, ptr, table); + } + free(old_array); +} + +void *lookup_hash(unsigned int hash, struct hash_table *table) +{ + if (!table->array) + return NULL; + return &lookup_hash_entry(hash, table)->ptr; +} + +void **insert_hash(unsigned int hash, void *ptr, struct hash_table *table) +{ + unsigned int nr = table->nr; + if (nr >= table->size/2) + grow_hash_table(table); + return insert_hash_entry(hash, ptr, table); +} + +int for_each_hash(struct hash_table *table, int (*fn)(void *)) +{ + int sum = 0; + unsigned int i; + unsigned int size = table->size; + struct hash_table_entry *array = table->array; + + for (i = 0; i < size; i++) { + void *ptr = array->ptr; + array++; + if (ptr) { + int val = fn(ptr); + if (val < 0) + return val; + sum += val; + } + } + return sum; +} + +void free_hash(struct hash_table *table) +{ + free(table->array); + table->array = NULL; + table->size = 0; + table->nr = 0; +} diff --git a/hash.h b/hash.h new file mode 100644 index 0000000000..a8b0fbb5b5 --- /dev/null +++ b/hash.h @@ -0,0 +1,43 @@ +#ifndef HASH_H +#define HASH_H + +/* + * These are some simple generic hash table helper functions. + * Not necessarily suitable for all users, but good for things + * where you want to just keep track of a list of things, and + * have a good hash to use on them. + * + * It keeps the hash table at roughly 50-75% free, so the memory + * cost of the hash table itself is roughly + * + * 3 * 2*sizeof(void *) * nr_of_objects + * + * bytes. + * + * FIXME: on 64-bit architectures, we waste memory. It would be + * good to have just 32-bit pointers, requiring a special allocator + * for hashed entries or something. 
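 *
 * As a usage sketch only (the item structure, the hash function and the
 * helper names below are made up for the example), a caller typically
 * stores one pointer per hash value and resolves collisions itself,
 * for instance by chaining items whose hash values happen to be equal:
 *
 *	struct item {
 *		struct item *next;
 *		char name[64];
 *	};
 *	static struct hash_table items;
 *
 *	static unsigned int name_hash(const char *s)
 *	{
 *		unsigned int h = 0;
 *		while (*s)
 *			h = h * 31 + (unsigned char)*s++;
 *		return h;
 *	}
 *
 *	static void add_item(struct item *it)
 *	{
 *		void **pos = insert_hash(name_hash(it->name), it, &items);
 *
 *		it->next = NULL;
 *		if (pos) {
 *			it->next = *pos;
 *			*pos = it;
 *		}
 *	}
 *
 *	static struct item *lookup_item(const char *name)
 *	{
 *		void **pos = lookup_hash(name_hash(name), &items);
 *		return pos ? *pos : NULL;
 *	}
 *
 * A static, zero-filled hash_table starts out in the same state that
 * init_hash() establishes.  lookup_hash() hands back the head of the
 * chain stored for that hash value (or NULL), so the caller still has
 * to compare names while following the next pointers, and
 * for_each_hash() visits one stored pointer per occupied slot, i.e.
 * one chain head.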
+ */ +struct hash_table_entry { + unsigned int hash; + void *ptr; +}; + +struct hash_table { + unsigned int size, nr; + struct hash_table_entry *array; +}; + +extern void *lookup_hash(unsigned int hash, struct hash_table *table); +extern void **insert_hash(unsigned int hash, void *ptr, struct hash_table *table); +extern int for_each_hash(struct hash_table *table, int (*fn)(void *)); +extern void free_hash(struct hash_table *table); + +static inline void init_hash(struct hash_table *table) +{ + table->size = 0; + table->nr = 0; + table->array = NULL; +} + +#endif @@ -37,24 +37,25 @@ static inline void mput_char(char c, unsigned int num) putchar(c); } -static struct cmdname { - size_t len; - char name[1]; -} **cmdname; -static int cmdname_alloc, cmdname_cnt; - -static void add_cmdname(const char *name, int len) +static struct cmdnames { + int alloc; + int cnt; + struct cmdname { + size_t len; + char name[1]; + } **names; +} main_cmds, other_cmds; + +static void add_cmdname(struct cmdnames *cmds, const char *name, int len) { - struct cmdname *ent; - if (cmdname_alloc <= cmdname_cnt) { - cmdname_alloc = cmdname_alloc + 200; - cmdname = xrealloc(cmdname, cmdname_alloc * sizeof(*cmdname)); - } - ent = xmalloc(sizeof(*ent) + len); + struct cmdname *ent = xmalloc(sizeof(*ent) + len); + ent->len = len; memcpy(ent->name, name, len); ent->name[len] = 0; - cmdname[cmdname_cnt++] = ent; + + ALLOC_GROW(cmds->names, cmds->cnt + 1, cmds->alloc); + cmds->names[cmds->cnt++] = ent; } static int cmdname_compare(const void *a_, const void *b_) @@ -64,7 +65,42 @@ static int cmdname_compare(const void *a_, const void *b_) return strcmp(a->name, b->name); } -static void pretty_print_string_list(struct cmdname **cmdname, int longest) +static void uniq(struct cmdnames *cmds) +{ + int i, j; + + if (!cmds->cnt) + return; + + for (i = j = 1; i < cmds->cnt; i++) + if (strcmp(cmds->names[i]->name, cmds->names[i-1]->name)) + cmds->names[j++] = cmds->names[i]; + + cmds->cnt = j; +} + +static void exclude_cmds(struct cmdnames *cmds, struct cmdnames *excludes) { + int ci, cj, ei; + int cmp; + + ci = cj = ei = 0; + while (ci < cmds->cnt && ei < excludes->cnt) { + cmp = strcmp(cmds->names[ci]->name, excludes->names[ei]->name); + if (cmp < 0) + cmds->names[cj++] = cmds->names[ci++]; + else if (cmp == 0) + ci++, ei++; + else if (cmp > 0) + ei++; + } + + while (ci < cmds->cnt) + cmds->names[cj++] = cmds->names[ci++]; + + cmds->cnt = cj; +} + +static void pretty_print_string_list(struct cmdnames *cmds, int longest) { int cols = 1, rows; int space = longest + 1; /* min 1 SP between words */ @@ -73,9 +109,7 @@ static void pretty_print_string_list(struct cmdname **cmdname, int longest) if (space < max_cols) cols = max_cols / space; - rows = (cmdname_cnt + cols - 1) / cols; - - qsort(cmdname, cmdname_cnt, sizeof(*cmdname), cmdname_compare); + rows = (cmds->cnt + cols - 1) / cols; for (i = 0; i < rows; i++) { printf(" "); @@ -83,71 +117,112 @@ static void pretty_print_string_list(struct cmdname **cmdname, int longest) for (j = 0; j < cols; j++) { int n = j * rows + i; int size = space; - if (n >= cmdname_cnt) + if (n >= cmds->cnt) break; - if (j == cols-1 || n + rows >= cmdname_cnt) + if (j == cols-1 || n + rows >= cmds->cnt) size = 1; - printf("%-*s", size, cmdname[n]->name); + printf("%-*s", size, cmds->names[n]->name); } putchar('\n'); } } -static void list_commands(const char *exec_path, const char *pattern) +static unsigned int list_commands_in_dir(struct cmdnames *cmds, + const char *path) { unsigned int longest = 0; - char 
path[PATH_MAX]; - int dirlen; - DIR *dir = opendir(exec_path); + const char *prefix = "git-"; + int prefix_len = strlen(prefix); + DIR *dir = opendir(path); struct dirent *de; - if (!dir) { - fprintf(stderr, "git: '%s': %s\n", exec_path, strerror(errno)); - exit(1); - } - - dirlen = strlen(exec_path); - if (PATH_MAX - 20 < dirlen) { - fprintf(stderr, "git: insanely long exec-path '%s'\n", - exec_path); - exit(1); - } - - memcpy(path, exec_path, dirlen); - path[dirlen++] = '/'; + if (!dir || chdir(path)) + return 0; while ((de = readdir(dir)) != NULL) { struct stat st; int entlen; - if (prefixcmp(de->d_name, "git-")) + if (prefixcmp(de->d_name, prefix)) continue; - strcpy(path+dirlen, de->d_name); - if (stat(path, &st) || /* stat, not lstat */ + + if (stat(de->d_name, &st) || /* stat, not lstat */ !S_ISREG(st.st_mode) || !(st.st_mode & S_IXUSR)) continue; - entlen = strlen(de->d_name); + entlen = strlen(de->d_name) - prefix_len; if (has_extension(de->d_name, ".exe")) entlen -= 4; if (longest < entlen) longest = entlen; - add_cmdname(de->d_name + 4, entlen-4); + add_cmdname(cmds, de->d_name + prefix_len, entlen); } closedir(dir); - printf("git commands available in '%s'\n", exec_path); - printf("----------------------------"); - mput_char('-', strlen(exec_path)); - putchar('\n'); - pretty_print_string_list(cmdname, longest - 4); - putchar('\n'); + return longest; } -static void list_common_cmds_help(void) +static void list_commands(void) +{ + unsigned int longest = 0; + unsigned int len; + const char *env_path = getenv("PATH"); + char *paths, *path, *colon; + const char *exec_path = git_exec_path(); + + if (exec_path) + longest = list_commands_in_dir(&main_cmds, exec_path); + + if (!env_path) { + fprintf(stderr, "PATH not set\n"); + exit(1); + } + + path = paths = xstrdup(env_path); + while (1) { + if ((colon = strchr(path, ':'))) + *colon = 0; + + len = list_commands_in_dir(&other_cmds, path); + if (len > longest) + longest = len; + + if (!colon) + break; + path = colon + 1; + } + free(paths); + + qsort(main_cmds.names, main_cmds.cnt, + sizeof(*main_cmds.names), cmdname_compare); + uniq(&main_cmds); + + qsort(other_cmds.names, other_cmds.cnt, + sizeof(*other_cmds.names), cmdname_compare); + uniq(&other_cmds); + exclude_cmds(&other_cmds, &main_cmds); + + if (main_cmds.cnt) { + printf("available git commands in '%s'\n", exec_path); + printf("----------------------------"); + mput_char('-', strlen(exec_path)); + putchar('\n'); + pretty_print_string_list(&main_cmds, longest); + putchar('\n'); + } + + if (other_cmds.cnt) { + printf("git commands available from elsewhere on your $PATH\n"); + printf("---------------------------------------------------\n"); + pretty_print_string_list(&other_cmds, longest); + putchar('\n'); + } +} + +void list_common_cmds_help(void) { int i, longest = 0; @@ -185,8 +260,7 @@ static void show_man_page(const char *git_cmd) void help_unknown_cmd(const char *cmd) { - printf("git: '%s' is not a git-command\n\n", cmd); - list_common_cmds_help(); + fprintf(stderr, "git: '%s' is not a git-command. See 'git --help'.\n", cmd); exit(1); } @@ -199,19 +273,17 @@ int cmd_version(int argc, const char **argv, const char *prefix) int cmd_help(int argc, const char **argv, const char *prefix) { const char *help_cmd = argc > 1 ? 
argv[1] : NULL; - const char *exec_path = git_exec_path(); if (!help_cmd) { printf("usage: %s\n\n", git_usage_string); list_common_cmds_help(); - exit(1); + exit(0); } else if (!strcmp(help_cmd, "--all") || !strcmp(help_cmd, "-a")) { printf("usage: %s\n\n", git_usage_string); - if(exec_path) - list_commands(exec_path, "git-*"); - exit(1); + list_commands(); + exit(0); } else diff --git a/http-push.c b/http-push.c index 276e1eb1d9..c02a3af634 100644 --- a/http-push.c +++ b/http-push.c @@ -1,7 +1,6 @@ #include "cache.h" #include "commit.h" #include "pack.h" -#include "fetch.h" #include "tag.h" #include "blob.h" #include "http.h" @@ -14,7 +13,7 @@ #include <expat.h> static const char http_push_usage[] = -"git-http-push [--all] [--force] [--verbose] <remote> [<head>...]\n"; +"git-http-push [--all] [--dry-run] [--force] [--verbose] <remote> [<head>...]\n"; #ifndef XML_STATUS_OK enum XML_Status { @@ -81,6 +80,7 @@ static struct curl_slist *default_headers; static int push_verbosely; static int push_all; static int force_all; +static int dry_run; static struct object_list *objects; @@ -795,38 +795,27 @@ static void finish_request(struct transfer_request *request) } #ifdef USE_CURL_MULTI -void fill_active_slots(void) +static int fill_active_slot(void *unused) { struct transfer_request *request = request_queue_head; - struct transfer_request *next; - struct active_request_slot *slot = active_queue_head; - int num_transfers; if (aborted) - return; + return 0; - while (active_requests < max_requests && request != NULL) { - next = request->next; + for (request = request_queue_head; request; request = request->next) { if (request->state == NEED_FETCH) { start_fetch_loose(request); + return 1; } else if (pushing && request->state == NEED_PUSH) { if (remote_dir_exists[request->obj->sha1[0]] == 1) { start_put(request); } else { start_mkcol(request); } - curl_multi_perform(curlm, &num_transfers); - } - request = next; - } - - while (slot != NULL) { - if (!slot->in_use && slot->curl != NULL) { - curl_easy_cleanup(slot->curl); - slot->curl = NULL; + return 1; } - slot = slot->next; } + return 0; } #endif @@ -2314,6 +2303,10 @@ int main(int argc, char **argv) force_all = 1; continue; } + if (!strcmp(arg, "--dry-run")) { + dry_run = 1; + continue; + } if (!strcmp(arg, "--verbose")) { push_verbosely = 1; continue; @@ -2455,7 +2448,8 @@ int main(int argc, char **argv) if (strcmp(ref->name, ref->peer_ref->name)) fprintf(stderr, " using '%s'", ref->peer_ref->name); fprintf(stderr, "\n from %s\n to %s\n", old_hex, new_hex); - + if (dry_run) + continue; /* Lock remote branch ref */ ref_lock = lock_remote(ref->name, LOCK_TIME); @@ -2502,6 +2496,7 @@ int main(int argc, char **argv) objects_to_send); #ifdef USE_CURL_MULTI fill_active_slots(); + add_fill_function(NULL, fill_active_slot); #endif finish_all_active_slots(); @@ -2522,7 +2517,8 @@ int main(int argc, char **argv) if (remote->has_info_refs && new_refs) { if (info_ref_lock && remote->can_update_info_refs) { fprintf(stderr, "Updating remote server info\n"); - update_remote_info_refs(info_ref_lock); + if (!dry_run) + update_remote_info_refs(info_ref_lock); } else { fprintf(stderr, "Unable to update server info\n"); } diff --git a/http-fetch.c b/http-walker.c index 202fae0ba8..444aebf526 100644 --- a/http-fetch.c +++ b/http-walker.c @@ -1,19 +1,12 @@ #include "cache.h" #include "commit.h" #include "pack.h" -#include "fetch.h" +#include "walker.h" #include "http.h" #define PREV_BUF_SIZE 4096 #define RANGE_HEADER_SIZE 30 -static int commits_on_stdin; - -static int 
got_alternates = -1; -static int corrupt_object_found; - -static struct curl_slist *no_pragma_header; - struct alt_base { char *base; @@ -22,8 +15,6 @@ struct alt_base struct alt_base *next; }; -static struct alt_base *alt; - enum object_request_state { WAITING, ABORTED, @@ -33,6 +24,7 @@ enum object_request_state { struct object_request { + struct walker *walker; unsigned char sha1[20]; struct alt_base *repo; char *url; @@ -53,6 +45,7 @@ struct object_request }; struct alternates_request { + struct walker *walker; const char *base; char *url; struct buffer *buffer; @@ -60,6 +53,13 @@ struct alternates_request { int http_specific; }; +struct walker_data { + const char *url; + int got_alternates; + struct alt_base *alt; + struct curl_slist *no_pragma_header; +}; + static struct object_request *object_queue_head; static size_t fwrite_sha1_file(void *ptr, size_t eltsize, size_t nmemb, @@ -103,11 +103,12 @@ static int missing__target(int code, int result) #define missing_target(a) missing__target((a)->http_code, (a)->curl_result) -static void fetch_alternates(const char *base); +static void fetch_alternates(struct walker *walker, const char *base); static void process_object_response(void *callback_data); -static void start_object_request(struct object_request *obj_req) +static void start_object_request(struct walker *walker, + struct object_request *obj_req) { char *hex = sha1_to_hex(obj_req->sha1); char prevfile[PATH_MAX]; @@ -120,6 +121,7 @@ static void start_object_request(struct object_request *obj_req) char range[RANGE_HEADER_SIZE]; struct curl_slist *range_header = NULL; struct active_request_slot *slot; + struct walker_data *data = walker->data; snprintf(prevfile, sizeof(prevfile), "%s.prev", obj_req->filename); unlink(prevfile); @@ -212,12 +214,12 @@ static void start_object_request(struct object_request *obj_req) curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_sha1_file); curl_easy_setopt(slot->curl, CURLOPT_ERRORBUFFER, obj_req->errorstr); curl_easy_setopt(slot->curl, CURLOPT_URL, url); - curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, no_pragma_header); + curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, data->no_pragma_header); /* If we have successfully processed data from a previous fetch attempt, only fetch the data we don't already have. 
*/ if (prev_posn>0) { - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Resuming fetch of object %s at byte %ld\n", hex, prev_posn); @@ -268,13 +270,16 @@ static void finish_object_request(struct object_request *obj_req) move_temp_to_file(obj_req->tmpfile, obj_req->filename); if (obj_req->rename == 0) - pull_say("got %s\n", sha1_to_hex(obj_req->sha1)); + walker_say(obj_req->walker, "got %s\n", sha1_to_hex(obj_req->sha1)); } static void process_object_response(void *callback_data) { struct object_request *obj_req = (struct object_request *)callback_data; + struct walker *walker = obj_req->walker; + struct walker_data *data = walker->data; + struct alt_base *alt = data->alt; obj_req->curl_result = obj_req->slot->curl_result; obj_req->http_code = obj_req->slot->http_code; @@ -283,13 +288,13 @@ static void process_object_response(void *callback_data) /* Use alternates if necessary */ if (missing_target(obj_req)) { - fetch_alternates(alt->base); + fetch_alternates(walker, alt->base); if (obj_req->repo->next != NULL) { obj_req->repo = obj_req->repo->next; close(obj_req->local); obj_req->local = -1; - start_object_request(obj_req); + start_object_request(walker, obj_req); return; } } @@ -317,42 +322,35 @@ static void release_object_request(struct object_request *obj_req) } #ifdef USE_CURL_MULTI -void fill_active_slots(void) +static int fill_active_slot(struct walker *walker) { - struct object_request *obj_req = object_queue_head; - struct active_request_slot *slot = active_queue_head; - int num_transfers; + struct object_request *obj_req; - while (active_requests < max_requests && obj_req != NULL) { + for (obj_req = object_queue_head; obj_req; obj_req = obj_req->next) { if (obj_req->state == WAITING) { if (has_sha1_file(obj_req->sha1)) obj_req->state = COMPLETE; - else - start_object_request(obj_req); - curl_multi_perform(curlm, &num_transfers); - } - obj_req = obj_req->next; - } - - while (slot != NULL) { - if (!slot->in_use && slot->curl != NULL) { - curl_easy_cleanup(slot->curl); - slot->curl = NULL; + else { + start_object_request(walker, obj_req); + return 1; + } } - slot = slot->next; } + return 0; } #endif -void prefetch(unsigned char *sha1) +static void prefetch(struct walker *walker, unsigned char *sha1) { struct object_request *newreq; struct object_request *tail; + struct walker_data *data = walker->data; char *filename = sha1_file_name(sha1); newreq = xmalloc(sizeof(*newreq)); + newreq->walker = walker; hashcpy(newreq->sha1, sha1); - newreq->repo = alt; + newreq->repo = data->alt; newreq->url = NULL; newreq->local = -1; newreq->state = WAITING; @@ -378,7 +376,7 @@ void prefetch(unsigned char *sha1) #endif } -static int fetch_index(struct alt_base *repo, unsigned char *sha1) +static int fetch_index(struct walker *walker, struct alt_base *repo, unsigned char *sha1) { char *hex = sha1_to_hex(sha1); char *filename; @@ -387,6 +385,7 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1) long prev_posn = 0; char range[RANGE_HEADER_SIZE]; struct curl_slist *range_header = NULL; + struct walker_data *data = walker->data; FILE *indexfile; struct active_request_slot *slot; @@ -395,7 +394,7 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1) if (has_pack_index(sha1)) return 0; - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Getting index for pack %s\n", hex); url = xmalloc(strlen(repo->base) + 64); @@ -413,14 +412,14 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1) curl_easy_setopt(slot->curl, CURLOPT_FILE, 
indexfile); curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite); curl_easy_setopt(slot->curl, CURLOPT_URL, url); - curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, no_pragma_header); + curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, data->no_pragma_header); slot->local = indexfile; /* If there is data present from a previous transfer attempt, resume where it left off */ prev_posn = ftell(indexfile); if (prev_posn>0) { - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Resuming fetch of index for pack %s at byte %ld\n", hex, prev_posn); @@ -446,13 +445,13 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1) return move_temp_to_file(tmpfile, filename); } -static int setup_index(struct alt_base *repo, unsigned char *sha1) +static int setup_index(struct walker *walker, struct alt_base *repo, unsigned char *sha1) { struct packed_git *new_pack; if (has_pack_file(sha1)) return 0; /* don't list this as something we can get */ - if (fetch_index(repo, sha1)) + if (fetch_index(walker, repo, sha1)) return -1; new_pack = parse_pack_index(sha1); @@ -465,8 +464,10 @@ static void process_alternates_response(void *callback_data) { struct alternates_request *alt_req = (struct alternates_request *)callback_data; + struct walker *walker = alt_req->walker; + struct walker_data *cdata = walker->data; struct active_request_slot *slot = alt_req->slot; - struct alt_base *tail = alt; + struct alt_base *tail = cdata->alt; const char *base = alt_req->base; static const char null_byte = '\0'; char *data; @@ -487,7 +488,7 @@ static void process_alternates_response(void *callback_data) if (slot->finished != NULL) (*slot->finished) = 0; if (!start_active_slot(slot)) { - got_alternates = -1; + cdata->got_alternates = -1; slot->in_use = 0; if (slot->finished != NULL) (*slot->finished) = 1; @@ -496,7 +497,7 @@ static void process_alternates_response(void *callback_data) } } else if (slot->curl_result != CURLE_OK) { if (!missing_target(slot)) { - got_alternates = -1; + cdata->got_alternates = -1; return; } } @@ -573,7 +574,7 @@ static void process_alternates_response(void *callback_data) memcpy(target + serverlen, data + i, posn - i - 7); target[serverlen + posn - i - 7] = 0; - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Also look at %s\n", target); newalt = xmalloc(sizeof(*newalt)); @@ -590,39 +591,40 @@ static void process_alternates_response(void *callback_data) i = posn + 1; } - got_alternates = 1; + cdata->got_alternates = 1; } -static void fetch_alternates(const char *base) +static void fetch_alternates(struct walker *walker, const char *base) { struct buffer buffer; char *url; char *data; struct active_request_slot *slot; struct alternates_request alt_req; + struct walker_data *cdata = walker->data; /* If another request has already started fetching alternates, wait for them to arrive and return to processing this request's curl message */ #ifdef USE_CURL_MULTI - while (got_alternates == 0) { + while (cdata->got_alternates == 0) { step_active_slots(); } #endif /* Nothing to do if they've already been fetched */ - if (got_alternates == 1) + if (cdata->got_alternates == 1) return; /* Start the fetch */ - got_alternates = 0; + cdata->got_alternates = 0; data = xmalloc(4096); buffer.size = 4096; buffer.posn = 0; buffer.buffer = data; - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Getting alternates list for %s\n", base); url = xmalloc(strlen(base) + 31); @@ -632,6 +634,7 @@ static void fetch_alternates(const char *base) may fail and need to 
have alternates loaded before continuing */ slot = get_active_slot(); slot->callback_func = process_alternates_response; + alt_req.walker = walker; slot->callback_data = &alt_req; curl_easy_setopt(slot->curl, CURLOPT_FILE, &buffer); @@ -647,13 +650,13 @@ static void fetch_alternates(const char *base) if (start_active_slot(slot)) run_active_slot(slot); else - got_alternates = -1; + cdata->got_alternates = -1; free(data); free(url); } -static int fetch_indices(struct alt_base *repo) +static int fetch_indices(struct walker *walker, struct alt_base *repo) { unsigned char sha1[20]; char *url; @@ -672,7 +675,7 @@ static int fetch_indices(struct alt_base *repo) buffer.posn = 0; buffer.buffer = data; - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Getting pack list for %s\n", repo->base); url = xmalloc(strlen(repo->base) + 21); @@ -712,7 +715,7 @@ static int fetch_indices(struct alt_base *repo) !prefixcmp(data + i, " pack-") && !prefixcmp(data + i + 46, ".pack\n")) { get_sha1_hex(data + i + 6, sha1); - setup_index(repo, sha1); + setup_index(walker, repo, sha1); i += 51; break; } @@ -728,7 +731,7 @@ static int fetch_indices(struct alt_base *repo) return 0; } -static int fetch_pack(struct alt_base *repo, unsigned char *sha1) +static int fetch_pack(struct walker *walker, struct alt_base *repo, unsigned char *sha1) { char *url; struct packed_git *target; @@ -740,17 +743,18 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1) long prev_posn = 0; char range[RANGE_HEADER_SIZE]; struct curl_slist *range_header = NULL; + struct walker_data *data = walker->data; struct active_request_slot *slot; struct slot_results results; - if (fetch_indices(repo)) + if (fetch_indices(walker, repo)) return -1; target = find_sha1_pack(sha1, repo->packs); if (!target) return -1; - if (get_verbosely) { + if (walker->get_verbosely) { fprintf(stderr, "Getting pack %s\n", sha1_to_hex(target->sha1)); fprintf(stderr, " which contains %s\n", @@ -773,14 +777,14 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1) curl_easy_setopt(slot->curl, CURLOPT_FILE, packfile); curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite); curl_easy_setopt(slot->curl, CURLOPT_URL, url); - curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, no_pragma_header); + curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, data->no_pragma_header); slot->local = packfile; /* If there is data present from a previous transfer attempt, resume where it left off */ prev_posn = ftell(packfile); if (prev_posn>0) { - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, "Resuming fetch of pack %s at byte %ld\n", sha1_to_hex(target->sha1), prev_posn); @@ -834,7 +838,7 @@ static void abort_object_request(struct object_request *obj_req) release_object_request(obj_req); } -static int fetch_object(struct alt_base *repo, unsigned char *sha1) +static int fetch_object(struct walker *walker, struct alt_base *repo, unsigned char *sha1) { char *hex = sha1_to_hex(sha1); int ret = 0; @@ -855,7 +859,7 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1) step_active_slots(); } #else - start_object_request(obj_req); + start_object_request(walker, obj_req); #endif while (obj_req->state == ACTIVE) { @@ -876,7 +880,7 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1) obj_req->errorstr, obj_req->curl_result, obj_req->http_code, hex); } else if (obj_req->zret != Z_STREAM_END) { - corrupt_object_found++; + walker->corrupt_object_found++; ret = error("File %s (%s) corrupt", hex, obj_req->url); } 
else if (hashcmp(obj_req->sha1, obj_req->real_sha1)) { ret = error("File %s has bad hash", hex); @@ -889,20 +893,21 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1) return ret; } -int fetch(unsigned char *sha1) +static int fetch(struct walker *walker, unsigned char *sha1) { - struct alt_base *altbase = alt; + struct walker_data *data = walker->data; + struct alt_base *altbase = data->alt; - if (!fetch_object(altbase, sha1)) + if (!fetch_object(walker, altbase, sha1)) return 0; while (altbase) { - if (!fetch_pack(altbase, sha1)) + if (!fetch_pack(walker, altbase, sha1)) return 0; - fetch_alternates(alt->base); + fetch_alternates(walker, data->alt->base); altbase = altbase->next; } return error("Unable to find %s under %s", sha1_to_hex(sha1), - alt->base); + data->alt->base); } static inline int needs_quote(int ch) @@ -951,12 +956,13 @@ static char *quote_ref_url(const char *base, const char *ref) return qref; } -int fetch_ref(char *ref, unsigned char *sha1) +static int fetch_ref(struct walker *walker, char *ref, unsigned char *sha1) { char *url; char hex[42]; struct buffer buffer; - const char *base = alt->base; + struct walker_data *data = walker->data; + const char *base = data->alt->base; struct active_request_slot *slot; struct slot_results results; buffer.size = 41; @@ -985,80 +991,45 @@ int fetch_ref(char *ref, unsigned char *sha1) return 0; } -int main(int argc, const char **argv) +static void cleanup(struct walker *walker) +{ + struct walker_data *data = walker->data; + http_cleanup(); + + curl_slist_free_all(data->no_pragma_header); +} + +struct walker *get_http_walker(const char *url) { - int commits; - const char **write_ref = NULL; - char **commit_id; - const char *url; char *s; - int arg = 1; - int rc = 0; - - setup_git_directory(); - git_config(git_default_config); - - while (arg < argc && argv[arg][0] == '-') { - if (argv[arg][1] == 't') { - get_tree = 1; - } else if (argv[arg][1] == 'c') { - get_history = 1; - } else if (argv[arg][1] == 'a') { - get_all = 1; - get_tree = 1; - get_history = 1; - } else if (argv[arg][1] == 'v') { - get_verbosely = 1; - } else if (argv[arg][1] == 'w') { - write_ref = &argv[arg + 1]; - arg++; - } else if (!strcmp(argv[arg], "--recover")) { - get_recover = 1; - } else if (!strcmp(argv[arg], "--stdin")) { - commits_on_stdin = 1; - } - arg++; - } - if (argc < arg + 2 - commits_on_stdin) { - usage("git-http-fetch [-c] [-t] [-a] [-v] [--recover] [-w ref] [--stdin] commit-id url"); - return 1; - } - if (commits_on_stdin) { - commits = pull_targets_stdin(&commit_id, &write_ref); - } else { - commit_id = (char **) &argv[arg++]; - commits = 1; - } - url = argv[arg]; + struct walker_data *data = xmalloc(sizeof(struct walker_data)); + struct walker *walker = xmalloc(sizeof(struct walker)); http_init(); - no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:"); + data->no_pragma_header = curl_slist_append(NULL, "Pragma:"); - alt = xmalloc(sizeof(*alt)); - alt->base = xmalloc(strlen(url) + 1); - strcpy(alt->base, url); - for (s = alt->base + strlen(alt->base) - 1; *s == '/'; --s) + data->alt = xmalloc(sizeof(*data->alt)); + data->alt->base = xmalloc(strlen(url) + 1); + strcpy(data->alt->base, url); + for (s = data->alt->base + strlen(data->alt->base) - 1; *s == '/'; --s) *s = 0; - alt->got_indices = 0; - alt->packs = NULL; - alt->next = NULL; - if (pull(commits, commit_id, write_ref, url)) - rc = 1; - - http_cleanup(); + data->alt->got_indices = 0; + data->alt->packs = NULL; + data->alt->next = NULL; + data->got_alternates = 
-1; - curl_slist_free_all(no_pragma_header); + walker->corrupt_object_found = 0; + walker->fetch = fetch; + walker->fetch_ref = fetch_ref; + walker->prefetch = prefetch; + walker->cleanup = cleanup; + walker->data = data; - if (commits_on_stdin) - pull_targets_free(commits, commit_id, write_ref); +#ifdef USE_CURL_MULTI + add_fill_function(walker, (int (*)(void *)) fill_active_slot); +#endif - if (corrupt_object_found) { - fprintf(stderr, -"Some loose object were found to be corrupt, but they might be just\n" -"a false '404 Not Found' error message sent with incorrect HTTP\n" -"status code. Suggest running git-fsck.\n"); - } - return rc; + return walker; } @@ -276,6 +276,7 @@ void http_cleanup(void) #endif while (slot != NULL) { + struct active_request_slot *next = slot->next; #ifdef USE_CURL_MULTI if (slot->in_use) { curl_easy_getinfo(slot->curl, @@ -287,8 +288,10 @@ void http_cleanup(void) #endif if (slot->curl != NULL) curl_easy_cleanup(slot->curl); - slot = slot->next; + free(slot); + slot = next; } + active_queue_head = NULL; #ifndef NO_CURL_EASY_DUPHANDLE curl_easy_cleanup(curl_default); @@ -300,7 +303,7 @@ void http_cleanup(void) curl_global_cleanup(); curl_slist_free_all(pragma_header); - pragma_header = NULL; + pragma_header = NULL; } struct active_request_slot *get_active_slot(void) @@ -372,6 +375,7 @@ int start_active_slot(struct active_request_slot *slot) { #ifdef USE_CURL_MULTI CURLMcode curlm_result = curl_multi_add_handle(curlm, slot->curl); + int num_transfers; if (curlm_result != CURLM_OK && curlm_result != CURLM_CALL_MULTI_PERFORM) { @@ -379,11 +383,60 @@ int start_active_slot(struct active_request_slot *slot) slot->in_use = 0; return 0; } + + /* + * We know there must be something to do, since we just added + * something. + */ + curl_multi_perform(curlm, &num_transfers); #endif return 1; } #ifdef USE_CURL_MULTI +struct fill_chain { + void *data; + int (*fill)(void *); + struct fill_chain *next; +}; + +static struct fill_chain *fill_cfg = NULL; + +void add_fill_function(void *data, int (*fill)(void *)) +{ + struct fill_chain *new = malloc(sizeof(*new)); + struct fill_chain **linkp = &fill_cfg; + new->data = data; + new->fill = fill; + new->next = NULL; + while (*linkp) + linkp = &(*linkp)->next; + *linkp = new; +} + +void fill_active_slots(void) +{ + struct active_request_slot *slot = active_queue_head; + + while (active_requests < max_requests) { + struct fill_chain *fill; + for (fill = fill_cfg; fill; fill = fill->next) + if (fill->fill(fill->data)) + break; + + if (!fill) + break; + } + + while (slot != NULL) { + if (!slot->in_use && slot->curl != NULL) { + curl_easy_cleanup(slot->curl); + slot->curl = NULL; + } + slot = slot->next; + } +} + void step_active_slots(void) { int num_transfers; @@ -70,6 +70,7 @@ extern void release_active_slot(struct active_request_slot *slot); #ifdef USE_CURL_MULTI extern void fill_active_slots(void); +extern void add_fill_function(void *data, int (*fill)(void *)); extern void step_active_slots(void); #endif @@ -79,10 +80,6 @@ extern void http_cleanup(void); extern int data_received; extern int active_requests; -#ifdef USE_CURL_MULTI -extern int max_requests; -extern CURLM *curlm; -#endif #ifndef NO_CURL_EASY_DUPHANDLE extern CURL *curl_default; #endif @@ -103,6 +100,4 @@ extern long curl_low_speed_time; extern struct curl_slist *pragma_header; extern struct curl_slist *no_range_header; -extern struct active_request_slot *active_queue_head; - #endif /* HTTP_H */ diff --git a/local-fetch.c b/local-fetch.c deleted file mode 100644 index 
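
The hunks above turn git-http-fetch's file-scope globals into a per-instance handle created by get_http_walker(). As a hedged illustration only (the calling context and error handling below are assumptions, not code from this series), a caller might drive the new interface roughly like this:

    /*
     * Minimal sketch of using the walker handle built above.
     * "url" is assumed to hold the HTTP base URL of the remote.
     */
    struct walker *walker = get_http_walker(url);
    unsigned char sha1[20];
    char head[] = "HEAD";

    walker->get_verbosely = 1;
    if (walker->fetch_ref(walker, head, sha1))
            die("could not fetch ref %s from %s", head, url);
    if (walker->fetch(walker, sha1))
            die("could not fetch object %s", sha1_to_hex(sha1));
    walker->cleanup(walker);

Under USE_CURL_MULTI the constructor also registers its fill_active_slot() callback through the new add_fill_function() chain in http.c, so the generic fill_active_slots() loop can keep the request queue full without http-walker having to export its internals.
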
bf7ec6c2a3..0000000000 --- a/local-fetch.c +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Copyright (C) 2005 Junio C Hamano - */ -#include "cache.h" -#include "commit.h" -#include "fetch.h" - -static int use_link; -static int use_symlink; -static int use_filecopy = 1; -static int commits_on_stdin; - -static const char *path; /* "Remote" git repository */ - -void prefetch(unsigned char *sha1) -{ -} - -static struct packed_git *packs; - -static void setup_index(unsigned char *sha1) -{ - struct packed_git *new_pack; - char filename[PATH_MAX]; - strcpy(filename, path); - strcat(filename, "/objects/pack/pack-"); - strcat(filename, sha1_to_hex(sha1)); - strcat(filename, ".idx"); - new_pack = parse_pack_index_file(sha1, filename); - new_pack->next = packs; - packs = new_pack; -} - -static int setup_indices(void) -{ - DIR *dir; - struct dirent *de; - char filename[PATH_MAX]; - unsigned char sha1[20]; - sprintf(filename, "%s/objects/pack/", path); - dir = opendir(filename); - if (!dir) - return -1; - while ((de = readdir(dir)) != NULL) { - int namelen = strlen(de->d_name); - if (namelen != 50 || - !has_extension(de->d_name, ".pack")) - continue; - get_sha1_hex(de->d_name + 5, sha1); - setup_index(sha1); - } - closedir(dir); - return 0; -} - -static int copy_file(const char *source, char *dest, const char *hex, - int warn_if_not_exists) -{ - safe_create_leading_directories(dest); - if (use_link) { - if (!link(source, dest)) { - pull_say("link %s\n", hex); - return 0; - } - /* If we got ENOENT there is no point continuing. */ - if (errno == ENOENT) { - if (!warn_if_not_exists) - return -1; - return error("does not exist %s", source); - } - } - if (use_symlink) { - struct stat st; - if (stat(source, &st)) { - if (!warn_if_not_exists && errno == ENOENT) - return -1; - return error("cannot stat %s: %s", source, - strerror(errno)); - } - if (!symlink(source, dest)) { - pull_say("symlink %s\n", hex); - return 0; - } - } - if (use_filecopy) { - int ifd, ofd, status = 0; - - ifd = open(source, O_RDONLY); - if (ifd < 0) { - if (!warn_if_not_exists && errno == ENOENT) - return -1; - return error("cannot open %s", source); - } - ofd = open(dest, O_WRONLY | O_CREAT | O_EXCL, 0666); - if (ofd < 0) { - close(ifd); - return error("cannot open %s", dest); - } - status = copy_fd(ifd, ofd); - close(ofd); - if (status) - return error("cannot write %s", dest); - pull_say("copy %s\n", hex); - return 0; - } - return error("failed to copy %s with given copy methods.", hex); -} - -static int fetch_pack(const unsigned char *sha1) -{ - struct packed_git *target; - char filename[PATH_MAX]; - if (setup_indices()) - return -1; - target = find_sha1_pack(sha1, packs); - if (!target) - return error("Couldn't find %s: not separate or in any pack", - sha1_to_hex(sha1)); - if (get_verbosely) { - fprintf(stderr, "Getting pack %s\n", - sha1_to_hex(target->sha1)); - fprintf(stderr, " which contains %s\n", - sha1_to_hex(sha1)); - } - sprintf(filename, "%s/objects/pack/pack-%s.pack", - path, sha1_to_hex(target->sha1)); - copy_file(filename, sha1_pack_name(target->sha1), - sha1_to_hex(target->sha1), 1); - sprintf(filename, "%s/objects/pack/pack-%s.idx", - path, sha1_to_hex(target->sha1)); - copy_file(filename, sha1_pack_index_name(target->sha1), - sha1_to_hex(target->sha1), 1); - install_packed_git(target); - return 0; -} - -static int fetch_file(const unsigned char *sha1) -{ - static int object_name_start = -1; - static char filename[PATH_MAX]; - char *hex = sha1_to_hex(sha1); - char *dest_filename = sha1_file_name(sha1); - - if 
(object_name_start < 0) { - strcpy(filename, path); /* e.g. git.git */ - strcat(filename, "/objects/"); - object_name_start = strlen(filename); - } - filename[object_name_start+0] = hex[0]; - filename[object_name_start+1] = hex[1]; - filename[object_name_start+2] = '/'; - strcpy(filename + object_name_start + 3, hex + 2); - return copy_file(filename, dest_filename, hex, 0); -} - -int fetch(unsigned char *sha1) -{ - if (has_sha1_file(sha1)) - return 0; - else - return fetch_file(sha1) && fetch_pack(sha1); -} - -int fetch_ref(char *ref, unsigned char *sha1) -{ - static int ref_name_start = -1; - static char filename[PATH_MAX]; - static char hex[41]; - int ifd; - - if (ref_name_start < 0) { - sprintf(filename, "%s/refs/", path); - ref_name_start = strlen(filename); - } - strcpy(filename + ref_name_start, ref); - ifd = open(filename, O_RDONLY); - if (ifd < 0) { - close(ifd); - return error("cannot open %s", filename); - } - if (read_in_full(ifd, hex, 40) != 40 || get_sha1_hex(hex, sha1)) { - close(ifd); - return error("cannot read from %s", filename); - } - close(ifd); - pull_say("ref %s\n", sha1_to_hex(sha1)); - return 0; -} - -static const char local_pull_usage[] = -"git-local-fetch [-c] [-t] [-a] [-v] [-w filename] [--recover] [-l] [-s] [-n] [--stdin] commit-id path"; - -/* - * By default we only use file copy. - * If -l is specified, a hard link is attempted. - * If -s is specified, then a symlink is attempted. - * If -n is _not_ specified, then a regular file-to-file copy is done. - */ -int main(int argc, const char **argv) -{ - int commits; - const char **write_ref = NULL; - char **commit_id; - int arg = 1; - - setup_git_directory(); - git_config(git_default_config); - - while (arg < argc && argv[arg][0] == '-') { - if (argv[arg][1] == 't') - get_tree = 1; - else if (argv[arg][1] == 'c') - get_history = 1; - else if (argv[arg][1] == 'a') { - get_all = 1; - get_tree = 1; - get_history = 1; - } - else if (argv[arg][1] == 'l') - use_link = 1; - else if (argv[arg][1] == 's') - use_symlink = 1; - else if (argv[arg][1] == 'n') - use_filecopy = 0; - else if (argv[arg][1] == 'v') - get_verbosely = 1; - else if (argv[arg][1] == 'w') - write_ref = &argv[++arg]; - else if (!strcmp(argv[arg], "--recover")) - get_recover = 1; - else if (!strcmp(argv[arg], "--stdin")) - commits_on_stdin = 1; - else - usage(local_pull_usage); - arg++; - } - if (argc < arg + 2 - commits_on_stdin) - usage(local_pull_usage); - if (commits_on_stdin) { - commits = pull_targets_stdin(&commit_id, &write_ref); - } else { - commit_id = (char **) &argv[arg++]; - commits = 1; - } - path = argv[arg]; - - if (pull(commits, commit_id, write_ref, path)) - return 1; - - if (commits_on_stdin) - pull_targets_free(commits, commit_id, write_ref); - - return 0; -} diff --git a/log-tree.c b/log-tree.c index 62edd34455..3763ce94fc 100644 --- a/log-tree.c +++ b/log-tree.c @@ -15,7 +15,7 @@ static void show_parents(struct commit *commit, int abbrev) } } -static void show_decorations(struct commit *commit) +void show_decorations(struct commit *commit) { const char *prefix; struct name_decoration *decoration; diff --git a/log-tree.h b/log-tree.h index e82b56a20d..b33f7cd7ac 100644 --- a/log-tree.h +++ b/log-tree.h @@ -12,5 +12,6 @@ int log_tree_diff_flush(struct rev_info *); int log_tree_commit(struct rev_info *, struct commit *); int log_tree_opt_parse(struct rev_info *, const char **, int); void show_log(struct rev_info *opt, const char *sep); +void show_decorations(struct commit *commit); #endif diff --git a/match-trees.c b/match-trees.c 
index d7e29c4d1d..0fd6df7d6e 100644 --- a/match-trees.c +++ b/match-trees.c @@ -132,7 +132,7 @@ static void match_trees(const unsigned char *hash1, const unsigned char *hash2, int *best_score, char **best_match, - char *base, + const char *base, int recurse_limit) { struct tree_desc one; diff --git a/merge-recursive.c b/merge-recursive.c index 4a5c77c3b6..6c6f595fbc 100644 --- a/merge-recursive.c +++ b/merge-recursive.c @@ -1572,7 +1572,7 @@ static int merge(struct commit *h1, { struct commit_list *iter; struct commit *merged_common_ancestors; - struct tree *mrtree; + struct tree *mrtree = mrtree; int clean; if (show(4)) { diff --git a/pack-write.c b/pack-write.c index d1ed3abe21..665e2b29b8 100644 --- a/pack-write.c +++ b/pack-write.c @@ -180,3 +180,29 @@ void fixup_pack_header_footer(int pack_fd, SHA1_Final(pack_file_sha1, &c); write_or_die(pack_fd, pack_file_sha1, 20); } + +char *index_pack_lockfile(int ip_out) +{ + int len, s; + char packname[46]; + + /* + * The first thing we expects from index-pack's output + * is "pack\t%40s\n" or "keep\t%40s\n" (46 bytes) where + * %40s is the newly created pack SHA1 name. In the "keep" + * case, we need it to remove the corresponding .keep file + * later on. If we don't get that then tough luck with it. + */ + for (len = 0; + len < 46 && (s = xread(ip_out, packname+len, 46-len)) > 0; + len += s); + if (len == 46 && packname[45] == '\n' && + memcmp(packname, "keep\t", 5) == 0) { + char path[PATH_MAX]; + packname[45] = 0; + snprintf(path, sizeof(path), "%s/pack/pack-%s.keep", + get_object_directory(), packname + 5); + return xstrdup(path); + } + return NULL; +} @@ -59,6 +59,7 @@ extern char *write_idx_file(char *index_name, struct pack_idx_entry **objects, i extern int verify_pack(struct packed_git *, int); extern void fixup_pack_header_footer(int, unsigned char *, const char *, uint32_t); +extern char *index_pack_lockfile(int fd); #define PH_ERROR_EOF (-1) #define PH_ERROR_PACK_SIGNATURE (-2) diff --git a/peek-remote.c b/peek-remote.c index ceb787170e..8d20f7c9c6 100644 --- a/peek-remote.c +++ b/peek-remote.c @@ -25,7 +25,7 @@ int main(int argc, char **argv) int i, ret; char *dest = NULL; int fd[2]; - pid_t pid; + struct child_process *conn; int nongit = 0; unsigned flags = 0; @@ -64,12 +64,10 @@ int main(int argc, char **argv) if (!dest || i != argc - 1) usage(peek_remote_usage); - pid = git_connect(fd, dest, uploadpack, 0); - if (pid < 0) - return 1; + conn = git_connect(fd, dest, uploadpack, 0); ret = peek_remote(fd, flags); close(fd[0]); close(fd[1]); - ret |= finish_connect(pid); + ret |= finish_connect(conn); return !!ret; } diff --git a/read-cache.c b/read-cache.c index 56202d13df..056b322fb0 100644 --- a/read-cache.c +++ b/read-cache.c @@ -149,6 +149,8 @@ static int ce_match_stat_basic(struct cache_entry *ce, struct stat *st) else if (ce_compare_gitlink(ce)) changed |= DATA_CHANGED; return changed; + case 0: /* Special case: unmerged file in index */ + return MODE_CHANGED | DATA_CHANGED | TYPE_CHANGED; default: die("internal error: ce_mode is %o", ntohl(ce->ce_mode)); } diff --git a/receive-pack.c b/receive-pack.c index d3c422be58..38e35c06b9 100644 --- a/receive-pack.c +++ b/receive-pack.c @@ -166,7 +166,7 @@ static const char *update(struct command *cmd) struct ref_lock *lock; if (!prefixcmp(name, "refs/") && check_ref_format(name + 5)) { - error("refusing to create funny ref '%s' locally", name); + error("refusing to create funny ref '%s' remotely", name); return "funny refname"; } @@ -382,9 +382,8 @@ static const char *unpack(void) } } 
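
The comment above documents the one-line report ("pack\t<sha1>\n" or "keep\t<sha1>\n") that index-pack prints on its standard output; index_pack_lockfile() turns a "keep" report into the path of the corresponding .keep file. As a rough sketch (variable names are illustrative; receive-pack's rewritten unpack(), just below, is the real user), the returned path is meant to be removed once the received objects have been made reachable:

    char *lockfile = index_pack_lockfile(ip_out);  /* read end of index-pack's stdout */

    /* ... update refs so the objects in the new pack are reachable ... */

    if (lockfile) {
            unlink(lockfile);  /* allow the pack to be repacked/pruned again */
            free(lockfile);
    }
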
else { const char *keeper[6]; - int s, len, status; + int s, status; char keep_arg[256]; - char packname[46]; struct child_process ip; s = sprintf(keep_arg, "--keep=receive-pack %i on ", getpid()); @@ -403,26 +402,7 @@ static const char *unpack(void) ip.git_cmd = 1; if (start_command(&ip)) return "index-pack fork failed"; - - /* - * The first thing we expects from index-pack's output - * is "pack\t%40s\n" or "keep\t%40s\n" (46 bytes) where - * %40s is the newly created pack SHA1 name. In the "keep" - * case, we need it to remove the corresponding .keep file - * later on. If we don't get that then tough luck with it. - */ - for (len = 0; - len < 46 && (s = xread(ip.out, packname+len, 46-len)) > 0; - len += s); - if (len == 46 && packname[45] == '\n' && - memcmp(packname, "keep\t", 5) == 0) { - char path[PATH_MAX]; - packname[45] = 0; - snprintf(path, sizeof(path), "%s/pack/pack-%s.keep", - get_object_directory(), packname + 5); - pack_lockfile = xstrdup(path); - } - + pack_lockfile = index_pack_lockfile(ip.out); status = finish_command(&ip); if (!status) { reprepare_packed_git(); @@ -2,6 +2,7 @@ #include "refs.h" #include "object.h" #include "tag.h" +#include "dir.h" /* ISSYMREF=01 and ISPACKED=02 are public interfaces */ #define REF_KNOWS_PEELED 04 @@ -671,57 +672,23 @@ static struct ref_lock *verify_lock(struct ref_lock *lock, return lock; } -static int remove_empty_dir_recursive(char *path, int len) -{ - DIR *dir = opendir(path); - struct dirent *e; - int ret = 0; - - if (!dir) - return -1; - if (path[len-1] != '/') - path[len++] = '/'; - while ((e = readdir(dir)) != NULL) { - struct stat st; - int namlen; - if ((e->d_name[0] == '.') && - ((e->d_name[1] == 0) || - ((e->d_name[1] == '.') && e->d_name[2] == 0))) - continue; /* "." and ".." */ - - namlen = strlen(e->d_name); - if ((len + namlen < PATH_MAX) && - strcpy(path + len, e->d_name) && - !lstat(path, &st) && - S_ISDIR(st.st_mode) && - !remove_empty_dir_recursive(path, len + namlen)) - continue; /* happy */ - - /* path too long, stat fails, or non-directory still exists */ - ret = -1; - break; - } - closedir(dir); - if (!ret) { - path[len] = 0; - ret = rmdir(path); - } - return ret; -} - -static int remove_empty_directories(char *file) +static int remove_empty_directories(const char *file) { /* we want to create a file but there is a directory there; * if that is an empty directory (or a directory that contains * only empty directories), remove them. 
*/ - char path[PATH_MAX]; - int len = strlen(file); + struct strbuf path; + int result; - if (len >= PATH_MAX) /* path too long ;-) */ - return -1; - strcpy(path, file); - return remove_empty_dir_recursive(path, len); + strbuf_init(&path, 20); + strbuf_addstr(&path, file); + + result = remove_dir_recursively(&path, 1); + + strbuf_release(&path); + + return result; } static int is_refname_available(const char *ref, const char *oldref, @@ -5,6 +5,12 @@ static struct remote **remotes; static int allocated_remotes; +static struct branch **branches; +static int allocated_branches; + +static struct branch *current_branch; +static const char *default_remote_name; + #define BUF_SIZE (2048) static char buffer[BUF_SIZE]; @@ -26,13 +32,13 @@ static void add_fetch_refspec(struct remote *remote, const char *ref) remote->fetch_refspec_nr = nr; } -static void add_uri(struct remote *remote, const char *uri) +static void add_url(struct remote *remote, const char *url) { - int nr = remote->uri_nr + 1; - remote->uri = - xrealloc(remote->uri, nr * sizeof(char *)); - remote->uri[nr-1] = uri; - remote->uri_nr = nr; + int nr = remote->url_nr + 1; + remote->url = + xrealloc(remote->url, nr * sizeof(char *)); + remote->url[nr-1] = url; + remote->url_nr = nr; } static struct remote *make_remote(const char *name, int len) @@ -67,6 +73,54 @@ static struct remote *make_remote(const char *name, int len) return remotes[empty]; } +static void add_merge(struct branch *branch, const char *name) +{ + int nr = branch->merge_nr + 1; + branch->merge_name = + xrealloc(branch->merge_name, nr * sizeof(char *)); + branch->merge_name[nr-1] = name; + branch->merge_nr = nr; +} + +static struct branch *make_branch(const char *name, int len) +{ + int i, empty = -1; + char *refname; + + for (i = 0; i < allocated_branches; i++) { + if (!branches[i]) { + if (empty < 0) + empty = i; + } else { + if (len ? (!strncmp(name, branches[i]->name, len) && + !branches[i]->name[len]) : + !strcmp(name, branches[i]->name)) + return branches[i]; + } + } + + if (empty < 0) { + empty = allocated_branches; + allocated_branches += allocated_branches ? allocated_branches : 1; + branches = xrealloc(branches, + sizeof(*branches) * allocated_branches); + memset(branches + empty, 0, + (allocated_branches - empty) * sizeof(*branches)); + } + branches[empty] = xcalloc(1, sizeof(struct branch)); + if (len) + branches[empty]->name = xstrndup(name, len); + else + branches[empty]->name = xstrdup(name); + refname = malloc(strlen(name) + strlen("refs/heads/") + 1); + strcpy(refname, "refs/heads/"); + strcpy(refname + strlen("refs/heads/"), + branches[empty]->name); + branches[empty]->refname = refname; + + return branches[empty]; +} + static void read_remotes_file(struct remote *remote) { FILE *f = fopen(git_path("remotes/%s", remote->name), "r"); @@ -100,7 +154,7 @@ static void read_remotes_file(struct remote *remote) switch (value_list) { case 0: - add_uri(remote, xstrdup(s)); + add_url(remote, xstrdup(s)); break; case 1: add_push_refspec(remote, xstrdup(s)); @@ -116,6 +170,8 @@ static void read_remotes_file(struct remote *remote) static void read_branches_file(struct remote *remote) { const char *slash = strchr(remote->name, '/'); + char *frag; + char *branch; int n = slash ? 
slash - remote->name : 1000; FILE *f = fopen(git_path("branches/%.*s", n, remote->name), "r"); char *s, *p; @@ -141,23 +197,41 @@ static void read_branches_file(struct remote *remote) strcpy(p, s); if (slash) strcat(p, slash); - add_uri(remote, p); + frag = strchr(p, '#'); + if (frag) { + *(frag++) = '\0'; + branch = xmalloc(strlen(frag) + 12); + strcpy(branch, "refs/heads/"); + strcat(branch, frag); + } else { + branch = "refs/heads/master"; + } + add_url(remote, p); + add_fetch_refspec(remote, branch); + remote->fetch_tags = 1; /* always auto-follow */ } -static char *default_remote_name = NULL; -static const char *current_branch = NULL; -static int current_branch_len = 0; - static int handle_config(const char *key, const char *value) { const char *name; const char *subkey; struct remote *remote; - if (!prefixcmp(key, "branch.") && current_branch && - !strncmp(key + 7, current_branch, current_branch_len) && - !strcmp(key + 7 + current_branch_len, ".remote")) { - free(default_remote_name); - default_remote_name = xstrdup(value); + struct branch *branch; + if (!prefixcmp(key, "branch.")) { + name = key + 7; + subkey = strrchr(name, '.'); + branch = make_branch(name, subkey - name); + if (!subkey) + return 0; + if (!value) + return 0; + if (!strcmp(subkey, ".remote")) { + branch->remote_name = xstrdup(value); + if (branch == current_branch) + default_remote_name = branch->remote_name; + } else if (!strcmp(subkey, ".merge")) + add_merge(branch, xstrdup(value)); + return 0; } if (prefixcmp(key, "remote.")) return 0; @@ -186,7 +260,7 @@ static int handle_config(const char *key, const char *value) return 0; /* ignore unknown booleans */ } if (!strcmp(subkey, ".url")) { - add_uri(remote, xstrdup(value)); + add_url(remote, xstrdup(value)); } else if (!strcmp(subkey, ".push")) { add_push_refspec(remote, xstrdup(value)); } else if (!strcmp(subkey, ".fetch")) { @@ -196,6 +270,14 @@ static int handle_config(const char *key, const char *value) remote->receivepack = xstrdup(value); else error("more than one receivepack given, using the first"); + } else if (!strcmp(subkey, ".uploadpack")) { + if (!remote->uploadpack) + remote->uploadpack = xstrdup(value); + else + error("more than one uploadpack given, using the first"); + } else if (!strcmp(subkey, ".tagopt")) { + if (!strcmp(value, "--no-tags")) + remote->fetch_tags = -1; } return 0; } @@ -212,13 +294,13 @@ static void read_config(void) head_ref = resolve_ref("HEAD", sha1, 0, &flag); if (head_ref && (flag & REF_ISSYMREF) && !prefixcmp(head_ref, "refs/heads/")) { - current_branch = head_ref + strlen("refs/heads/"); - current_branch_len = strlen(current_branch); + current_branch = + make_branch(head_ref + strlen("refs/heads/"), 0); } git_config(handle_config); } -static struct refspec *parse_ref_spec(int nr_refspec, const char **refspec) +struct refspec *parse_ref_spec(int nr_refspec, const char **refspec) { int i; struct refspec *rs = xcalloc(sizeof(*rs), nr_refspec); @@ -265,14 +347,14 @@ struct remote *remote_get(const char *name) name = default_remote_name; ret = make_remote(name, 0); if (name[0] != '/') { - if (!ret->uri) + if (!ret->url) read_remotes_file(ret); - if (!ret->uri) + if (!ret->url) read_branches_file(ret); } - if (!ret->uri) - add_uri(ret, name); - if (!ret->uri) + if (!ret->url) + add_url(ret, name); + if (!ret->url) return NULL; ret->fetch = parse_ref_spec(ret->fetch_refspec_nr, ret->fetch_refspec); ret->push = parse_ref_spec(ret->push_refspec_nr, ret->push_refspec); @@ -298,16 +380,62 @@ int for_each_remote(each_remote_fn fn, void 
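
handle_config() above now wires both remote.* and branch.* configuration into the new structures. A hedged example of the keys it understands (the remote and branch names here are placeholders, not anything mandated by the patch):

    [remote "origin"]
            url = git://git.example.com/project.git
            fetch = +refs/heads/*:refs/remotes/origin/*
            uploadpack = git-upload-pack
            tagopt = --no-tags
    [branch "master"]
            remote = origin
            merge = refs/heads/master

With this, remote.origin.tagopt sets fetch_tags to -1, remote.origin.uploadpack fills the new uploadpack member, and branch.master.remote/.merge populate the struct branch that branch_get() (added further down in remote.c) returns for the current branch.
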
*priv) return result; } -int remote_has_uri(struct remote *remote, const char *uri) +void ref_remove_duplicates(struct ref *ref_map) +{ + struct ref **posn; + struct ref *next; + for (; ref_map; ref_map = ref_map->next) { + if (!ref_map->peer_ref) + continue; + posn = &ref_map->next; + while (*posn) { + if ((*posn)->peer_ref && + !strcmp((*posn)->peer_ref->name, + ref_map->peer_ref->name)) { + if (strcmp((*posn)->name, ref_map->name)) + die("%s tracks both %s and %s", + ref_map->peer_ref->name, + (*posn)->name, ref_map->name); + next = (*posn)->next; + free((*posn)->peer_ref); + free(*posn); + *posn = next; + } else { + posn = &(*posn)->next; + } + } + } +} + +int remote_has_url(struct remote *remote, const char *url) { int i; - for (i = 0; i < remote->uri_nr; i++) { - if (!strcmp(remote->uri[i], uri)) + for (i = 0; i < remote->url_nr; i++) { + if (!strcmp(remote->url[i], url)) return 1; } return 0; } +/* + * Returns true if, under the matching rules for fetching, name is the + * same as the given full name. + */ +static int ref_matches_abbrev(const char *name, const char *full) +{ + if (!prefixcmp(name, "refs/") || !strcmp(name, "HEAD")) + return !strcmp(name, full); + if (prefixcmp(full, "refs/")) + return 0; + if (!prefixcmp(name, "heads/") || + !prefixcmp(name, "tags/") || + !prefixcmp(name, "remotes/")) + return !strcmp(name, full + 5); + if (prefixcmp(full + 5, "heads/")) + return 0; + return !strcmp(full + 11, name); +} + int remote_find_tracking(struct remote *remote, struct refspec *refspec) { int find_src = refspec->src == NULL; @@ -315,7 +443,7 @@ int remote_find_tracking(struct remote *remote, struct refspec *refspec) int i; if (find_src) { - if (refspec->dst == NULL) + if (!refspec->dst) return error("find_tracking: need either src or dst"); needle = refspec->dst; result = &refspec->src; @@ -357,6 +485,14 @@ struct ref *alloc_ref(unsigned namelen) return ret; } +static struct ref *copy_ref(struct ref *ref) +{ + struct ref *ret = xmalloc(sizeof(struct ref) + strlen(ref->name) + 1); + memcpy(ret, ref, sizeof(struct ref) + strlen(ref->name) + 1); + ret->next = NULL; + return ret; +} + void free_refs(struct ref *ref) { struct ref *next; @@ -489,23 +625,23 @@ static int match_explicit(struct ref *src, struct ref *dst, * way to delete 'other' ref at the remote end. 
*/ matched_src = try_explicit_object_name(rs->src); - if (matched_src) - break; - error("src refspec %s does not match any.", - rs->src); + if (!matched_src) + error("src refspec %s does not match any.", rs->src); break; default: matched_src = NULL; - error("src refspec %s matches more than one.", - rs->src); + error("src refspec %s matches more than one.", rs->src); break; } if (!matched_src) errs = 1; - if (dst_value == NULL) + if (!dst_value) { + if (!matched_src) + return errs; dst_value = matched_src->name; + } switch (count_refspec_match(dst_value, dst, &matched_dst)) { case 1: @@ -524,7 +660,7 @@ static int match_explicit(struct ref *src, struct ref *dst, dst_value); break; } - if (errs || matched_dst == NULL) + if (errs || !matched_dst) return 1; if (matched_dst->peer_ref) { errs = 1; @@ -626,8 +762,161 @@ int match_refs(struct ref *src, struct ref *dst, struct ref ***dst_tail, hashcpy(dst_peer->new_sha1, src->new_sha1); } dst_peer->peer_ref = src; + if (pat) + dst_peer->force = pat->force; free_name: free(dst_name); } return 0; } + +struct branch *branch_get(const char *name) +{ + struct branch *ret; + + read_config(); + if (!name || !*name || !strcmp(name, "HEAD")) + ret = current_branch; + else + ret = make_branch(name, 0); + if (ret && ret->remote_name) { + ret->remote = remote_get(ret->remote_name); + if (ret->merge_nr) { + int i; + ret->merge = xcalloc(sizeof(*ret->merge), + ret->merge_nr); + for (i = 0; i < ret->merge_nr; i++) { + ret->merge[i] = xcalloc(1, sizeof(**ret->merge)); + ret->merge[i]->src = xstrdup(ret->merge_name[i]); + remote_find_tracking(ret->remote, + ret->merge[i]); + } + } + } + return ret; +} + +int branch_has_merge_config(struct branch *branch) +{ + return branch && !!branch->merge; +} + +int branch_merge_matches(struct branch *branch, + int i, + const char *refname) +{ + if (!branch || i < 0 || i >= branch->merge_nr) + return 0; + return ref_matches_abbrev(branch->merge[i]->src, refname); +} + +static struct ref *get_expanded_map(struct ref *remote_refs, + const struct refspec *refspec) +{ + struct ref *ref; + struct ref *ret = NULL; + struct ref **tail = &ret; + + int remote_prefix_len = strlen(refspec->src); + int local_prefix_len = strlen(refspec->dst); + + for (ref = remote_refs; ref; ref = ref->next) { + if (strchr(ref->name, '^')) + continue; /* a dereference item */ + if (!prefixcmp(ref->name, refspec->src)) { + char *match; + struct ref *cpy = copy_ref(ref); + match = ref->name + remote_prefix_len; + + cpy->peer_ref = alloc_ref(local_prefix_len + + strlen(match) + 1); + sprintf(cpy->peer_ref->name, "%s%s", + refspec->dst, match); + if (refspec->force) + cpy->peer_ref->force = 1; + *tail = cpy; + tail = &cpy->next; + } + } + + return ret; +} + +static struct ref *find_ref_by_name_abbrev(struct ref *refs, const char *name) +{ + struct ref *ref; + for (ref = refs; ref; ref = ref->next) { + if (ref_matches_abbrev(name, ref->name)) + return ref; + } + return NULL; +} + +struct ref *get_remote_ref(struct ref *remote_refs, const char *name) +{ + struct ref *ref = find_ref_by_name_abbrev(remote_refs, name); + + if (!ref) + return NULL; + + return copy_ref(ref); +} + +static struct ref *get_local_ref(const char *name) +{ + struct ref *ret; + if (!name) + return NULL; + + if (!prefixcmp(name, "refs/")) { + ret = alloc_ref(strlen(name) + 1); + strcpy(ret->name, name); + return ret; + } + + if (!prefixcmp(name, "heads/") || + !prefixcmp(name, "tags/") || + !prefixcmp(name, "remotes/")) { + ret = alloc_ref(strlen(name) + 6); + sprintf(ret->name, "refs/%s", 
name); + return ret; + } + + ret = alloc_ref(strlen(name) + 12); + sprintf(ret->name, "refs/heads/%s", name); + return ret; +} + +int get_fetch_map(struct ref *remote_refs, + const struct refspec *refspec, + struct ref ***tail, + int missing_ok) +{ + struct ref *ref_map, *rm; + + if (refspec->pattern) { + ref_map = get_expanded_map(remote_refs, refspec); + } else { + const char *name = refspec->src[0] ? refspec->src : "HEAD"; + + ref_map = get_remote_ref(remote_refs, name); + if (!missing_ok && !ref_map) + die("Couldn't find remote ref %s", name); + if (ref_map) { + ref_map->peer_ref = get_local_ref(refspec->dst); + if (ref_map->peer_ref && refspec->force) + ref_map->peer_ref->force = 1; + } + } + + for (rm = ref_map; rm; rm = rm->next) { + if (rm->peer_ref && check_ref_format(rm->peer_ref->name + 5)) + die("* refusing to create funny ref '%s' locally", + rm->peer_ref->name); + } + + if (ref_map) + tail_link_ref(ref_map, tail); + + return 0; +} @@ -4,8 +4,8 @@ struct remote { const char *name; - const char **uri; - int uri_nr; + const char **url; + int url_nr; const char **push_refspec; struct refspec *push; @@ -15,7 +15,16 @@ struct remote { struct refspec *fetch; int fetch_refspec_nr; + /* + * -1 to never fetch tags + * 0 to auto-follow tags on heuristic (default) + * 1 to always auto-follow tags + * 2 to always fetch tags + */ + int fetch_tags; + const char *receivepack; + const char *uploadpack; }; struct remote *remote_get(const char *name); @@ -23,7 +32,7 @@ struct remote *remote_get(const char *name); typedef int each_remote_fn(struct remote *remote, void *priv); int for_each_remote(each_remote_fn fn, void *priv); -int remote_has_uri(struct remote *remote, const char *uri); +int remote_has_url(struct remote *remote, const char *url); struct refspec { unsigned force : 1; @@ -40,12 +49,53 @@ struct ref *alloc_ref(unsigned namelen); */ void free_refs(struct ref *ref); +/* + * Removes and frees any duplicate refs in the map. + */ +void ref_remove_duplicates(struct ref *ref_map); + +struct refspec *parse_ref_spec(int nr_refspec, const char **refspec); + int match_refs(struct ref *src, struct ref *dst, struct ref ***dst_tail, int nr_refspec, char **refspec, int all); /* + * Given a list of the remote refs and the specification of things to + * fetch, makes a (separate) list of the refs to fetch and the local + * refs to store into. + * + * *tail is the pointer to the tail pointer of the list of results + * beforehand, and will be set to the tail pointer of the list of + * results afterward. + * + * missing_ok is usually false, but when we are adding branch.$name.merge + * it is Ok if the branch is not at the remote anymore. + */ +int get_fetch_map(struct ref *remote_refs, const struct refspec *refspec, + struct ref ***tail, int missing_ok); + +struct ref *get_remote_ref(struct ref *remote_refs, const char *name); + +/* * For the given remote, reads the refspec's src and sets the other fields. 
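
get_fetch_map() above (documented in the remote.h hunk) expands one refspec against the advertised remote refs and appends the resulting pairs to a tail pointer. A minimal sketch of the intended calling pattern, assuming remote_refs came from the remote's ref advertisement and remote->fetch was produced by parse_ref_spec():

    struct ref *ref_map = NULL, **tail = &ref_map;
    int i;

    for (i = 0; i < remote->fetch_refspec_nr; i++)
            get_fetch_map(remote_refs, &remote->fetch[i], &tail, 0);
    ref_remove_duplicates(ref_map);
    /*
     * Each entry now carries ->peer_ref, the local ref to store the
     * fetched object in, with the force bit propagated from the refspec.
     */
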
*/ int remote_find_tracking(struct remote *remote, struct refspec *refspec); +struct branch { + const char *name; + const char *refname; + + const char *remote_name; + struct remote *remote; + + const char **merge_name; + struct refspec **merge; + int merge_nr; +}; + +struct branch *branch_get(const char *name); + +int branch_has_merge_config(struct branch *branch); +int branch_merge_matches(struct branch *, int n, const char *); + #endif diff --git a/rsh.c b/rsh.c deleted file mode 100644 index 016d72ead7..0000000000 --- a/rsh.c +++ /dev/null @@ -1,79 +0,0 @@ -#include "cache.h" -#include "rsh.h" -#include "quote.h" - -#define COMMAND_SIZE 4096 - -int setup_connection(int *fd_in, int *fd_out, const char *remote_prog, - char *url, int rmt_argc, char **rmt_argv) -{ - char *host; - char *path; - int sv[2]; - int i; - pid_t pid; - struct strbuf cmd; - - if (!strcmp(url, "-")) { - *fd_in = 0; - *fd_out = 1; - return 0; - } - - host = strstr(url, "//"); - if (host) { - host += 2; - path = strchr(host, '/'); - } else { - host = url; - path = strchr(host, ':'); - if (path) - *(path++) = '\0'; - } - if (!path) { - return error("Bad URL: %s", url); - } - - /* $GIT_RSH <host> "env GIT_DIR=<path> <remote_prog> <args...>" */ - strbuf_init(&cmd, COMMAND_SIZE); - strbuf_addstr(&cmd, "env "); - strbuf_addstr(&cmd, GIT_DIR_ENVIRONMENT "="); - sq_quote_buf(&cmd, path); - strbuf_addch(&cmd, ' '); - sq_quote_buf(&cmd, remote_prog); - - for (i = 0 ; i < rmt_argc ; i++) { - strbuf_addch(&cmd, ' '); - sq_quote_buf(&cmd, rmt_argv[i]); - } - - strbuf_addstr(&cmd, " -"); - - if (cmd.len >= COMMAND_SIZE) - return error("Command line too long"); - - if (socketpair(AF_UNIX, SOCK_STREAM, 0, sv)) - return error("Couldn't create socket"); - - pid = fork(); - if (pid < 0) - return error("Couldn't fork"); - if (!pid) { - const char *ssh, *ssh_basename; - ssh = getenv("GIT_SSH"); - if (!ssh) ssh = "ssh"; - ssh_basename = strrchr(ssh, '/'); - if (!ssh_basename) - ssh_basename = ssh; - else - ssh_basename++; - close(sv[1]); - dup2(sv[0], 0); - dup2(sv[0], 1); - execlp(ssh, ssh_basename, host, cmd.buf, NULL); - } - close(sv[0]); - *fd_in = sv[1]; - *fd_out = sv[1]; - return 0; -} diff --git a/rsh.h b/rsh.h deleted file mode 100644 index ee2f499291..0000000000 --- a/rsh.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef RSH_H -#define RSH_H - -int setup_connection(int *fd_in, int *fd_out, const char *remote_prog, - char *url, int rmt_argc, char **rmt_argv); - -#endif diff --git a/run-command.c b/run-command.c index 7e779d33ee..d99a6c4ea7 100644 --- a/run-command.c +++ b/run-command.c @@ -17,8 +17,8 @@ static inline void dup_devnull(int to) int start_command(struct child_process *cmd) { - int need_in, need_out; - int fdin[2], fdout[2]; + int need_in, need_out, need_err; + int fdin[2], fdout[2], fderr[2]; need_in = !cmd->no_stdin && cmd->in < 0; if (need_in) { @@ -41,12 +41,26 @@ int start_command(struct child_process *cmd) cmd->close_out = 1; } + need_err = cmd->err < 0; + if (need_err) { + if (pipe(fderr) < 0) { + if (need_in) + close_pair(fdin); + if (need_out) + close_pair(fdout); + return -ERR_RUN_COMMAND_PIPE; + } + cmd->err = fderr[0]; + } + cmd->pid = fork(); if (cmd->pid < 0) { if (need_in) close_pair(fdin); if (need_out) close_pair(fdout); + if (need_err) + close_pair(fderr); return -ERR_RUN_COMMAND_FORK; } @@ -73,6 +87,11 @@ int start_command(struct child_process *cmd) close(cmd->out); } + if (need_err) { + dup2(fderr[1], 2); + close_pair(fderr); + } + if (cmd->dir && chdir(cmd->dir)) die("exec %s: cd to %s failed (%s)", 
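
start_command() above gains an optional stderr pipe: setting the new err member to -1 before the call makes it create a pipe, point the child's file descriptor 2 at it, and hand the read end back in cmd->err (the field itself is added to struct child_process in the run-command.h hunk below). A hedged sketch of a caller, with a made-up command name:

    const char *argv[] = { "some-command", NULL };  /* hypothetical command */
    struct child_process cp;
    char buf[1024];
    ssize_t len;

    memset(&cp, 0, sizeof(cp));
    cp.argv = argv;
    cp.err = -1;  /* ask start_command() for a pipe on the child's stderr */
    if (start_command(&cp))
            return -1;
    while ((len = xread(cp.err, buf, sizeof(buf))) > 0)
            write_in_full(2, buf, len);  /* relay (or parse) the diagnostics */
    close(cp.err);
    return finish_command(&cp);
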
cmd->argv[0], cmd->dir, strerror(errno)); @@ -102,19 +121,17 @@ int start_command(struct child_process *cmd) else if (cmd->out > 1) close(cmd->out); + if (need_err) + close(fderr[1]); + return 0; } -int finish_command(struct child_process *cmd) +static int wait_or_whine(pid_t pid) { - if (cmd->close_in) - close(cmd->in); - if (cmd->close_out) - close(cmd->out); - for (;;) { int status, code; - pid_t waiting = waitpid(cmd->pid, &status, 0); + pid_t waiting = waitpid(pid, &status, 0); if (waiting < 0) { if (errno == EINTR) @@ -122,7 +139,7 @@ int finish_command(struct child_process *cmd) error("waitpid failed (%s)", strerror(errno)); return -ERR_RUN_COMMAND_WAITPID; } - if (waiting != cmd->pid) + if (waiting != pid) return -ERR_RUN_COMMAND_WAITPID_WRONG_PID; if (WIFSIGNALED(status)) return -ERR_RUN_COMMAND_WAITPID_SIGNAL; @@ -136,6 +153,15 @@ int finish_command(struct child_process *cmd) } } +int finish_command(struct child_process *cmd) +{ + if (cmd->close_in) + close(cmd->in); + if (cmd->close_out) + close(cmd->out); + return wait_or_whine(cmd->pid); +} + int run_command(struct child_process *cmd) { int code = start_command(cmd); @@ -178,3 +204,34 @@ int run_command_v_opt_cd_env(const char **argv, int opt, const char *dir, const cmd.env = env; return run_command(&cmd); } + +int start_async(struct async *async) +{ + int pipe_out[2]; + + if (pipe(pipe_out) < 0) + return error("cannot create pipe: %s", strerror(errno)); + + async->pid = fork(); + if (async->pid < 0) { + error("fork (async) failed: %s", strerror(errno)); + close_pair(pipe_out); + return -1; + } + if (!async->pid) { + close(pipe_out[0]); + exit(!!async->proc(pipe_out[1], async->data)); + } + async->out = pipe_out[0]; + close(pipe_out[1]); + return 0; +} + +int finish_async(struct async *async) +{ + int ret = 0; + + if (wait_or_whine(async->pid)) + ret = error("waitpid (async) failed"); + return ret; +} diff --git a/run-command.h b/run-command.h index 7958eb1e0b..94e1e9d516 100644 --- a/run-command.h +++ b/run-command.h @@ -16,6 +16,7 @@ struct child_process { pid_t pid; int in; int out; + int err; const char *dir; const char *const *env; unsigned close_in:1; @@ -42,4 +43,26 @@ int run_command_v_opt_cd(const char **argv, int opt, const char *dir); */ int run_command_v_opt_cd_env(const char **argv, int opt, const char *dir, const char *const *env); +/* + * The purpose of the following functions is to feed a pipe by running + * a function asynchronously and providing output that the caller reads. + * + * It is expected that no synchronization and mutual exclusion between + * the caller and the feed function is necessary so that the function + * can run in a thread without interfering with the caller. 
+ */ +struct async { + /* + * proc writes to fd and closes it; + * returns 0 on success, non-zero on failure + */ + int (*proc)(int fd, void *data); + void *data; + int out; /* caller reads from here and closes it */ + pid_t pid; +}; + +int start_async(struct async *async); +int finish_async(struct async *async); + #endif diff --git a/send-pack.c b/send-pack.c index c1807f0794..5e127a1b7b 100644 --- a/send-pack.c +++ b/send-pack.c @@ -178,6 +178,35 @@ static int receive_status(int in) return ret; } +static void update_tracking_ref(struct remote *remote, struct ref *ref) +{ + struct refspec rs; + int will_delete_ref; + + rs.src = ref->name; + rs.dst = NULL; + + if (!ref->peer_ref) + return; + + will_delete_ref = is_null_sha1(ref->peer_ref->new_sha1); + + if (!will_delete_ref && + !hashcmp(ref->old_sha1, ref->peer_ref->new_sha1)) + return; + + if (!remote_find_tracking(remote, &rs)) { + fprintf(stderr, "updating local tracking ref '%s'\n", rs.dst); + if (is_null_sha1(ref->peer_ref->new_sha1)) { + if (delete_ref(rs.dst, NULL)) + error("Failed to delete"); + } else + update_ref("update by push", rs.dst, + ref->new_sha1, NULL, 0, 0); + free(rs.dst); + } +} + static int send_pack(int in, int out, struct remote *remote, int nr_refspec, char **refspec) { struct ref *ref; @@ -306,22 +335,6 @@ static int send_pack(int in, int out, struct remote *remote, int nr_refspec, cha fprintf(stderr, "\n from %s\n to %s\n", old_hex, new_hex); } - if (remote && !dry_run) { - struct refspec rs; - rs.src = ref->name; - rs.dst = NULL; - if (!remote_find_tracking(remote, &rs)) { - fprintf(stderr, " Also local %s\n", rs.dst); - if (will_delete_ref) { - if (delete_ref(rs.dst, NULL)) { - error("Failed to delete"); - } - } else - update_ref("update by push", rs.dst, - ref->new_sha1, NULL, 0, 0); - free(rs.dst); - } - } } packet_flush(out); @@ -334,6 +347,11 @@ static int send_pack(int in, int out, struct remote *remote, int nr_refspec, cha ret = -4; } + if (!dry_run && remote && ret == 0) { + for (ref = remote_refs; ref; ref = ref->next) + update_tracking_ref(remote, ref); + } + if (!new_refs && ret == 0) fprintf(stderr, "Everything up-to-date\n"); return ret; @@ -366,7 +384,7 @@ int main(int argc, char **argv) char *dest = NULL; char **heads = NULL; int fd[2], ret; - pid_t pid; + struct child_process *conn; char *remote_name = NULL; struct remote *remote = NULL; @@ -428,18 +446,16 @@ int main(int argc, char **argv) if (remote_name) { remote = remote_get(remote_name); - if (!remote_has_uri(remote, dest)) { + if (!remote_has_url(remote, dest)) { die("Destination %s is not a uri for %s", dest, remote_name); } } - pid = git_connect(fd, dest, receivepack, verbose ? CONNECT_VERBOSE : 0); - if (pid < 0) - return 1; + conn = git_connect(fd, dest, receivepack, verbose ? 
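
The new start_async()/finish_async() helpers above let a caller read from a pipe that is fed by a function running in a forked child. A hedged sketch (the producer function and its message are made up for illustration):

    static int produce(int fd, void *data)
    {
            /* runs in the child: write everything, then close the pipe */
            write_in_full(fd, data, strlen(data));
            close(fd);
            return 0;
    }

    /* ... in the caller ... */
    static char message[] = "produced asynchronously\n";
    struct async as;
    char buf[256];
    ssize_t len;

    as.proc = produce;
    as.data = message;
    if (start_async(&as))
            return error("could not start async producer");
    while ((len = xread(as.out, buf, sizeof(buf))) > 0)
            write_in_full(1, buf, len);
    close(as.out);
    return finish_async(&as);
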
CONNECT_VERBOSE : 0); ret = send_pack(fd[0], fd[1], remote, nr_heads, heads); close(fd[0]); close(fd[1]); - ret |= finish_connect(pid); + ret |= finish_connect(conn); return !!ret; } diff --git a/sha1_file.c b/sha1_file.c index 83a06a7aed..f007874cbb 100644 --- a/sha1_file.c +++ b/sha1_file.c @@ -521,13 +521,15 @@ static int check_packed_git_idx(const char *path, struct packed_git *p) munmap(idx_map, idx_size); return error("wrong index v2 file size in %s", path); } - if (idx_size != min_size) { - /* make sure we can deal with large pack offsets */ - off_t x = 0x7fffffffUL, y = 0xffffffffUL; - if (x > (x + 1) || y > (y + 1)) { - munmap(idx_map, idx_size); - return error("pack too large for current definition of off_t in %s", path); - } + if (idx_size != min_size && + /* + * make sure we can deal with large pack offsets. + * 31-bit signed offset won't be enough, neither + * 32-bit unsigned one will be. + */ + (sizeof(off_t) <= 4)) { + munmap(idx_map, idx_size); + return error("pack too large for current definition of off_t in %s", path); } } @@ -24,17 +24,11 @@ static int do_cvs_cmd(const char *me, char *arg) const char *cvsserver_argv[3] = { "cvsserver", "server", NULL }; - const char *oldpath = getenv("PATH"); - struct strbuf newpath = STRBUF_INIT; if (!arg || strcmp(arg, "server")) die("git-cvsserver only handles server: %s", arg); - strbuf_addstr(&newpath, git_exec_path()); - strbuf_addch(&newpath, ':'); - strbuf_addstr(&newpath, oldpath); - - setenv("PATH", strbuf_detach(&newpath, NULL), 1); + setup_path(NULL); return execv_git_cmd(cvsserver_argv); } diff --git a/show-index.c b/show-index.c index 57ed9e87b7..7253991fff 100644 --- a/show-index.c +++ b/show-index.c @@ -68,7 +68,7 @@ int main(int argc, char **argv) ntohl(off64[1]); off64_nr++; } - printf("%llu %s (%08x)\n", (unsigned long long) offset, + printf("%" PRIuMAX " %s (%08x)\n", (uintmax_t) offset, sha1_to_hex(entries[i].sha1), ntohl(entries[i].crc)); } diff --git a/ssh-fetch.c b/ssh-fetch.c deleted file mode 100644 index bdf51a7a14..0000000000 --- a/ssh-fetch.c +++ /dev/null @@ -1,166 +0,0 @@ -#ifndef COUNTERPART_ENV_NAME -#define COUNTERPART_ENV_NAME "GIT_SSH_UPLOAD" -#endif -#ifndef COUNTERPART_PROGRAM_NAME -#define COUNTERPART_PROGRAM_NAME "git-ssh-upload" -#endif -#ifndef MY_PROGRAM_NAME -#define MY_PROGRAM_NAME "git-ssh-fetch" -#endif - -#include "cache.h" -#include "commit.h" -#include "rsh.h" -#include "fetch.h" -#include "refs.h" - -static int fd_in; -static int fd_out; - -static unsigned char remote_version; -static unsigned char local_version = 1; - -static int prefetches; - -static struct object_list *in_transit; -static struct object_list **end_of_transit = &in_transit; - -void prefetch(unsigned char *sha1) -{ - char type = 'o'; - struct object_list *node; - if (prefetches > 100) { - fetch(in_transit->item->sha1); - } - node = xmalloc(sizeof(struct object_list)); - node->next = NULL; - node->item = lookup_unknown_object(sha1); - *end_of_transit = node; - end_of_transit = &node->next; - /* XXX: what if these writes fail? */ - write_in_full(fd_out, &type, 1); - write_in_full(fd_out, sha1, 20); - prefetches++; -} - -static char conn_buf[4096]; -static size_t conn_buf_posn; - -int fetch(unsigned char *sha1) -{ - int ret; - signed char remote; - struct object_list *temp; - - if (hashcmp(sha1, in_transit->item->sha1)) { - /* we must have already fetched it to clean the queue */ - return has_sha1_file(sha1) ? 
0 : -1; - } - prefetches--; - temp = in_transit; - in_transit = in_transit->next; - if (!in_transit) - end_of_transit = &in_transit; - free(temp); - - if (conn_buf_posn) { - remote = conn_buf[0]; - memmove(conn_buf, conn_buf + 1, --conn_buf_posn); - } else { - if (xread(fd_in, &remote, 1) < 1) - return -1; - } - /* fprintf(stderr, "Got %d\n", remote); */ - if (remote < 0) - return remote; - ret = write_sha1_from_fd(sha1, fd_in, conn_buf, 4096, &conn_buf_posn); - if (!ret) - pull_say("got %s\n", sha1_to_hex(sha1)); - return ret; -} - -static int get_version(void) -{ - char type = 'v'; - if (write_in_full(fd_out, &type, 1) != 1 || - write_in_full(fd_out, &local_version, 1)) { - return error("Couldn't request version from remote end"); - } - if (xread(fd_in, &remote_version, 1) < 1) { - return error("Couldn't read version from remote end"); - } - return 0; -} - -int fetch_ref(char *ref, unsigned char *sha1) -{ - signed char remote; - char type = 'r'; - int length = strlen(ref) + 1; - if (write_in_full(fd_out, &type, 1) != 1 || - write_in_full(fd_out, ref, length) != length) - return -1; - - if (read_in_full(fd_in, &remote, 1) != 1) - return -1; - if (remote < 0) - return remote; - if (read_in_full(fd_in, sha1, 20) != 20) - return -1; - return 0; -} - -static const char ssh_fetch_usage[] = - MY_PROGRAM_NAME - " [-c] [-t] [-a] [-v] [--recover] [-w ref] commit-id url"; -int main(int argc, char **argv) -{ - const char *write_ref = NULL; - char *commit_id; - char *url; - int arg = 1; - const char *prog; - - prog = getenv("GIT_SSH_PUSH"); - if (!prog) prog = "git-ssh-upload"; - - setup_git_directory(); - git_config(git_default_config); - - while (arg < argc && argv[arg][0] == '-') { - if (argv[arg][1] == 't') { - get_tree = 1; - } else if (argv[arg][1] == 'c') { - get_history = 1; - } else if (argv[arg][1] == 'a') { - get_all = 1; - get_tree = 1; - get_history = 1; - } else if (argv[arg][1] == 'v') { - get_verbosely = 1; - } else if (argv[arg][1] == 'w') { - write_ref = argv[arg + 1]; - arg++; - } else if (!strcmp(argv[arg], "--recover")) { - get_recover = 1; - } - arg++; - } - if (argc < arg + 2) { - usage(ssh_fetch_usage); - return 1; - } - commit_id = argv[arg]; - url = argv[arg + 1]; - - if (setup_connection(&fd_in, &fd_out, prog, url, arg, argv + 1)) - return 1; - - if (get_version()) - return 1; - - if (pull(1, &commit_id, &write_ref, url)) - return 1; - - return 0; -} diff --git a/ssh-pull.c b/ssh-pull.c deleted file mode 100644 index 868ce4d41f..0000000000 --- a/ssh-pull.c +++ /dev/null @@ -1,4 +0,0 @@ -#define COUNTERPART_ENV_NAME "GIT_SSH_PUSH" -#define COUNTERPART_PROGRAM_NAME "git-ssh-push" -#define MY_PROGRAM_NAME "git-ssh-pull" -#include "ssh-fetch.c" diff --git a/ssh-push.c b/ssh-push.c deleted file mode 100644 index a562df1b31..0000000000 --- a/ssh-push.c +++ /dev/null @@ -1,4 +0,0 @@ -#define COUNTERPART_ENV_NAME "GIT_SSH_PULL" -#define COUNTERPART_PROGRAM_NAME "git-ssh-pull" -#define MY_PROGRAM_NAME "git-ssh-push" -#include "ssh-upload.c" diff --git a/ssh-upload.c b/ssh-upload.c deleted file mode 100644 index 20c35f03dd..0000000000 --- a/ssh-upload.c +++ /dev/null @@ -1,143 +0,0 @@ -#ifndef COUNTERPART_ENV_NAME -#define COUNTERPART_ENV_NAME "GIT_SSH_FETCH" -#endif -#ifndef COUNTERPART_PROGRAM_NAME -#define COUNTERPART_PROGRAM_NAME "git-ssh-fetch" -#endif -#ifndef MY_PROGRAM_NAME -#define MY_PROGRAM_NAME "git-ssh-upload" -#endif - -#include "cache.h" -#include "rsh.h" -#include "refs.h" - -static unsigned char local_version = 1; -static unsigned char remote_version; - -static int 
verbose; - -static int serve_object(int fd_in, int fd_out) { - ssize_t size; - unsigned char sha1[20]; - signed char remote; - - size = read_in_full(fd_in, sha1, 20); - if (size < 0) { - perror("git-ssh-upload: read "); - return -1; - } - if (!size) - return -1; - - if (verbose) - fprintf(stderr, "Serving %s\n", sha1_to_hex(sha1)); - - remote = 0; - - if (!has_sha1_file(sha1)) { - fprintf(stderr, "git-ssh-upload: could not find %s\n", - sha1_to_hex(sha1)); - remote = -1; - } - - if (write_in_full(fd_out, &remote, 1) != 1) - return 0; - - if (remote < 0) - return 0; - - return write_sha1_to_fd(fd_out, sha1); -} - -static int serve_version(int fd_in, int fd_out) -{ - if (xread(fd_in, &remote_version, 1) < 1) - return -1; - write_in_full(fd_out, &local_version, 1); - return 0; -} - -static int serve_ref(int fd_in, int fd_out) -{ - char ref[PATH_MAX]; - unsigned char sha1[20]; - int posn = 0; - signed char remote = 0; - do { - if (posn >= PATH_MAX || xread(fd_in, ref + posn, 1) < 1) - return -1; - posn++; - } while (ref[posn - 1]); - - if (verbose) - fprintf(stderr, "Serving %s\n", ref); - - if (get_ref_sha1(ref, sha1)) - remote = -1; - if (write_in_full(fd_out, &remote, 1) != 1) - return 0; - if (remote) - return 0; - write_in_full(fd_out, sha1, 20); - return 0; -} - - -static void service(int fd_in, int fd_out) { - char type; - ssize_t retval; - do { - retval = xread(fd_in, &type, 1); - if (retval < 1) { - if (retval < 0) - perror("git-ssh-upload: read "); - return; - } - if (type == 'v' && serve_version(fd_in, fd_out)) - return; - if (type == 'o' && serve_object(fd_in, fd_out)) - return; - if (type == 'r' && serve_ref(fd_in, fd_out)) - return; - } while (1); -} - -static const char ssh_push_usage[] = - MY_PROGRAM_NAME " [-c] [-t] [-a] [-w ref] commit-id url"; - -int main(int argc, char **argv) -{ - int arg = 1; - char *commit_id; - char *url; - int fd_in, fd_out; - const char *prog; - unsigned char sha1[20]; - char hex[41]; - - prog = getenv(COUNTERPART_ENV_NAME); - if (!prog) prog = COUNTERPART_PROGRAM_NAME; - - setup_git_directory(); - - while (arg < argc && argv[arg][0] == '-') { - if (argv[arg][1] == 'w') - arg++; - arg++; - } - if (argc < arg + 2) - usage(ssh_push_usage); - commit_id = argv[arg]; - url = argv[arg + 1]; - if (get_sha1(commit_id, sha1)) - die("Not a valid object name %s", commit_id); - memcpy(hex, sha1_to_hex(sha1), sizeof(hex)); - argv[arg] = hex; - - if (setup_connection(&fd_in, &fd_out, prog, url, arg, argv + 1)) - return 1; - - service(fd_in, fd_out); - return 0; -} diff --git a/t/t0020-crlf.sh b/t/t0020-crlf.sh index 0807d9f01a..62bc4bb077 100755 --- a/t/t0020-crlf.sh +++ b/t/t0020-crlf.sh @@ -371,4 +371,11 @@ test_expect_success 'in-tree .gitattributes (4)' ' } ' +test_expect_success 'invalid .gitattributes (must not crash)' ' + + echo "three +crlf" >>.gitattributes && + git diff + +' + test_done diff --git a/t/t0021-conversion.sh b/t/t0021-conversion.sh index a839f4e074..cb860296ed 100755 --- a/t/t0021-conversion.sh +++ b/t/t0021-conversion.sh @@ -42,7 +42,12 @@ test_expect_success check ' git diff --raw --exit-code :test :test.i && id=$(git rev-parse --verify :test) && embedded=$(sed -ne "$script" test.i) && - test "z$id" = "z$embedded" + test "z$id" = "z$embedded" && + + git cat-file blob :test.t > test.r && + + ./rot13.sh < test.o > test.t && + cmp test.r test.t ' # If an expanded ident ever gets into the repository, we want to make sure that diff --git a/t/t5400-send-pack.sh b/t/t5400-send-pack.sh index 57c6397be1..2d0c07fd6a 100755 --- a/t/t5400-send-pack.sh 
+++ b/t/t5400-send-pack.sh @@ -123,4 +123,52 @@ test_expect_success \ git-branch -a >branches && ! grep -q origin/master branches ' +rewound_push_setup() { + rm -rf parent child && + mkdir parent && cd parent && + git-init && echo one >file && git-add file && git-commit -m one && + echo two >file && git-commit -a -m two && + cd .. && + git-clone parent child && cd child && git-reset --hard HEAD^ +} + +rewound_push_succeeded() { + cmp ../parent/.git/refs/heads/master .git/refs/heads/master +} + +rewound_push_failed() { + if rewound_push_succeeded + then + false + else + true + fi +} + +test_expect_success \ + 'pushing explicit refspecs respects forcing' ' + rewound_push_setup && + if git-send-pack ../parent/.git refs/heads/master:refs/heads/master + then + false + else + true + fi && rewound_push_failed && + git-send-pack ../parent/.git +refs/heads/master:refs/heads/master && + rewound_push_succeeded +' + +test_expect_success \ + 'pushing wildcard refspecs respects forcing' ' + rewound_push_setup && + if git-send-pack ../parent/.git refs/heads/*:refs/heads/* + then + false + else + true + fi && rewound_push_failed && + git-send-pack ../parent/.git +refs/heads/*:refs/heads/* && + rewound_push_succeeded +' + test_done diff --git a/t/t5510-fetch.sh b/t/t5510-fetch.sh index 439430f569..d217657146 100755 --- a/t/t5510-fetch.sh +++ b/t/t5510-fetch.sh @@ -67,6 +67,18 @@ test_expect_success "fetch test for-merge" ' cut -f -2 .git/FETCH_HEAD >actual && diff expected actual' +test_expect_success 'fetch tags when there is no tags' ' + + cd "$D" && + + mkdir notags && + cd notags && + git init && + + git fetch -t .. + +' + test_expect_success 'fetch following tags' ' cd "$D" && @@ -153,4 +165,47 @@ test_expect_success 'bundle should be able to create a full history' ' ' +test "$TEST_RSYNC" && { +test_expect_success 'fetch via rsync' ' + git pack-refs && + mkdir rsynced && + cd rsynced && + git init && + git fetch rsync://127.0.0.1$(pwd)/../.git master:refs/heads/master && + git gc --prune && + test $(git rev-parse master) = $(cd .. && git rev-parse master) && + git fsck --full +' + +test_expect_success 'push via rsync' ' + mkdir ../rsynced2 && + (cd ../rsynced2 && + git init) && + git push rsync://127.0.0.1$(pwd)/../rsynced2/.git master && + cd ../rsynced2 && + git gc --prune && + test $(git rev-parse master) = $(cd .. && git rev-parse master) && + git fsck --full +' + +test_expect_success 'push via rsync' ' + cd .. && + mkdir rsynced3 && + (cd rsynced3 && + git init) && + git push --all rsync://127.0.0.1$(pwd)/rsynced3/.git && + cd rsynced3 && + test $(git rev-parse master) = $(cd .. 
&& git rev-parse master) && + git fsck --full +' +} + +test_expect_success 'fetch with a non-applying branch.<name>.merge' ' + git config branch.master.remote yeti && + git config branch.master.merge refs/heads/bigfoot && + git config remote.blub.url one && + git config remote.blub.fetch "refs/heads/*:refs/remotes/one/*" && + git fetch blub +' + test_done diff --git a/t/t5515-fetch-merge-logic.sh b/t/t5515-fetch-merge-logic.sh index 6c9cc67508..31c1081617 100755 --- a/t/t5515-fetch-merge-logic.sh +++ b/t/t5515-fetch-merge-logic.sh @@ -84,8 +84,7 @@ test_expect_success setup ' git config branch.br-$remote-merge.merge refs/heads/three && git config branch.br-$remote-octopus.remote $remote && git config branch.br-$remote-octopus.merge refs/heads/one && - git config --add branch.br-$remote-octopus.merge two && - git config --add branch.br-$remote-octopus.merge remotes/rem/three + git config --add branch.br-$remote-octopus.merge two done ' diff --git a/t/t5515/fetch.br-branches-default-merge b/t/t5515/fetch.br-branches-default-merge index ea65f31bde..ca2cc1d1b4 100644 --- a/t/t5515/fetch.br-branches-default-merge +++ b/t/t5515/fetch.br-branches-default-merge @@ -1,5 +1,6 @@ # br-branches-default-merge -754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../ +754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ +0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-default-merge_branches-default b/t/t5515/fetch.br-branches-default-merge_branches-default index 7b5fa949e6..7d947cd80f 100644 --- a/t/t5515/fetch.br-branches-default-merge_branches-default +++ b/t/t5515/fetch.br-branches-default-merge_branches-default @@ -1,5 +1,6 @@ # br-branches-default-merge branches-default -754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../ +754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ +0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-default-octopus b/t/t5515/fetch.br-branches-default-octopus index 128397d737..ec39c54b7e 100644 --- a/t/t5515/fetch.br-branches-default-octopus +++ b/t/t5515/fetch.br-branches-default-octopus @@ -1,5 +1,7 @@ # br-branches-default-octopus -754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../ +754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ +8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-default-octopus_branches-default b/t/t5515/fetch.br-branches-default-octopus_branches-default index 4b37cd481a..6bf42e24b6 100644 --- a/t/t5515/fetch.br-branches-default-octopus_branches-default +++ b/t/t5515/fetch.br-branches-default-octopus_branches-default @@ -1,5 +1,7 @@ # 
br-branches-default-octopus branches-default -754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../ +754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ +8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-one-merge b/t/t5515/fetch.br-branches-one-merge index 3a4e77ead5..b4b3b35ce0 100644 --- a/t/t5515/fetch.br-branches-one-merge +++ b/t/t5515/fetch.br-branches-one-merge @@ -1,5 +1,6 @@ # br-branches-one-merge -8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../ +0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-one-merge_branches-one b/t/t5515/fetch.br-branches-one-merge_branches-one index 00e04b435e..2ecef384eb 100644 --- a/t/t5515/fetch.br-branches-one-merge_branches-one +++ b/t/t5515/fetch.br-branches-one-merge_branches-one @@ -1,5 +1,6 @@ # br-branches-one-merge branches-one -8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../ +0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-one-octopus b/t/t5515/fetch.br-branches-one-octopus index 53fe808a3b..96e3029416 100644 --- a/t/t5515/fetch.br-branches-one-octopus +++ b/t/t5515/fetch.br-branches-one-octopus @@ -1,5 +1,6 @@ # br-branches-one-octopus 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-branches-one-octopus_branches-one b/t/t5515/fetch.br-branches-one-octopus_branches-one index 41b18ff78a..55e0bad621 100644 --- a/t/t5515/fetch.br-branches-one-octopus_branches-one +++ b/t/t5515/fetch.br-branches-one-octopus_branches-one @@ -1,5 +1,6 @@ # br-branches-one-octopus branches-one 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-config-glob-octopus b/t/t5515/fetch.br-config-glob-octopus index 9ee213ea45..938e532db2 100644 --- a/t/t5515/fetch.br-config-glob-octopus +++ b/t/t5515/fetch.br-config-glob-octopus @@ -2,7 +2,7 @@ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f 
not-for-merge branch 'master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ 0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../ -6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-config-glob-octopus_config-glob b/t/t5515/fetch.br-config-glob-octopus_config-glob index 44bd0ec59f..c9225bf6ff 100644 --- a/t/t5515/fetch.br-config-glob-octopus_config-glob +++ b/t/t5515/fetch.br-config-glob-octopus_config-glob @@ -2,7 +2,7 @@ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ 0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../ -6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-remote-glob-octopus b/t/t5515/fetch.br-remote-glob-octopus index c1554f8f2d..b08e046195 100644 --- a/t/t5515/fetch.br-remote-glob-octopus +++ b/t/t5515/fetch.br-remote-glob-octopus @@ -2,7 +2,7 @@ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ 0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../ -6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5515/fetch.br-remote-glob-octopus_remote-glob b/t/t5515/fetch.br-remote-glob-octopus_remote-glob index e6134345b8..d4d547c847 100644 --- a/t/t5515/fetch.br-remote-glob-octopus_remote-glob +++ b/t/t5515/fetch.br-remote-glob-octopus_remote-glob @@ -2,7 +2,7 @@ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../ 0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../ -6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../ +6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../ 754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../ 8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../ 22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../ diff --git a/t/t5516-fetch-push.sh b/t/t5516-fetch-push.sh index 4fbd5b1f47..86f9b5346a 100755 --- a/t/t5516-fetch-push.sh +++ b/t/t5516-fetch-push.sh @@ -254,4 +254,32 @@ test_expect_success 'push with dry-run' ' check_push_result $old_commit heads/master ' +test_expect_success 'push updates local refs' ' + + rm -rf parent child && + mkdir parent && cd parent && git init && + echo one >foo && git add foo && git commit -m one && + cd .. 
&& + git clone parent child && cd child && + echo two >foo && git commit -a -m two && + git push && + test $(git rev-parse master) = $(git rev-parse remotes/origin/master) + +' + +test_expect_success 'push does not update local refs on failure' ' + + rm -rf parent child && + mkdir parent && cd parent && git init && + echo one >foo && git add foo && git commit -m one && + echo exit 1 >.git/hooks/pre-receive && + chmod +x .git/hooks/pre-receive && + cd .. && + git clone parent child && cd child && + echo two >foo && git commit -a -m two || exit 1 + git push && exit 1 + test $(git rev-parse master) != $(git rev-parse remotes/origin/master) + +' + test_done diff --git a/t/t5700-clone-reference.sh b/t/t5700-clone-reference.sh index 4e93aaab02..b6a54867b4 100755 --- a/t/t5700-clone-reference.sh +++ b/t/t5700-clone-reference.sh @@ -38,7 +38,7 @@ cd "$base_dir" test_expect_success 'pulling from reference' \ 'cd C && -git pull ../B' +git pull ../B master' cd "$base_dir" @@ -61,7 +61,7 @@ test_expect_success 'existence of info/alternates' \ cd "$base_dir" test_expect_success 'pulling from reference' \ -'cd D && git pull ../B' +'cd D && git pull ../B master' cd "$base_dir" diff --git a/t/t6030-bisect-porcelain.sh b/t/t6030-bisect-porcelain.sh index 03cdba5808..53956c08e2 100755 --- a/t/t6030-bisect-porcelain.sh +++ b/t/t6030-bisect-porcelain.sh @@ -71,6 +71,63 @@ test_expect_success 'bisect start with one bad and good' ' git bisect next ' +# $HASH1 is good, $HASH4 is bad, we skip $HASH3 +# but $HASH2 is bad, +# so we should find $HASH2 as the first bad commit +test_expect_success 'bisect skip: successfull result' ' + git bisect reset && + git bisect start $HASH4 $HASH1 && + git bisect skip && + git bisect bad > my_bisect_log.txt && + grep "$HASH2 is first bad commit" my_bisect_log.txt && + git bisect reset +' + +# $HASH1 is good, $HASH4 is bad, we skip $HASH3 and $HASH2 +# so we should not be able to tell the first bad commit +# among $HASH2, $HASH3 and $HASH4 +test_expect_success 'bisect skip: cannot tell between 3 commits' ' + git bisect start $HASH4 $HASH1 && + git bisect skip || return 1 + + if git bisect skip > my_bisect_log.txt + then + echo Oops, should have failed. + false + else + test $? -eq 2 && + grep "first bad commit could be any of" my_bisect_log.txt && + ! grep $HASH1 my_bisect_log.txt && + grep $HASH2 my_bisect_log.txt && + grep $HASH3 my_bisect_log.txt && + grep $HASH4 my_bisect_log.txt && + git bisect reset + fi +' + +# $HASH1 is good, $HASH4 is bad, we skip $HASH3 +# but $HASH2 is good, +# so we should not be able to tell the first bad commit +# among $HASH3 and $HASH4 +test_expect_success 'bisect skip: cannot tell between 2 commits' ' + git bisect start $HASH4 $HASH1 && + git bisect skip || return 1 + + if git bisect good > my_bisect_log.txt + then + echo Oops, should have failed. + false + else + test $? -eq 2 && + grep "first bad commit could be any of" my_bisect_log.txt && + ! grep $HASH1 my_bisect_log.txt && + ! grep $HASH2 my_bisect_log.txt && + grep $HASH3 my_bisect_log.txt && + grep $HASH4 my_bisect_log.txt && + git bisect reset + fi +' + # We want to automatically find the commit that # introduced "Another" into hello. 
test_expect_success \ @@ -99,6 +156,67 @@ test_expect_success \ grep "$HASH4 is first bad commit" my_bisect_log.txt && git bisect reset' +# $HASH1 is good, $HASH5 is bad, we skip $HASH3 +# but $HASH4 is good, +# so we should find $HASH5 as the first bad commit +HASH5= +test_expect_success 'bisect skip: add line and then a new test' ' + add_line_into_file "5: Another new line." hello && + HASH5=$(git rev-parse --verify HEAD) && + git bisect start $HASH5 $HASH1 && + git bisect skip && + git bisect good > my_bisect_log.txt && + grep "$HASH5 is first bad commit" my_bisect_log.txt && + git bisect log > log_to_replay.txt + git bisect reset +' + +test_expect_success 'bisect skip and bisect replay' ' + git bisect replay log_to_replay.txt > my_bisect_log.txt && + grep "$HASH5 is first bad commit" my_bisect_log.txt && + git bisect reset +' + +HASH6= +test_expect_success 'bisect run & skip: cannot tell between 2' ' + add_line_into_file "6: Yet a line." hello && + HASH6=$(git rev-parse --verify HEAD) && + echo "#"\!"/bin/sh" > test_script.sh && + echo "tail -1 hello | grep Ciao > /dev/null && exit 125" >> test_script.sh && + echo "grep line hello > /dev/null" >> test_script.sh && + echo "test \$? -ne 0" >> test_script.sh && + chmod +x test_script.sh && + git bisect start $HASH6 $HASH1 && + if git bisect run ./test_script.sh > my_bisect_log.txt + then + echo Oops, should have failed. + false + else + test $? -eq 2 && + grep "first bad commit could be any of" my_bisect_log.txt && + ! grep $HASH3 my_bisect_log.txt && + ! grep $HASH6 my_bisect_log.txt && + grep $HASH4 my_bisect_log.txt && + grep $HASH5 my_bisect_log.txt + fi +' + +HASH7= +test_expect_success 'bisect run & skip: find first bad' ' + git bisect reset && + add_line_into_file "7: Should be the last line." hello && + HASH7=$(git rev-parse --verify HEAD) && + echo "#"\!"/bin/sh" > test_script.sh && + echo "tail -1 hello | grep Ciao > /dev/null && exit 125" >> test_script.sh && + echo "tail -1 hello | grep day > /dev/null && exit 125" >> test_script.sh && + echo "grep Yet hello > /dev/null" >> test_script.sh && + echo "test \$? -ne 0" >> test_script.sh && + chmod +x test_script.sh && + git bisect start $HASH7 $HASH1 && + git bisect run ./test_script.sh > my_bisect_log.txt && + grep "$HASH6 is first bad commit" my_bisect_log.txt +' + # # test_done diff --git a/t/t7005-editor.sh b/t/t7005-editor.sh index 28643b0da4..01cc0c02b1 100755 --- a/t/t7005-editor.sh +++ b/t/t7005-editor.sh @@ -4,6 +4,8 @@ test_description='GIT_EDITOR, core.editor, and stuff' . ./test-lib.sh +OLD_TERM="$TERM" + for i in GIT_EDITOR core_editor EDITOR VISUAL vi do cat >e-$i.sh <<-EOF @@ -88,4 +90,6 @@ do ' done +TERM="$OLD_TERM" + test_done diff --git a/t/t8004-blame.sh b/t/t8004-blame.sh new file mode 100755 index 0000000000..ba19ac127e --- /dev/null +++ b/t/t8004-blame.sh @@ -0,0 +1,73 @@ +#!/bin/sh + +# Based on a test case submitted by Björn Steinbrink. + +test_description='git blame on conflicted files' +. ./test-lib.sh + +test_expect_success 'setup first case' ' + # Create the old file + echo "Old line" > file1 && + git add file1 && + git commit --author "Old Line <ol@localhost>" -m file1.a && + + # Branch + git checkout -b foo && + + # Do an ugly move and change + git rm file1 && + echo "New line ..." > file2 && + echo "... 
and more" >> file2 && + git add file2 && + git commit --author "U Gly <ug@localhost>" -m ugly && + + # Back to master and change something + git checkout master && + echo " + +bla" >> file1 && + git commit --author "Old Line <ol@localhost>" -a -m file1.b && + + # Back to foo and merge master + git checkout foo && + if git merge master; then + echo needed conflict here + exit 1 + else + echo merge failed - resolving automatically + fi && + echo "New line ... +... and more + +bla +Even more" > file2 && + git rm file1 && + git commit --author "M Result <mr@localhost>" -a -m merged && + + # Back to master and change file1 again + git checkout master && + sed s/bla/foo/ <file1 >X && + rm file1 && + mv X file1 && + git commit --author "No Bla <nb@localhost>" -a -m replace && + + # Try to merge into foo again + git checkout foo && + if git merge master; then + echo needed conflict here + exit 1 + else + echo merge failed - test is setup + fi +' + +test_expect_success \ + 'blame runs on unconflicted file while other file has conflicts' ' + git blame file2 +' + +test_expect_success 'blame runs on conflicted file in stages 1,3' ' + git blame file1 +' + +test_done diff --git a/t/test-lib.sh b/t/test-lib.sh index cc1253ccab..603a8cd5e7 100644 --- a/t/test-lib.sh +++ b/t/test-lib.sh @@ -59,15 +59,12 @@ esac # ' # . ./test-lib.sh -error () { - echo "* error: $*" - trap - exit - exit 1 -} - -say () { - echo "* $*" -} +[ "x$TERM" != "xdumb" ] && + [ -t 1 ] && + tput bold >/dev/null 2>&1 && + tput setaf 1 >/dev/null 2>&1 && + tput sgr0 >/dev/null 2>&1 && + color=t test "${test_description}" != "" || error "Test script did not set test_description." @@ -84,6 +81,10 @@ do exit 0 ;; -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose) verbose=t; shift ;; + -q|--q|--qu|--qui|--quie|--quiet) + quiet=t; shift ;; + --no-color) + color=; shift ;; --no-python) # noop now... shift ;; @@ -92,6 +93,37 @@ do esac done +if test -n "$color"; then + say_color () { + case "$1" in + error) tput bold; tput setaf 1;; # bold red + skip) tput bold; tput setaf 2;; # bold green + pass) tput setaf 2;; # green + info) tput setaf 3;; # brown + *) test -n "$quiet" && return;; + esac + shift + echo "* $*" + tput sgr0 + } +else + say_color() { + test -z "$1" && test -n "$quiet" && return + shift + echo "* $*" + } +fi + +error () { + say_color error "error: $*" + trap - exit + exit 1 +} + +say () { + say_color info "$*" +} + exec 5>&1 if test "$verbose" = "t" then @@ -122,13 +154,13 @@ test_tick () { test_ok_ () { test_count=$(expr "$test_count" + 1) - say " ok $test_count: $@" + say_color "" " ok $test_count: $@" } test_failure_ () { test_count=$(expr "$test_count" + 1) test_failure=$(expr "$test_failure" + 1); - say "FAIL $test_count: $1" + say_color error "FAIL $test_count: $1" shift echo "$@" | sed -e 's/^/ /' test "$immediate" = "" || { trap - exit; exit 1; } @@ -158,9 +190,9 @@ test_skip () { done case "$to_skip" in t) - say >&3 "skipping test: $@" + say_color skip >&3 "skipping test: $@" test_count=$(expr "$test_count" + 1) - say "skip $test_count: $1" + say_color skip "skip $test_count: $1" : true ;; *) @@ -247,11 +279,11 @@ test_done () { # The Makefile provided will clean this test area so # we will leave things as they are. 
- say "passed all $test_count test(s)" + say_color pass "passed all $test_count test(s)" exit 0 ;; *) - say "failed $test_failure among $test_count test(s)" + say_color error "failed $test_failure among $test_count test(s)" exit 1 ;; esac @@ -296,8 +328,8 @@ do done case "$to_skip" in t) - say >&3 "skipping test $this_test altogether" - say "skip all tests in $this_test" + say_color skip >&3 "skipping test $this_test altogether" + say_color skip "skip all tests in $this_test" test_done esac done diff --git a/templates/hooks--pre-commit b/templates/hooks--pre-commit index 18b87309f6..a19279b3e4 100644 --- a/templates/hooks--pre-commit +++ b/templates/hooks--pre-commit @@ -58,7 +58,7 @@ perl -e ' if (/\s$/) { bad_line("trailing whitespace", $_); } - if (/^\s* /) { + if (/^\s* \t/) { bad_line("indent SP followed by a TAB", $_); } if (/^(?:[<>=]){7}/) { diff --git a/transport.c b/transport.c new file mode 100644 index 0000000000..d44fe7cee7 --- /dev/null +++ b/transport.c @@ -0,0 +1,827 @@ +#include "cache.h" +#include "transport.h" +#include "run-command.h" +#ifndef NO_CURL +#include "http.h" +#endif +#include "pkt-line.h" +#include "fetch-pack.h" +#include "walker.h" +#include "bundle.h" +#include "dir.h" +#include "refs.h" + +/* rsync support */ + +/* + * We copy packed-refs and refs/ into a temporary file, then read the + * loose refs recursively (sorting whenever possible), and then inserting + * those packed refs that are not yet in the list (not validating, but + * assuming that the file is sorted). + * + * Appears refactoring this from refs.c is too cumbersome. + */ + +static int str_cmp(const void *a, const void *b) +{ + const char *s1 = a; + const char *s2 = b; + + return strcmp(s1, s2); +} + +/* path->buf + name_offset is expected to point to "refs/" */ + +static int read_loose_refs(struct strbuf *path, int name_offset, + struct ref **tail) +{ + DIR *dir = opendir(path->buf); + struct dirent *de; + struct { + char **entries; + int nr, alloc; + } list; + int i, pathlen; + + if (!dir) + return -1; + + memset (&list, 0, sizeof(list)); + + while ((de = readdir(dir))) { + if (de->d_name[0] == '.' && (de->d_name[1] == '\0' || + (de->d_name[1] == '.' 
&& + de->d_name[2] == '\0'))) + continue; + ALLOC_GROW(list.entries, list.nr + 1, list.alloc); + list.entries[list.nr++] = xstrdup(de->d_name); + } + closedir(dir); + + /* sort the list */ + + qsort(list.entries, list.nr, sizeof(char *), str_cmp); + + pathlen = path->len; + strbuf_addch(path, '/'); + + for (i = 0; i < list.nr; i++, strbuf_setlen(path, pathlen + 1)) { + strbuf_addstr(path, list.entries[i]); + if (read_loose_refs(path, name_offset, tail)) { + int fd = open(path->buf, O_RDONLY); + char buffer[40]; + struct ref *next; + + if (fd < 0) + continue; + next = alloc_ref(path->len - name_offset + 1); + if (read_in_full(fd, buffer, 40) != 40 || + get_sha1_hex(buffer, next->old_sha1)) { + close(fd); + free(next); + continue; + } + close(fd); + strcpy(next->name, path->buf + name_offset); + (*tail)->next = next; + *tail = next; + } + } + strbuf_setlen(path, pathlen); + + for (i = 0; i < list.nr; i++) + free(list.entries[i]); + free(list.entries); + + return 0; +} + +/* insert the packed refs for which no loose refs were found */ + +static void insert_packed_refs(const char *packed_refs, struct ref **list) +{ + FILE *f = fopen(packed_refs, "r"); + static char buffer[PATH_MAX]; + + if (!f) + return; + + for (;;) { + int cmp = cmp, len; + + if (!fgets(buffer, sizeof(buffer), f)) { + fclose(f); + return; + } + + if (hexval(buffer[0]) > 0xf) + continue; + len = strlen(buffer); + if (buffer[len - 1] == '\n') + buffer[--len] = '\0'; + if (len < 41) + continue; + while ((*list)->next && + (cmp = strcmp(buffer + 41, + (*list)->next->name)) > 0) + list = &(*list)->next; + if (!(*list)->next || cmp < 0) { + struct ref *next = alloc_ref(len - 40); + buffer[40] = '\0'; + if (get_sha1_hex(buffer, next->old_sha1)) { + warning ("invalid SHA-1: %s", buffer); + free(next); + continue; + } + strcpy(next->name, buffer + 41); + next->next = (*list)->next; + (*list)->next = next; + list = &(*list)->next; + } + } +} + +static struct ref *get_refs_via_rsync(const struct transport *transport) +{ + struct strbuf buf = STRBUF_INIT, temp_dir = STRBUF_INIT; + struct ref dummy, *tail = &dummy; + struct child_process rsync; + const char *args[5]; + int temp_dir_len; + + /* copy the refs to the temporary directory */ + + strbuf_addstr(&temp_dir, git_path("rsync-refs-XXXXXX")); + if (!mkdtemp(temp_dir.buf)) + die ("Could not make temporary directory"); + temp_dir_len = temp_dir.len; + + strbuf_addstr(&buf, transport->url); + strbuf_addstr(&buf, "/refs"); + + memset(&rsync, 0, sizeof(rsync)); + rsync.argv = args; + rsync.stdout_to_stderr = 1; + args[0] = "rsync"; + args[1] = (transport->verbose > 0) ? 
"-rv" : "-r"; + args[2] = buf.buf; + args[3] = temp_dir.buf; + args[4] = NULL; + + if (run_command(&rsync)) + die ("Could not run rsync to get refs"); + + strbuf_reset(&buf); + strbuf_addstr(&buf, transport->url); + strbuf_addstr(&buf, "/packed-refs"); + + args[2] = buf.buf; + + if (run_command(&rsync)) + die ("Could not run rsync to get refs"); + + /* read the copied refs */ + + strbuf_addstr(&temp_dir, "/refs"); + read_loose_refs(&temp_dir, temp_dir_len + 1, &tail); + strbuf_setlen(&temp_dir, temp_dir_len); + + tail = &dummy; + strbuf_addstr(&temp_dir, "/packed-refs"); + insert_packed_refs(temp_dir.buf, &tail); + strbuf_setlen(&temp_dir, temp_dir_len); + + if (remove_dir_recursively(&temp_dir, 0)) + warning ("Error removing temporary directory %s.", + temp_dir.buf); + + strbuf_release(&buf); + strbuf_release(&temp_dir); + + return dummy.next; +} + +static int fetch_objs_via_rsync(struct transport *transport, + int nr_objs, struct ref **to_fetch) +{ + struct strbuf buf = STRBUF_INIT; + struct child_process rsync; + const char *args[8]; + int result; + + strbuf_addstr(&buf, transport->url); + strbuf_addstr(&buf, "/objects/"); + + memset(&rsync, 0, sizeof(rsync)); + rsync.argv = args; + rsync.stdout_to_stderr = 1; + args[0] = "rsync"; + args[1] = (transport->verbose > 0) ? "-rv" : "-r"; + args[2] = "--ignore-existing"; + args[3] = "--exclude"; + args[4] = "info"; + args[5] = buf.buf; + args[6] = get_object_directory(); + args[7] = NULL; + + /* NEEDSWORK: handle one level of alternates */ + result = run_command(&rsync); + + strbuf_release(&buf); + + return result; +} + +static int write_one_ref(const char *name, const unsigned char *sha1, + int flags, void *data) +{ + struct strbuf *buf = data; + int len = buf->len; + FILE *f; + + /* when called via for_each_ref(), flags is non-zero */ + if (flags && prefixcmp(name, "refs/heads/") && + prefixcmp(name, "refs/tags/")) + return 0; + + strbuf_addstr(buf, name); + if (safe_create_leading_directories(buf->buf) || + !(f = fopen(buf->buf, "w")) || + fprintf(f, "%s\n", sha1_to_hex(sha1)) < 0 || + fclose(f)) + return error("problems writing temporary file %s", buf->buf); + strbuf_setlen(buf, len); + return 0; +} + +static int write_refs_to_temp_dir(struct strbuf *temp_dir, + int refspec_nr, const char **refspec) +{ + int i; + + for (i = 0; i < refspec_nr; i++) { + unsigned char sha1[20]; + char *ref; + + if (dwim_ref(refspec[i], strlen(refspec[i]), sha1, &ref) != 1) + return error("Could not get ref %s", refspec[i]); + + if (write_one_ref(ref, sha1, 0, temp_dir)) { + free(ref); + return -1; + } + free(ref); + } + return 0; +} + +static int rsync_transport_push(struct transport *transport, + int refspec_nr, const char **refspec, int flags) +{ + struct strbuf buf = STRBUF_INIT, temp_dir = STRBUF_INIT; + int result = 0, i; + struct child_process rsync; + const char *args[10]; + + /* first push the objects */ + + strbuf_addstr(&buf, transport->url); + strbuf_addch(&buf, '/'); + + memset(&rsync, 0, sizeof(rsync)); + rsync.argv = args; + rsync.stdout_to_stderr = 1; + i = 0; + args[i++] = "rsync"; + args[i++] = "-a"; + if (flags & TRANSPORT_PUSH_DRY_RUN) + args[i++] = "--dry-run"; + if (transport->verbose > 0) + args[i++] = "-v"; + args[i++] = "--ignore-existing"; + args[i++] = "--exclude"; + args[i++] = "info"; + args[i++] = get_object_directory(); + args[i++] = buf.buf; + args[i++] = NULL; + + if (run_command(&rsync)) + return error("Could not push objects to %s", transport->url); + + /* copy the refs to the temporary directory; they could be packed. 
*/ + + strbuf_addstr(&temp_dir, git_path("rsync-refs-XXXXXX")); + if (!mkdtemp(temp_dir.buf)) + die ("Could not make temporary directory"); + strbuf_addch(&temp_dir, '/'); + + if (flags & TRANSPORT_PUSH_ALL) { + if (for_each_ref(write_one_ref, &temp_dir)) + return -1; + } else if (write_refs_to_temp_dir(&temp_dir, refspec_nr, refspec)) + return -1; + + i = 2; + if (flags & TRANSPORT_PUSH_DRY_RUN) + args[i++] = "--dry-run"; + if (!(flags & TRANSPORT_PUSH_FORCE)) + args[i++] = "--ignore-existing"; + args[i++] = temp_dir.buf; + args[i++] = transport->url; + args[i++] = NULL; + if (run_command(&rsync)) + result = error("Could not push to %s", transport->url); + + if (remove_dir_recursively(&temp_dir, 0)) + warning ("Could not remove temporary directory %s.", + temp_dir.buf); + + strbuf_release(&buf); + strbuf_release(&temp_dir); + + return result; +} + +/* Generic functions for using commit walkers */ + +static int fetch_objs_via_walker(struct transport *transport, + int nr_objs, struct ref **to_fetch) +{ + char *dest = xstrdup(transport->url); + struct walker *walker = transport->data; + char **objs = xmalloc(nr_objs * sizeof(*objs)); + int i; + + walker->get_all = 1; + walker->get_tree = 1; + walker->get_history = 1; + walker->get_verbosely = transport->verbose >= 0; + walker->get_recover = 0; + + for (i = 0; i < nr_objs; i++) + objs[i] = xstrdup(sha1_to_hex(to_fetch[i]->old_sha1)); + + if (walker_fetch(walker, nr_objs, objs, NULL, NULL)) + die("Fetch failed."); + + for (i = 0; i < nr_objs; i++) + free(objs[i]); + free(objs); + free(dest); + return 0; +} + +static int disconnect_walker(struct transport *transport) +{ + struct walker *walker = transport->data; + if (walker) + walker_free(walker); + return 0; +} + +#ifndef NO_CURL +static int curl_transport_push(struct transport *transport, int refspec_nr, const char **refspec, int flags) { + const char **argv; + int argc; + int err; + + argv = xmalloc((refspec_nr + 11) * sizeof(char *)); + argv[0] = "http-push"; + argc = 1; + if (flags & TRANSPORT_PUSH_ALL) + argv[argc++] = "--all"; + if (flags & TRANSPORT_PUSH_FORCE) + argv[argc++] = "--force"; + if (flags & TRANSPORT_PUSH_DRY_RUN) + argv[argc++] = "--dry-run"; + argv[argc++] = transport->url; + while (refspec_nr--) + argv[argc++] = *refspec++; + argv[argc] = NULL; + err = run_command_v_opt(argv, RUN_GIT_CMD); + switch (err) { + case -ERR_RUN_COMMAND_FORK: + error("unable to fork for %s", argv[0]); + case -ERR_RUN_COMMAND_EXEC: + error("unable to exec %s", argv[0]); + break; + case -ERR_RUN_COMMAND_WAITPID: + case -ERR_RUN_COMMAND_WAITPID_WRONG_PID: + case -ERR_RUN_COMMAND_WAITPID_SIGNAL: + case -ERR_RUN_COMMAND_WAITPID_NOEXIT: + error("%s died with strange error", argv[0]); + } + return !!err; +} + +static int missing__target(int code, int result) +{ + return /* file:// URL -- do we ever use one??? 
*/ + (result == CURLE_FILE_COULDNT_READ_FILE) || + /* http:// and https:// URL */ + (code == 404 && result == CURLE_HTTP_RETURNED_ERROR) || + /* ftp:// URL */ + (code == 550 && result == CURLE_FTP_COULDNT_RETR_FILE) + ; +} + +#define missing_target(a) missing__target((a)->http_code, (a)->curl_result) + +static struct ref *get_refs_via_curl(const struct transport *transport) +{ + struct buffer buffer; + char *data, *start, *mid; + char *ref_name; + char *refs_url; + int i = 0; + + struct active_request_slot *slot; + struct slot_results results; + + struct ref *refs = NULL; + struct ref *ref = NULL; + struct ref *last_ref = NULL; + + data = xmalloc(4096); + buffer.size = 4096; + buffer.posn = 0; + buffer.buffer = data; + + refs_url = xmalloc(strlen(transport->url) + 11); + sprintf(refs_url, "%s/info/refs", transport->url); + + http_init(); + + slot = get_active_slot(); + slot->results = &results; + curl_easy_setopt(slot->curl, CURLOPT_FILE, &buffer); + curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer); + curl_easy_setopt(slot->curl, CURLOPT_URL, refs_url); + curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, NULL); + if (start_active_slot(slot)) { + run_active_slot(slot); + if (results.curl_result != CURLE_OK) { + if (missing_target(&results)) { + free(buffer.buffer); + return NULL; + } else { + free(buffer.buffer); + error("%s", curl_errorstr); + return NULL; + } + } + } else { + free(buffer.buffer); + error("Unable to start request"); + return NULL; + } + + http_cleanup(); + + data = buffer.buffer; + start = NULL; + mid = data; + while (i < buffer.posn) { + if (!start) + start = &data[i]; + if (data[i] == '\t') + mid = &data[i]; + if (data[i] == '\n') { + data[i] = 0; + ref_name = mid + 1; + ref = xmalloc(sizeof(struct ref) + + strlen(ref_name) + 1); + memset(ref, 0, sizeof(struct ref)); + strcpy(ref->name, ref_name); + get_sha1_hex(start, ref->old_sha1); + if (!refs) + refs = ref; + if (last_ref) + last_ref->next = ref; + last_ref = ref; + start = NULL; + } + i++; + } + + free(buffer.buffer); + + return refs; +} + +static int fetch_objs_via_curl(struct transport *transport, + int nr_objs, struct ref **to_fetch) +{ + if (!transport->data) + transport->data = get_http_walker(transport->url); + return fetch_objs_via_walker(transport, nr_objs, to_fetch); +} + +#endif + +struct bundle_transport_data { + int fd; + struct bundle_header header; +}; + +static struct ref *get_refs_from_bundle(const struct transport *transport) +{ + struct bundle_transport_data *data = transport->data; + struct ref *result = NULL; + int i; + + if (data->fd > 0) + close(data->fd); + data->fd = read_bundle_header(transport->url, &data->header); + if (data->fd < 0) + die ("Could not read bundle '%s'.", transport->url); + for (i = 0; i < data->header.references.nr; i++) { + struct ref_list_entry *e = data->header.references.list + i; + struct ref *ref = alloc_ref(strlen(e->name) + 1); + hashcpy(ref->old_sha1, e->sha1); + strcpy(ref->name, e->name); + ref->next = result; + result = ref; + } + return result; +} + +static int fetch_refs_from_bundle(struct transport *transport, + int nr_heads, struct ref **to_fetch) +{ + struct bundle_transport_data *data = transport->data; + return unbundle(&data->header, data->fd); +} + +static int close_bundle(struct transport *transport) +{ + struct bundle_transport_data *data = transport->data; + if (data->fd > 0) + close(data->fd); + free(data); + return 0; +} + +struct git_transport_data { + unsigned thin : 1; + unsigned keep : 1; + int depth; + const char *uploadpack; 
+ const char *receivepack; +}; + +static int set_git_option(struct transport *connection, + const char *name, const char *value) +{ + struct git_transport_data *data = connection->data; + if (!strcmp(name, TRANS_OPT_UPLOADPACK)) { + data->uploadpack = value; + return 0; + } else if (!strcmp(name, TRANS_OPT_RECEIVEPACK)) { + data->receivepack = value; + return 0; + } else if (!strcmp(name, TRANS_OPT_THIN)) { + data->thin = !!value; + return 0; + } else if (!strcmp(name, TRANS_OPT_KEEP)) { + data->keep = !!value; + return 0; + } else if (!strcmp(name, TRANS_OPT_DEPTH)) { + if (!value) + data->depth = 0; + else + data->depth = atoi(value); + return 0; + } + return 1; +} + +static struct ref *get_refs_via_connect(const struct transport *transport) +{ + struct git_transport_data *data = transport->data; + struct ref *refs; + int fd[2]; + char *dest = xstrdup(transport->url); + struct child_process *conn = git_connect(fd, dest, data->uploadpack, 0); + + get_remote_heads(fd[0], &refs, 0, NULL, 0); + packet_flush(fd[1]); + + finish_connect(conn); + + free(dest); + + return refs; +} + +static int fetch_refs_via_pack(struct transport *transport, + int nr_heads, struct ref **to_fetch) +{ + struct git_transport_data *data = transport->data; + char **heads = xmalloc(nr_heads * sizeof(*heads)); + char **origh = xmalloc(nr_heads * sizeof(*origh)); + struct ref *refs; + char *dest = xstrdup(transport->url); + struct fetch_pack_args args; + int i; + + memset(&args, 0, sizeof(args)); + args.uploadpack = data->uploadpack; + args.keep_pack = data->keep; + args.lock_pack = 1; + args.use_thin_pack = data->thin; + args.verbose = transport->verbose > 0; + args.depth = data->depth; + + for (i = 0; i < nr_heads; i++) + origh[i] = heads[i] = xstrdup(to_fetch[i]->name); + refs = fetch_pack(&args, dest, nr_heads, heads, &transport->pack_lockfile); + + for (i = 0; i < nr_heads; i++) + free(origh[i]); + free(origh); + free(heads); + free_refs(refs); + free(dest); + return 0; +} + +static int git_transport_push(struct transport *transport, int refspec_nr, const char **refspec, int flags) { + struct git_transport_data *data = transport->data; + const char **argv; + char *rem; + int argc; + int err; + + argv = xmalloc((refspec_nr + 11) * sizeof(char *)); + argv[0] = "send-pack"; + argc = 1; + if (flags & TRANSPORT_PUSH_ALL) + argv[argc++] = "--all"; + if (flags & TRANSPORT_PUSH_FORCE) + argv[argc++] = "--force"; + if (flags & TRANSPORT_PUSH_DRY_RUN) + argv[argc++] = "--dry-run"; + if (data->receivepack) { + char *rp = xmalloc(strlen(data->receivepack) + 16); + sprintf(rp, "--receive-pack=%s", data->receivepack); + argv[argc++] = rp; + } + if (data->thin) + argv[argc++] = "--thin"; + rem = xmalloc(strlen(transport->remote->name) + 10); + sprintf(rem, "--remote=%s", transport->remote->name); + argv[argc++] = rem; + argv[argc++] = transport->url; + while (refspec_nr--) + argv[argc++] = *refspec++; + argv[argc] = NULL; + err = run_command_v_opt(argv, RUN_GIT_CMD); + switch (err) { + case -ERR_RUN_COMMAND_FORK: + error("unable to fork for %s", argv[0]); + case -ERR_RUN_COMMAND_EXEC: + error("unable to exec %s", argv[0]); + break; + case -ERR_RUN_COMMAND_WAITPID: + case -ERR_RUN_COMMAND_WAITPID_WRONG_PID: + case -ERR_RUN_COMMAND_WAITPID_SIGNAL: + case -ERR_RUN_COMMAND_WAITPID_NOEXIT: + error("%s died with strange error", argv[0]); + } + return !!err; +} + +static int disconnect_git(struct transport *transport) +{ + free(transport->data); + return 0; +} + +static int is_local(const char *url) +{ + const char *colon = strchr(url, 
':'); + const char *slash = strchr(url, '/'); + return !colon || (slash && slash < colon); +} + +static int is_file(const char *url) +{ + struct stat buf; + if (stat(url, &buf)) + return 0; + return S_ISREG(buf.st_mode); +} + +struct transport *transport_get(struct remote *remote, const char *url) +{ + struct transport *ret = xcalloc(1, sizeof(*ret)); + + ret->remote = remote; + ret->url = url; + + if (!prefixcmp(url, "rsync://")) { + ret->get_refs_list = get_refs_via_rsync; + ret->fetch = fetch_objs_via_rsync; + ret->push = rsync_transport_push; + + } else if (!prefixcmp(url, "http://") + || !prefixcmp(url, "https://") + || !prefixcmp(url, "ftp://")) { +#ifdef NO_CURL + error("git was compiled without libcurl support."); +#else + ret->get_refs_list = get_refs_via_curl; + ret->fetch = fetch_objs_via_curl; + ret->push = curl_transport_push; +#endif + ret->disconnect = disconnect_walker; + + } else if (is_local(url) && is_file(url)) { + struct bundle_transport_data *data = xcalloc(1, sizeof(*data)); + ret->data = data; + ret->get_refs_list = get_refs_from_bundle; + ret->fetch = fetch_refs_from_bundle; + ret->disconnect = close_bundle; + + } else { + struct git_transport_data *data = xcalloc(1, sizeof(*data)); + ret->data = data; + ret->set_option = set_git_option; + ret->get_refs_list = get_refs_via_connect; + ret->fetch = fetch_refs_via_pack; + ret->push = git_transport_push; + ret->disconnect = disconnect_git; + + data->thin = 1; + data->uploadpack = "git-upload-pack"; + if (remote && remote->uploadpack) + data->uploadpack = remote->uploadpack; + data->receivepack = "git-receive-pack"; + if (remote && remote->receivepack) + data->receivepack = remote->receivepack; + } + + return ret; +} + +int transport_set_option(struct transport *transport, + const char *name, const char *value) +{ + if (transport->set_option) + return transport->set_option(transport, name, value); + return 1; +} + +int transport_push(struct transport *transport, + int refspec_nr, const char **refspec, int flags) +{ + if (!transport->push) + return 1; + return transport->push(transport, refspec_nr, refspec, flags); +} + +struct ref *transport_get_remote_refs(struct transport *transport) +{ + if (!transport->remote_refs) + transport->remote_refs = transport->get_refs_list(transport); + return transport->remote_refs; +} + +int transport_fetch_refs(struct transport *transport, struct ref *refs) +{ + int rc; + int nr_heads = 0, nr_alloc = 0; + struct ref **heads = NULL; + struct ref *rm; + + for (rm = refs; rm; rm = rm->next) { + if (rm->peer_ref && + !hashcmp(rm->peer_ref->old_sha1, rm->old_sha1)) + continue; + ALLOC_GROW(heads, nr_heads + 1, nr_alloc); + heads[nr_heads++] = rm; + } + + rc = transport->fetch(transport, nr_heads, heads); + free(heads); + return rc; +} + +void transport_unlock_pack(struct transport *transport) +{ + if (transport->pack_lockfile) { + unlink(transport->pack_lockfile); + free(transport->pack_lockfile); + transport->pack_lockfile = NULL; + } +} + +int transport_disconnect(struct transport *transport) +{ + int ret = 0; + if (transport->disconnect) + ret = transport->disconnect(transport); + free(transport); + return ret; +} diff --git a/transport.h b/transport.h new file mode 100644 index 0000000000..df12ea7424 --- /dev/null +++ b/transport.h @@ -0,0 +1,70 @@ +#ifndef TRANSPORT_H +#define TRANSPORT_H + +#include "cache.h" +#include "remote.h" + +struct transport { + struct remote *remote; + const char *url; + void *data; + struct ref *remote_refs; + + /** + * Returns 0 if successful, positive if 
the option is not + * recognized or is inapplicable, and negative if the option + * is applicable but the value is invalid. + **/ + int (*set_option)(struct transport *connection, const char *name, + const char *value); + + struct ref *(*get_refs_list)(const struct transport *transport); + int (*fetch)(struct transport *transport, int refs_nr, struct ref **refs); + int (*push)(struct transport *connection, int refspec_nr, const char **refspec, int flags); + + int (*disconnect)(struct transport *connection); + char *pack_lockfile; + signed verbose : 2; +}; + +#define TRANSPORT_PUSH_ALL 1 +#define TRANSPORT_PUSH_FORCE 2 +#define TRANSPORT_PUSH_DRY_RUN 4 + +/* Returns a transport suitable for the url */ +struct transport *transport_get(struct remote *, const char *); + +/* Transport options which apply to git:// and scp-style URLs */ + +/* The program to use on the remote side to send a pack */ +#define TRANS_OPT_UPLOADPACK "uploadpack" + +/* The program to use on the remote side to receive a pack */ +#define TRANS_OPT_RECEIVEPACK "receivepack" + +/* Transfer the data as a thin pack if not null */ +#define TRANS_OPT_THIN "thin" + +/* Keep the pack that was transferred if not null */ +#define TRANS_OPT_KEEP "keep" + +/* Limit the depth of the fetch if not null */ +#define TRANS_OPT_DEPTH "depth" + +/** + * Returns 0 if the option was used, non-zero otherwise. Prints a + * message to stderr if the option is not used. + **/ +int transport_set_option(struct transport *transport, const char *name, + const char *value); + +int transport_push(struct transport *connection, + int refspec_nr, const char **refspec, int flags); + +struct ref *transport_get_remote_refs(struct transport *transport); + +int transport_fetch_refs(struct transport *transport, struct ref *refs); +void transport_unlock_pack(struct transport *transport); +int transport_disconnect(struct transport *transport); + +#endif diff --git a/tree-diff.c b/tree-diff.c index 26bdbdd2bf..7c261fd7c3 100644 --- a/tree-diff.c +++ b/tree-diff.c @@ -319,6 +319,7 @@ static void try_to_follow_renames(struct tree_desc *t1, struct tree_desc *t2, co diff_opts.detect_rename = DIFF_DETECT_RENAME; diff_opts.output_format = DIFF_FORMAT_NO_OUTPUT; diff_opts.single_follow = opt->paths[0]; + diff_opts.break_opt = opt->break_opt; paths[0] = NULL; diff_tree_setup_paths(paths, &diff_opts); if (diff_setup_done(&diff_opts) < 0) diff --git a/upload-pack.c b/upload-pack.c index fe96ef15c4..67994680f2 100644 --- a/upload-pack.c +++ b/upload-pack.c @@ -9,6 +9,7 @@ #include "diff.h" #include "revision.h" #include "list-objects.h" +#include "run-command.h" static const char upload_pack_usage[] = "git-upload-pack [--strict] [--timeout=nn] <dir>"; @@ -96,110 +97,86 @@ static void show_edge(struct commit *commit) fprintf(pack_pipe, "-%s\n", sha1_to_hex(commit->object.sha1)); } +static int do_rev_list(int fd, void *create_full_pack) +{ + int i; + struct rev_info revs; + + pack_pipe = fdopen(fd, "w"); + if (create_full_pack) + use_thin_pack = 0; /* no point doing it */ + init_revisions(&revs, NULL); + revs.tag_objects = 1; + revs.tree_objects = 1; + revs.blob_objects = 1; + if (use_thin_pack) + revs.edge_hint = 1; + + if (create_full_pack) { + const char *args[] = {"rev-list", "--all", NULL}; + setup_revisions(2, args, &revs, NULL); + } else { + for (i = 0; i < want_obj.nr; i++) { + struct object *o = want_obj.objects[i].item; + /* why??? 
*/ + o->flags &= ~UNINTERESTING; + add_pending_object(&revs, o, NULL); + } + for (i = 0; i < have_obj.nr; i++) { + struct object *o = have_obj.objects[i].item; + o->flags |= UNINTERESTING; + add_pending_object(&revs, o, NULL); + } + setup_revisions(0, NULL, &revs, NULL); + } + prepare_revision_walk(&revs); + mark_edges_uninteresting(revs.commits, &revs, show_edge); + traverse_commit_list(&revs, show_commit, show_object); + return 0; +} + static void create_pack_file(void) { - /* Pipes between rev-list to pack-objects, pack-objects to us - * and pack-objects error stream for progress bar. - */ - int lp_pipe[2], pu_pipe[2], pe_pipe[2]; - pid_t pid_rev_list, pid_pack_objects; + struct async rev_list; + struct child_process pack_objects; int create_full_pack = (nr_our_refs == want_obj.nr && !have_obj.nr); char data[8193], progress[128]; char abort_msg[] = "aborting due to possible repository " "corruption on the remote side."; int buffered = -1; + const char *argv[10]; + int arg = 0; - if (pipe(lp_pipe) < 0) - die("git-upload-pack: unable to create pipe"); - pid_rev_list = fork(); - if (pid_rev_list < 0) + rev_list.proc = do_rev_list; + /* .data is just a boolean: any non-NULL value will do */ + rev_list.data = create_full_pack ? &rev_list : NULL; + if (start_async(&rev_list)) die("git-upload-pack: unable to fork git-rev-list"); - if (!pid_rev_list) { - int i; - struct rev_info revs; - - close(lp_pipe[0]); - pack_pipe = fdopen(lp_pipe[1], "w"); - - if (create_full_pack) - use_thin_pack = 0; /* no point doing it */ - init_revisions(&revs, NULL); - revs.tag_objects = 1; - revs.tree_objects = 1; - revs.blob_objects = 1; - if (use_thin_pack) - revs.edge_hint = 1; - - if (create_full_pack) { - const char *args[] = {"rev-list", "--all", NULL}; - setup_revisions(2, args, &revs, NULL); - } else { - for (i = 0; i < want_obj.nr; i++) { - struct object *o = want_obj.objects[i].item; - /* why??? 
*/ - o->flags &= ~UNINTERESTING; - add_pending_object(&revs, o, NULL); - } - for (i = 0; i < have_obj.nr; i++) { - struct object *o = have_obj.objects[i].item; - o->flags |= UNINTERESTING; - add_pending_object(&revs, o, NULL); - } - setup_revisions(0, NULL, &revs, NULL); - } - prepare_revision_walk(&revs); - mark_edges_uninteresting(revs.commits, &revs, show_edge); - traverse_commit_list(&revs, show_commit, show_object); - exit(0); - } - - if (pipe(pu_pipe) < 0) - die("git-upload-pack: unable to create pipe"); - if (pipe(pe_pipe) < 0) - die("git-upload-pack: unable to create pipe"); - pid_pack_objects = fork(); - if (pid_pack_objects < 0) { + argv[arg++] = "pack-objects"; + argv[arg++] = "--stdout"; + if (!no_progress) + argv[arg++] = "--progress"; + if (use_ofs_delta) + argv[arg++] = "--delta-base-offset"; + argv[arg++] = NULL; + + memset(&pack_objects, 0, sizeof(pack_objects)); + pack_objects.in = rev_list.out; /* start_command closes it */ + pack_objects.out = -1; + pack_objects.err = -1; + pack_objects.git_cmd = 1; + pack_objects.argv = argv; + + if (start_command(&pack_objects)) { /* daemon sets things up to ignore TERM */ - kill(pid_rev_list, SIGKILL); + kill(rev_list.pid, SIGKILL); die("git-upload-pack: unable to fork git-pack-objects"); } - if (!pid_pack_objects) { - const char *argv[10]; - int i = 0; - - dup2(lp_pipe[0], 0); - dup2(pu_pipe[1], 1); - dup2(pe_pipe[1], 2); - - close(lp_pipe[0]); - close(lp_pipe[1]); - close(pu_pipe[0]); - close(pu_pipe[1]); - close(pe_pipe[0]); - close(pe_pipe[1]); - - argv[i++] = "pack-objects"; - argv[i++] = "--stdout"; - if (!no_progress) - argv[i++] = "--progress"; - if (use_ofs_delta) - argv[i++] = "--delta-base-offset"; - argv[i++] = NULL; - - execv_git_cmd(argv); - kill(pid_rev_list, SIGKILL); - die("git-upload-pack: unable to exec git-pack-objects"); - } - - close(lp_pipe[0]); - close(lp_pipe[1]); - /* We read from pe_pipe[0] to capture stderr output for - * progress bar, and pu_pipe[0] to capture the pack data. + /* We read from pack_objects.err to capture stderr output for + * progress bar, and pack_objects.out to capture the pack data. */ - close(pe_pipe[1]); - close(pu_pipe[1]); while (1) { const char *who; @@ -214,14 +191,14 @@ static void create_pack_file(void) pollsize = 0; pe = pu = -1; - if (0 <= pu_pipe[0]) { - pfd[pollsize].fd = pu_pipe[0]; + if (0 <= pack_objects.out) { + pfd[pollsize].fd = pack_objects.out; pfd[pollsize].events = POLLIN; pu = pollsize; pollsize++; } - if (0 <= pe_pipe[0]) { - pfd[pollsize].fd = pe_pipe[0]; + if (0 <= pack_objects.err) { + pfd[pollsize].fd = pack_objects.err; pfd[pollsize].events = POLLIN; pe = pollsize; pollsize++; @@ -254,13 +231,13 @@ static void create_pack_file(void) *cp++ = buffered; outsz++; } - sz = xread(pu_pipe[0], cp, + sz = xread(pack_objects.out, cp, sizeof(data) - outsz); if (0 < sz) ; else if (sz == 0) { - close(pu_pipe[0]); - pu_pipe[0] = -1; + close(pack_objects.out); + pack_objects.out = -1; } else goto fail; @@ -279,13 +256,13 @@ static void create_pack_file(void) /* Status ready; we ship that in the side-band * or dump to the standard error. 
*/ - sz = xread(pe_pipe[0], progress, + sz = xread(pack_objects.err, progress, sizeof(progress)); if (0 < sz) send_client_data(2, progress, sz); else if (sz == 0) { - close(pe_pipe[0]); - pe_pipe[0] = -1; + close(pack_objects.err); + pack_objects.err = -1; } else goto fail; @@ -293,12 +270,12 @@ static void create_pack_file(void) } /* See if the children are still there */ - if (pid_rev_list || pid_pack_objects) { + if (rev_list.pid || pack_objects.pid) { pid = waitpid(-1, &status, WNOHANG); if (!pid) continue; - who = ((pid == pid_rev_list) ? "git-rev-list" : - (pid == pid_pack_objects) ? "git-pack-objects" : + who = ((pid == rev_list.pid) ? "git-rev-list" : + (pid == pack_objects.pid) ? "git-pack-objects" : NULL); if (!who) { if (pid < 0) { @@ -315,11 +292,11 @@ static void create_pack_file(void) who); goto fail; } - if (pid == pid_rev_list) - pid_rev_list = 0; - if (pid == pid_pack_objects) - pid_pack_objects = 0; - if (pid_rev_list || pid_pack_objects) + if (pid == rev_list.pid) + rev_list.pid = 0; + if (pid == pack_objects.pid) + pack_objects.pid = 0; + if (rev_list.pid || pack_objects.pid) continue; } @@ -340,10 +317,10 @@ static void create_pack_file(void) return; } fail: - if (pid_pack_objects) - kill(pid_pack_objects, SIGKILL); - if (pid_rev_list) - kill(pid_rev_list, SIGKILL); + if (pack_objects.pid) + kill(pack_objects.pid, SIGKILL); + if (rev_list.pid) + kill(rev_list.pid, SIGKILL); send_client_data(3, abort_msg, sizeof(abort_msg)); die("git-upload-pack: %s", abort_msg); } @@ -1,5 +1,5 @@ #include "cache.h" -#include "fetch.h" +#include "walker.h" #include "commit.h" #include "tree.h" #include "tree-walk.h" @@ -7,16 +7,11 @@ #include "blob.h" #include "refs.h" -int get_tree = 0; -int get_history = 0; -int get_all = 0; -int get_verbosely = 0; -int get_recover = 0; static unsigned char current_commit_sha1[20]; -void pull_say(const char *fmt, const char *hex) +void walker_say(struct walker *walker, const char *fmt, const char *hex) { - if (get_verbosely) + if (walker->get_verbosely) fprintf(stderr, fmt, hex); } @@ -31,9 +26,9 @@ static void report_missing(const struct object *obj) sha1_to_hex(current_commit_sha1)); } -static int process(struct object *obj); +static int process(struct walker *walker, struct object *obj); -static int process_tree(struct tree *tree) +static int process_tree(struct walker *walker, struct tree *tree) { struct tree_desc desc; struct name_entry entry; @@ -58,7 +53,7 @@ static int process_tree(struct tree *tree) if (blob) obj = &blob->object; } - if (!obj || process(obj)) + if (!obj || process(walker, obj)) return -1; } free(tree->buffer); @@ -73,7 +68,7 @@ static int process_tree(struct tree *tree) static struct commit_list *complete = NULL; -static int process_commit(struct commit *commit) +static int process_commit(struct walker *walker, struct commit *commit) { if (parse_commit(commit)) return -1; @@ -87,43 +82,43 @@ static int process_commit(struct commit *commit) hashcpy(current_commit_sha1, commit->object.sha1); - pull_say("walk %s\n", sha1_to_hex(commit->object.sha1)); + walker_say(walker, "walk %s\n", sha1_to_hex(commit->object.sha1)); - if (get_tree) { - if (process(&commit->tree->object)) + if (walker->get_tree) { + if (process(walker, &commit->tree->object)) return -1; - if (!get_all) - get_tree = 0; + if (!walker->get_all) + walker->get_tree = 0; } - if (get_history) { + if (walker->get_history) { struct commit_list *parents = commit->parents; for (; parents; parents = parents->next) { - if (process(&parents->item->object)) + if 
(process(walker, &parents->item->object)) return -1; } } return 0; } -static int process_tag(struct tag *tag) +static int process_tag(struct walker *walker, struct tag *tag) { if (parse_tag(tag)) return -1; - return process(tag->tagged); + return process(walker, tag->tagged); } static struct object_list *process_queue = NULL; static struct object_list **process_queue_end = &process_queue; -static int process_object(struct object *obj) +static int process_object(struct walker *walker, struct object *obj) { if (obj->type == OBJ_COMMIT) { - if (process_commit((struct commit *)obj)) + if (process_commit(walker, (struct commit *)obj)) return -1; return 0; } if (obj->type == OBJ_TREE) { - if (process_tree((struct tree *)obj)) + if (process_tree(walker, (struct tree *)obj)) return -1; return 0; } @@ -131,7 +126,7 @@ static int process_object(struct object *obj) return 0; } if (obj->type == OBJ_TAG) { - if (process_tag((struct tag *)obj)) + if (process_tag(walker, (struct tag *)obj)) return -1; return 0; } @@ -140,7 +135,7 @@ static int process_object(struct object *obj) typename(obj->type), sha1_to_hex(obj->sha1)); } -static int process(struct object *obj) +static int process(struct walker *walker, struct object *obj) { if (obj->flags & SEEN) return 0; @@ -153,7 +148,7 @@ static int process(struct object *obj) else { if (obj->flags & COMPLETE) return 0; - prefetch(obj->sha1); + walker->prefetch(walker, obj->sha1); } object_list_insert(obj, process_queue_end); @@ -161,7 +156,7 @@ static int process(struct object *obj) return 0; } -static int loop(void) +static int loop(struct walker *walker) { struct object_list *elem; @@ -177,25 +172,25 @@ static int loop(void) * the queue because we needed to fetch it first. */ if (! (obj->flags & TO_SCAN)) { - if (fetch(obj->sha1)) { + if (walker->fetch(walker, obj->sha1)) { report_missing(obj); return -1; } } if (!obj->type) parse_object(obj->sha1); - if (process_object(obj)) + if (process_object(walker, obj)) return -1; } return 0; } -static int interpret_target(char *target, unsigned char *sha1) +static int interpret_target(struct walker *walker, char *target, unsigned char *sha1) { if (!get_sha1_hex(target, sha1)) return 0; if (!check_ref_format(target)) { - if (!fetch_ref(target, sha1)) { + if (!walker->fetch_ref(walker, target, sha1)) { return 0; } } @@ -212,7 +207,7 @@ static int mark_complete(const char *path, const unsigned char *sha1, int flag, return 0; } -int pull_targets_stdin(char ***target, const char ***write_ref) +int walker_targets_stdin(char ***target, const char ***write_ref) { int targets = 0, targets_alloc = 0; struct strbuf buf; @@ -242,7 +237,7 @@ int pull_targets_stdin(char ***target, const char ***write_ref) return targets; } -void pull_targets_free(int targets, char **target, const char **write_ref) +void walker_targets_free(int targets, char **target, const char **write_ref) { while (targets--) { free(target[targets]); @@ -251,8 +246,8 @@ void pull_targets_free(int targets, char **target, const char **write_ref) } } -int pull(int targets, char **target, const char **write_ref, - const char *write_ref_log_details) +int walker_fetch(struct walker *walker, int targets, char **target, + const char **write_ref, const char *write_ref_log_details) { struct ref_lock **lock = xcalloc(targets, sizeof(struct ref_lock *)); unsigned char *sha1 = xmalloc(targets * 20); @@ -274,19 +269,19 @@ int pull(int targets, char **target, const char **write_ref, } } - if (!get_recover) + if (!walker->get_recover) for_each_ref(mark_complete, NULL); for (i = 0; 
i < targets; i++) { - if (interpret_target(target[i], &sha1[20 * i])) { + if (interpret_target(walker, target[i], &sha1[20 * i])) { error("Could not interpret %s as something to pull", target[i]); goto unlock_and_fail; } - if (process(lookup_unknown_object(&sha1[20 * i]))) + if (process(walker, lookup_unknown_object(&sha1[20 * i]))) goto unlock_and_fail; } - if (loop()) + if (loop(walker)) goto unlock_and_fail; if (write_ref_log_details) { @@ -307,10 +302,16 @@ int pull(int targets, char **target, const char **write_ref, return 0; - unlock_and_fail: for (i = 0; i < targets; i++) if (lock[i]) unlock_ref(lock[i]); + return -1; } + +void walker_free(struct walker *walker) +{ + walker->cleanup(walker); + free(walker); +} diff --git a/walker.h b/walker.h new file mode 100644 index 0000000000..ea2c363f4e --- /dev/null +++ b/walker.h @@ -0,0 +1,37 @@ +#ifndef WALKER_H +#define WALKER_H + +struct walker { + void *data; + int (*fetch_ref)(struct walker *, char *ref, unsigned char *sha1); + void (*prefetch)(struct walker *, unsigned char *sha1); + int (*fetch)(struct walker *, unsigned char *sha1); + void (*cleanup)(struct walker *); + int get_tree; + int get_history; + int get_all; + int get_verbosely; + int get_recover; + + int corrupt_object_found; +}; + +/* Report what we got under get_verbosely */ +void walker_say(struct walker *walker, const char *, const char *); + +/* Load pull targets from stdin */ +int walker_targets_stdin(char ***target, const char ***write_ref); + +/* Free up loaded targets */ +void walker_targets_free(int targets, char **target, const char **write_ref); + +/* If write_ref is set, the ref filename to write the target value to. */ +/* If write_ref_log_details is set, additional text will appear in the ref log. */ +int walker_fetch(struct walker *impl, int targets, char **target, + const char **write_ref, const char *write_ref_log_details); + +void walker_free(struct walker *walker); + +struct walker *get_http_walker(const char *url); + +#endif /* WALKER_H */ diff --git a/wt-status.c b/wt-status.c index 10ce6eedc7..03b5ec4488 100644 --- a/wt-status.c +++ b/wt-status.c @@ -52,31 +52,34 @@ void wt_status_prepare(struct wt_status *s) head = resolve_ref("HEAD", sha1, 0, NULL); s->branch = head ? 
xstrdup(head) : NULL; s->reference = "HEAD"; + s->fp = stdout; + s->index_file = get_index_file(); } -static void wt_status_print_cached_header(const char *reference) +static void wt_status_print_cached_header(struct wt_status *s) { const char *c = color(WT_STATUS_HEADER); - color_printf_ln(c, "# Changes to be committed:"); - if (reference) { - color_printf_ln(c, "# (use \"git reset %s <file>...\" to unstage)", reference); + color_fprintf_ln(s->fp, c, "# Changes to be committed:"); + if (s->reference) { + color_fprintf_ln(s->fp, c, "# (use \"git reset %s <file>...\" to unstage)", s->reference); } else { - color_printf_ln(c, "# (use \"git rm --cached <file>...\" to unstage)"); + color_fprintf_ln(s->fp, c, "# (use \"git rm --cached <file>...\" to unstage)"); } - color_printf_ln(c, "#"); + color_fprintf_ln(s->fp, c, "#"); } -static void wt_status_print_header(const char *main, const char *sub) +static void wt_status_print_header(struct wt_status *s, + const char *main, const char *sub) { const char *c = color(WT_STATUS_HEADER); - color_printf_ln(c, "# %s:", main); - color_printf_ln(c, "# (%s)", sub); - color_printf_ln(c, "#"); + color_fprintf_ln(s->fp, c, "# %s:", main); + color_fprintf_ln(s->fp, c, "# (%s)", sub); + color_fprintf_ln(s->fp, c, "#"); } -static void wt_status_print_trailer(void) +static void wt_status_print_trailer(struct wt_status *s) { - color_printf_ln(color(WT_STATUS_HEADER), "#"); + color_fprintf_ln(s->fp, color(WT_STATUS_HEADER), "#"); } static const char *quote_crlf(const char *in, char *buf, size_t sz) @@ -108,7 +111,8 @@ static const char *quote_crlf(const char *in, char *buf, size_t sz) return ret; } -static void wt_status_print_filepair(int t, struct diff_filepair *p) +static void wt_status_print_filepair(struct wt_status *s, + int t, struct diff_filepair *p) { const char *c = color(t); const char *one, *two; @@ -117,36 +121,36 @@ static void wt_status_print_filepair(int t, struct diff_filepair *p) one = quote_crlf(p->one->path, onebuf, sizeof(onebuf)); two = quote_crlf(p->two->path, twobuf, sizeof(twobuf)); - color_printf(color(WT_STATUS_HEADER), "#\t"); + color_fprintf(s->fp, color(WT_STATUS_HEADER), "#\t"); switch (p->status) { case DIFF_STATUS_ADDED: - color_printf(c, "new file: %s", one); + color_fprintf(s->fp, c, "new file: %s", one); break; case DIFF_STATUS_COPIED: - color_printf(c, "copied: %s -> %s", one, two); + color_fprintf(s->fp, c, "copied: %s -> %s", one, two); break; case DIFF_STATUS_DELETED: - color_printf(c, "deleted: %s", one); + color_fprintf(s->fp, c, "deleted: %s", one); break; case DIFF_STATUS_MODIFIED: - color_printf(c, "modified: %s", one); + color_fprintf(s->fp, c, "modified: %s", one); break; case DIFF_STATUS_RENAMED: - color_printf(c, "renamed: %s -> %s", one, two); + color_fprintf(s->fp, c, "renamed: %s -> %s", one, two); break; case DIFF_STATUS_TYPE_CHANGED: - color_printf(c, "typechange: %s", one); + color_fprintf(s->fp, c, "typechange: %s", one); break; case DIFF_STATUS_UNKNOWN: - color_printf(c, "unknown: %s", one); + color_fprintf(s->fp, c, "unknown: %s", one); break; case DIFF_STATUS_UNMERGED: - color_printf(c, "unmerged: %s", one); + color_fprintf(s->fp, c, "unmerged: %s", one); break; default: die("bug: unhandled diff status %c", p->status); } - printf("\n"); + fprintf(s->fp, "\n"); } static void wt_status_print_updated_cb(struct diff_queue_struct *q, @@ -160,14 +164,14 @@ static void wt_status_print_updated_cb(struct diff_queue_struct *q, if (q->queue[i]->status == 'U') continue; if (!shown_header) { - 
wt_status_print_cached_header(s->reference); + wt_status_print_cached_header(s); s->commitable = 1; shown_header = 1; } - wt_status_print_filepair(WT_STATUS_UPDATED, q->queue[i]); + wt_status_print_filepair(s, WT_STATUS_UPDATED, q->queue[i]); } if (shown_header) - wt_status_print_trailer(); + wt_status_print_trailer(s); } static void wt_status_print_changed_cb(struct diff_queue_struct *q, @@ -184,18 +188,18 @@ static void wt_status_print_changed_cb(struct diff_queue_struct *q, msg = use_add_rm_msg; break; } - wt_status_print_header("Changed but not updated", msg); + wt_status_print_header(s, "Changed but not updated", msg); } for (i = 0; i < q->nr; i++) - wt_status_print_filepair(WT_STATUS_CHANGED, q->queue[i]); + wt_status_print_filepair(s, WT_STATUS_CHANGED, q->queue[i]); if (q->nr) - wt_status_print_trailer(); + wt_status_print_trailer(s); } static void wt_read_cache(struct wt_status *s) { discard_cache(); - read_cache(); + read_cache_from(s->index_file); } static void wt_status_print_initial(struct wt_status *s) @@ -206,16 +210,16 @@ static void wt_status_print_initial(struct wt_status *s) wt_read_cache(s); if (active_nr) { s->commitable = 1; - wt_status_print_cached_header(NULL); + wt_status_print_cached_header(s); } for (i = 0; i < active_nr; i++) { - color_printf(color(WT_STATUS_HEADER), "#\t"); - color_printf_ln(color(WT_STATUS_UPDATED), "new file: %s", + color_fprintf(s->fp, color(WT_STATUS_HEADER), "#\t"); + color_fprintf_ln(s->fp, color(WT_STATUS_UPDATED), "new file: %s", quote_crlf(active_cache[i]->name, buf, sizeof(buf))); } if (active_nr) - wt_status_print_trailer(); + wt_status_print_trailer(s); } static void wt_status_print_updated(struct wt_status *s) @@ -282,12 +286,12 @@ static void wt_status_print_untracked(struct wt_status *s) } if (!shown_header) { s->workdir_untracked = 1; - wt_status_print_header("Untracked files", + wt_status_print_header(s, "Untracked files", use_add_to_include_msg); shown_header = 1; } - color_printf(color(WT_STATUS_HEADER), "#\t"); - color_printf_ln(color(WT_STATUS_UNTRACKED), "%.*s", + color_fprintf(s->fp, color(WT_STATUS_HEADER), "#\t"); + color_fprintf_ln(s->fp, color(WT_STATUS_UNTRACKED), "%.*s", ent->len, ent->name); } } @@ -317,14 +321,14 @@ void wt_status_print(struct wt_status *s) branch_name = ""; on_what = "Not currently on any branch."; } - color_printf_ln(color(WT_STATUS_HEADER), + color_fprintf_ln(s->fp, color(WT_STATUS_HEADER), "# %s%s", on_what, branch_name); } if (s->is_initial) { - color_printf_ln(color(WT_STATUS_HEADER), "#"); - color_printf_ln(color(WT_STATUS_HEADER), "# Initial commit"); - color_printf_ln(color(WT_STATUS_HEADER), "#"); + color_fprintf_ln(s->fp, color(WT_STATUS_HEADER), "#"); + color_fprintf_ln(s->fp, color(WT_STATUS_HEADER), "# Initial commit"); + color_fprintf_ln(s->fp, color(WT_STATUS_HEADER), "#"); wt_status_print_initial(s); } else { @@ -338,7 +342,7 @@ void wt_status_print(struct wt_status *s) wt_status_print_verbose(s); if (!s->commitable) { if (s->amend) - printf("# No changes\n"); + fprintf(s->fp, "# No changes\n"); else if (s->workdir_dirty) printf("no changes added to commit (use \"git add\" and/or \"git commit -a\")\n"); else if (s->workdir_untracked) diff --git a/wt-status.h b/wt-status.h index cfea4ae688..77449326db 100644 --- a/wt-status.h +++ b/wt-status.h @@ -1,6 +1,8 @@ #ifndef STATUS_H #define STATUS_H +#include <stdio.h> + enum color_wt_status { WT_STATUS_HEADER, WT_STATUS_UPDATED, @@ -19,6 +21,8 @@ struct wt_status { int commitable; int workdir_dirty; int workdir_untracked; + const char 
*index_file; + FILE *fp; }; int git_status_config(const char *var, const char *value);
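
The upload-pack hunks above replace the hand-rolled fork()/pipe()/execv_git_cmd() plumbing with the run-command API. As a rough sketch of that pattern only (this is not upload-pack itself; the rev-list arguments and the draining of the output are simplified placeholders), chaining two git commands through struct child_process looks like this:

#include "cache.h"
#include "run-command.h"

static void pack_pipeline_sketch(void)
{
        const char *rev_list_argv[] = { "rev-list", "--objects", "--all", NULL };
        const char *pack_objects_argv[] = { "pack-objects", "--stdout", NULL };
        struct child_process rev_list, pack_objects;
        char buf[8192];
        ssize_t sz;

        memset(&rev_list, 0, sizeof(rev_list));
        rev_list.argv = rev_list_argv;
        rev_list.git_cmd = 1;           /* run it through the git wrapper */
        rev_list.out = -1;              /* have start_command() create a pipe */
        if (start_command(&rev_list))
                die("unable to start git-rev-list");

        memset(&pack_objects, 0, sizeof(pack_objects));
        pack_objects.argv = pack_objects_argv;
        pack_objects.git_cmd = 1;
        pack_objects.in = rev_list.out; /* start_command() closes it in the parent */
        pack_objects.out = -1;          /* read the pack data from this fd */
        if (start_command(&pack_objects)) {
                kill(rev_list.pid, SIGKILL);
                die("unable to start git-pack-objects");
        }

        /* Drain the pack to stdout; a real caller must keep reading
         * or the child will block on a full pipe. */
        while ((sz = xread(pack_objects.out, buf, sizeof(buf))) > 0)
                write_or_die(1, buf, sz);
        close(pack_objects.out);

        if (finish_command(&pack_objects) || finish_command(&rev_list))
                die("pack pipeline failed");
}

upload-pack additionally asks for pack_objects.err = -1 and multiplexes both pipes with poll(), as the hunks above show; the sketch leaves stderr inherited for simplicity.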
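
The walker conversion moves the old pull.c globals (get_tree, get_history, get_all, get_verbosely, get_recover) into the struct walker declared in the new walker.h. A hypothetical caller would drive it roughly as follows; the URL, ref name, and reflog message are placeholders, and get_http_walker() only exists in curl-enabled builds:

#include "cache.h"
#include "walker.h"

static int fetch_one_ref_sketch(const char *url, char *target,
                                const char *write_ref)
{
        struct walker *walker = get_http_walker(url);
        int ret;

        walker->get_tree = 1;           /* fetch the tree of each wanted commit */
        walker->get_history = 1;        /* and walk its ancestry */
        walker->get_all = 1;            /* trees of every commit, not just the tip */
        walker->get_verbosely = 0;
        walker->get_recover = 0;

        /* one target; write_ref may be NULL if no ref should be updated */
        ret = walker_fetch(walker, 1, &target, &write_ref, "fetch (sketch)");
        walker_free(walker);            /* runs walker->cleanup() and frees it */
        return ret;
}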
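
The wt-status change threads a FILE * and an index path through struct wt_status; wt_status_prepare() defaults them to stdout and get_index_file(). A caller that wants the report written elsewhere, against an alternate index, could do something like the sketch below (the function and its paths are invented for illustration; note also that in this patch a few trailing messages in wt_status_print() still go through printf() rather than s->fp):

#include "cache.h"
#include "wt-status.h"

static void status_to_file_sketch(const char *out_path, const char *index_path)
{
        struct wt_status s;
        FILE *fp = fopen(out_path, "w");

        if (!fp)
                die("cannot open %s", out_path);

        wt_status_prepare(&s);          /* sets s.fp = stdout, s.index_file = get_index_file() */
        s.fp = fp;                      /* redirect the report */
        if (index_path)
                s.index_file = index_path;      /* e.g. a temporary index */
        wt_status_print(&s);

        if (fclose(fp))
                die("write error on %s", out_path);
}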