-rw-r--r--  .gitignore | 1
-rw-r--r--  Documentation/Makefile | 44
-rw-r--r--  Documentation/RelNotes/1.8.3.2.txt | 59
-rw-r--r--  Documentation/RelNotes/1.8.3.3.txt | 11
-rw-r--r--  Documentation/RelNotes/1.8.4.txt | 62
-rw-r--r--  Documentation/config.txt | 3
-rw-r--r--  Documentation/diff-options.txt | 3
-rw-r--r--  Documentation/git-check-ignore.txt | 2
-rw-r--r--  Documentation/git-config.txt | 9
-rw-r--r--  Documentation/git-diff.txt | 14
-rw-r--r--  Documentation/git-log.txt | 4
-rw-r--r--  Documentation/git-merge.txt | 4
-rw-r--r--  Documentation/git-name-rev.txt | 3
-rw-r--r--  Documentation/git-push.txt | 9
-rw-r--r--  Documentation/git-rev-parse.txt | 16
-rw-r--r--  Documentation/git-stash.txt | 13
-rw-r--r--  Documentation/git-web--browse.txt | 1
-rw-r--r--  Documentation/git.txt | 3
-rw-r--r--  Documentation/rev-list-options.txt | 4
-rw-r--r--  Documentation/technical/api-builtin.txt | 2
-rw-r--r--  Documentation/technical/api-parse-options.txt | 12
-rw-r--r--  Documentation/user-manual.txt | 19
-rwxr-xr-x  GIT-VERSION-GEN | 2
-rw-r--r--  Makefile | 5
-rw-r--r--  advice.c | 2
-rw-r--r--  advice.h | 1
-rw-r--r--  builtin.h | 16
-rw-r--r--  builtin/apply.c | 2
-rw-r--r--  builtin/checkout.c | 7
-rw-r--r--  builtin/clone.c | 5
-rw-r--r--  builtin/commit.c | 3
-rw-r--r--  builtin/help.c | 2
-rw-r--r--  builtin/log.c | 2
-rw-r--r--  builtin/ls-files.c | 12
-rw-r--r--  builtin/name-rev.c | 36
-rw-r--r--  builtin/notes.c | 138
-rw-r--r--  builtin/replace.c | 2
-rw-r--r--  builtin/rev-parse.c | 24
-rw-r--r--  builtin/rm.c | 101
-rw-r--r--  builtin/show-branch.c | 14
-rw-r--r--  cache.h | 60
-rw-r--r--  commit-slab.h | 98
-rw-r--r--  commit.c | 180
-rw-r--r--  commit.h | 17
-rw-r--r--  configure.ac | 2
-rw-r--r--  contrib/completion/git-prompt.sh | 12
-rw-r--r--  contrib/mw-to-git/.perlcriticrc | 28
-rw-r--r--  contrib/mw-to-git/Makefile | 2
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl | 537
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 19
-rw-r--r--  contrib/mw-to-git/t/test.config | 4
-rw-r--r--  diff.c | 2
-rwxr-xr-x  git-add--interactive.perl | 5
-rwxr-xr-x  git-am.sh | 17
-rw-r--r--  git-mergetool--lib.sh | 82
-rwxr-xr-x  git-p4.py | 2
-rwxr-xr-x  git-pull.sh | 11
-rwxr-xr-x  git-rebase.sh | 15
-rwxr-xr-x  git-send-email.perl | 5
-rw-r--r--  git-sh-setup.sh | 12
-rwxr-xr-x  git-stash.sh | 52
-rwxr-xr-x  git-submodule.sh | 142
-rwxr-xr-x  git-svn.perl | 2
-rwxr-xr-x  git-web--browse.sh | 11
-rw-r--r--  http.c | 12
-rw-r--r--  notes-merge.c | 27
-rw-r--r--  notes-merge.h | 14
-rw-r--r--  notes-utils.c | 157
-rw-r--r--  notes-utils.h | 37
-rw-r--r--  prio-queue.c | 84
-rw-r--r--  prio-queue.h | 48
-rw-r--r--  read-cache.c | 181
-rw-r--r--  refs.c | 422
-rw-r--r--  refs.h | 26
-rw-r--r--  revision.c | 13
-rw-r--r--  revision.h | 6
-rw-r--r--  sequencer.c | 29
-rw-r--r--  sha1_file.c | 11
-rw-r--r--  t/lib-httpd/apache.conf | 20
-rw-r--r--  t/lib-rebase.sh | 33
-rw-r--r--  t/lib-t6000.sh | 104
-rwxr-xr-x  t/t0009-prio-queue.sh | 50
-rwxr-xr-x  t/t1513-rev-parse-prefix.sh | 96
-rwxr-xr-x  t/t3400-rebase.sh | 60
-rwxr-xr-x  t/t3401-rebase-partial.sh | 69
-rwxr-xr-x  t/t3404-rebase-interactive.sh | 21
-rwxr-xr-x  t/t3406-rebase-message.sh | 50
-rwxr-xr-x  t/t3409-rebase-preserve-merges.sh | 53
-rwxr-xr-x  t/t3420-rebase-autostash.sh | 22
-rwxr-xr-x  t/t3421-rebase-topology-linear.sh | 350
-rwxr-xr-x  t/t3425-rebase-topology-merges.sh | 258
-rwxr-xr-x  t/t3600-rm.sh | 96
-rwxr-xr-x  t/t3903-stash.sh | 19
-rwxr-xr-x  t/t4015-diff-whitespace.sh | 345
-rwxr-xr-x  t/t4111-apply-subdir.sh | 14
-rwxr-xr-x  t/t4150-am.sh | 40
-rwxr-xr-x  t/t5150-request-pull.sh | 2
-rwxr-xr-x  t/t5303-pack-corruption-resilience.sh | 29
-rwxr-xr-x  t/t5520-pull.sh | 29
-rwxr-xr-x  t/t6002-rev-list-bisect.sh | 84
-rwxr-xr-x  t/t6003-rev-list-topo-order.sh | 101
-rwxr-xr-x  t/t7011-skip-worktree-reading.sh | 4
-rwxr-xr-x  t/t7400-submodule-basic.sh | 95
-rwxr-xr-x  t/t7401-submodule-summary.sh | 116
-rwxr-xr-x  t/t7403-submodule-sync.sh | 388
-rwxr-xr-x  t/t7406-submodule-update.sh | 15
-rwxr-xr-x  t/t7407-submodule-foreach.sh | 16
-rwxr-xr-x  t/t7512-status-help.sh | 6
-rwxr-xr-x  t/t7600-merge.sh | 2
-rwxr-xr-x  t/t9001-send-email.sh | 14
-rwxr-xr-x  t/t9802-git-p4-filetype.sh | 4
-rw-r--r--  test-prio-queue.c | 39
-rw-r--r--  transport.c | 2
-rw-r--r--  transport.h | 2
-rw-r--r--  tree-walk.h | 2
-rw-r--r--  unpack-trees.c | 18
-rw-r--r--  wt-status.c | 6
-rw-r--r--  xdiff/xdiff.h | 2
-rw-r--r--  xdiff/xdiffi.c | 29
-rw-r--r--  xdiff/xdiffi.h | 1
-rw-r--r--  xdiff/xemit.c | 49
-rw-r--r--  xdiff/xemit.h | 2
-rw-r--r--  xdiff/xutils.c | 13
-rw-r--r--  xdiff/xutils.h | 1
124 files changed, 4385 insertions, 1442 deletions
diff --git a/.gitignore b/.gitignore
index c0e00eb37b..efa8db0035 100644
--- a/.gitignore
+++ b/.gitignore
@@ -191,6 +191,7 @@
/test-mktemp
/test-parse-options
/test-path-utils
+/test-prio-queue
/test-read-cache
/test-regex
/test-revision-walking
diff --git a/Documentation/Makefile b/Documentation/Makefile
index 62dbd9ac7c..0cfdc36b44 100644
--- a/Documentation/Makefile
+++ b/Documentation/Makefile
@@ -31,11 +31,11 @@ MAN7_TXT += gittutorial.txt
MAN7_TXT += gitworkflows.txt
MAN_TXT = $(MAN1_TXT) $(MAN5_TXT) $(MAN7_TXT)
-MAN_XML=$(patsubst %.txt,%.xml,$(MAN_TXT))
-MAN_HTML=$(patsubst %.txt,%.html,$(MAN_TXT))
+MAN_XML = $(patsubst %.txt,%.xml,$(MAN_TXT))
+MAN_HTML = $(patsubst %.txt,%.html,$(MAN_TXT))
OBSOLETE_HTML = git-remote-helpers.html
-DOC_HTML=$(MAN_HTML) $(OBSOLETE_HTML)
+DOC_HTML = $(MAN_HTML) $(OBSOLETE_HTML)
ARTICLES = howto-index
ARTICLES += everyday
@@ -74,35 +74,35 @@ SP_ARTICLES += technical/api-index
DOC_HTML += $(patsubst %,%.html,$(ARTICLES) $(SP_ARTICLES))
-DOC_MAN1=$(patsubst %.txt,%.1,$(MAN1_TXT))
-DOC_MAN5=$(patsubst %.txt,%.5,$(MAN5_TXT))
-DOC_MAN7=$(patsubst %.txt,%.7,$(MAN7_TXT))
-
-prefix?=$(HOME)
-bindir?=$(prefix)/bin
-htmldir?=$(prefix)/share/doc/git-doc
-pdfdir?=$(prefix)/share/doc/git-doc
-mandir?=$(prefix)/share/man
-man1dir=$(mandir)/man1
-man5dir=$(mandir)/man5
-man7dir=$(mandir)/man7
-# DESTDIR=
+DOC_MAN1 = $(patsubst %.txt,%.1,$(MAN1_TXT))
+DOC_MAN5 = $(patsubst %.txt,%.5,$(MAN5_TXT))
+DOC_MAN7 = $(patsubst %.txt,%.7,$(MAN7_TXT))
+
+prefix ?= $(HOME)
+bindir ?= $(prefix)/bin
+htmldir ?= $(prefix)/share/doc/git-doc
+infodir ?= $(prefix)/share/info
+pdfdir ?= $(prefix)/share/doc/git-doc
+mandir ?= $(prefix)/share/man
+man1dir = $(mandir)/man1
+man5dir = $(mandir)/man5
+man7dir = $(mandir)/man7
+# DESTDIR =
ASCIIDOC = asciidoc
ASCIIDOC_EXTRA =
MANPAGE_XSL = manpage-normal.xsl
XMLTO = xmlto
XMLTO_EXTRA =
-INSTALL?=install
+INSTALL ?= install
RM ?= rm -f
MAN_REPO = ../../git-manpages
HTML_REPO = ../../git-htmldocs
-infodir?=$(prefix)/share/info
-MAKEINFO=makeinfo
-INSTALL_INFO=install-info
-DOCBOOK2X_TEXI=docbook2x-texi
-DBLATEX=dblatex
+MAKEINFO = makeinfo
+INSTALL_INFO = install-info
+DOCBOOK2X_TEXI = docbook2x-texi
+DBLATEX = dblatex
ifndef PERL_PATH
PERL_PATH = /usr/bin/perl
endif
diff --git a/Documentation/RelNotes/1.8.3.2.txt b/Documentation/RelNotes/1.8.3.2.txt
new file mode 100644
index 0000000000..26ae142c3d
--- /dev/null
+++ b/Documentation/RelNotes/1.8.3.2.txt
@@ -0,0 +1,59 @@
+Git v1.8.3.2 Release Notes
+==========================
+
+Fixes since v1.8.3.1
+--------------------
+
+ * Cloning with "git clone --depth N" while fetch.fsckobjects (or
+ transfer.fsckobjects) is set to true did not tell the cut-off
+ points of the shallow history to the process that validates the
+ objects and the history received, causing the validation to fail.
+
+ * "git checkout foo" DWIMs the intended "upstream" and turns it into
+ "git checkout -t -b foo remotes/origin/foo". This codepath has been
+ updated to correctly take existing remote definitions into account.
+
+ * "git fetch" into a shallow repository from a repository that does
+ not know about the shallow boundary commits (e.g. a different fork
+ from the repository the current shallow repository was cloned from)
+ did not work correctly.
+
+ * "git subtree" (in contrib/) had one codepath with loose error
+ checks to lose data at the remote side.
+
+ * "git log --ancestry-path A...B" did not work as expected, as it did
+ not pay attention to the fact that the merge base between A and B
+ was the bottom of the range being specified.
+
+ * "git diff -c -p" was not showing a deleted line from a hunk when
+ another hunk immediately begins where the earlier one ends.
+
+ * "git merge @{-1}~22" was rewritten to "git merge frotz@{1}~22"
+ incorrectly when your previous branch was "frotz" (it should be
+ rewritten to "git merge frotz~22" instead).
+
+ * "git commit --allow-empty-message -m ''" should not start an
+ editor.
+
+ * "git push --[no-]verify" was not documented.
+
+ * An entry for "file://" scheme in the enumeration of URL types Git
+ can take in the HTML documentation was made into a clickable link
+ by mistake.
+
+ * zsh prompt script that borrowed from bash prompt script did not
+ work due to slight differences in array variable notation between
+ these two shells.
+
+ * The bash prompt code (in contrib/) displayed the name of the branch
+ being rebased when "rebase -i/-m/-p" modes are in use, but not the
+ plain vanilla "rebase".
+
+ * "git push $there HEAD:branch" did not resolve HEAD early enough, so
+ it was easy to flip it around while push is still going on and push
+ out a branch that the user did not originally intend when the
+ command was started.
+
+ * "difftool --dir-diff" did not copy back changes made by the
+ end-user in the diff tool backend to the working tree in some
+ cases.
diff --git a/Documentation/RelNotes/1.8.3.3.txt b/Documentation/RelNotes/1.8.3.3.txt
new file mode 100644
index 0000000000..58a570ef3d
--- /dev/null
+++ b/Documentation/RelNotes/1.8.3.3.txt
@@ -0,0 +1,11 @@
+Git v1.8.3.3 Release Notes
+==========================
+
+Fixes since v1.8.3.2
+--------------------
+
+ * Mac OS X does not like to write(2) more than INT_MAX bytes at a
+ time; work around it by chopping write(2) calls into smaller pieces.
+
+ * Newer Mac OS X encourages programs to compile and link with its
+ CommonCrypto framework, not with OpenSSL.
diff --git a/Documentation/RelNotes/1.8.4.txt b/Documentation/RelNotes/1.8.4.txt
index 408c602cc4..5f440b8a6b 100644
--- a/Documentation/RelNotes/1.8.4.txt
+++ b/Documentation/RelNotes/1.8.4.txt
@@ -35,10 +35,31 @@ Foreign interfaces, subsystems and ports.
UI, Workflows & Features
+ * Various subcommands of "git submodule" refused to run from anywhere
+ other than the top of the working tree of the superproject, but
+ they have been taught to let you run from a subdirectory.
+
+ * "git diff" learned a mode that ignores hunks whose change consists
+ only of additions and removals of blank lines, which is the same as
+ "diff -B" (ignore blank lines) of GNU diff.
+
+ * "git rm" gives a single message followed by list of paths to report
+ multiple paths that cannot be removed.
+
+ * "git rebase" can be told with ":/look for this string" syntax commits
+ to replay the changes onto and where the work to be replayed begins.
+
* Many tutorials teach users to set "color.ui" to "auto" as the first
thing after you set "user.name/email" to introduce yourselves to
Git. Now the variable defaults to "auto".
+ * On Cygwin, "cygstart" is now recognised as a possible way to start
+ a web browser (used in "help -w" and "instaweb" among others).
+
+### * "git status" learned status.branch and status.short configuration
+### variables to use --branch and --short options by default (override
+### with --no-branch and --no-short options from the command line).
+
* "git cmd <name>", when <name> happens to be a 40-hex string,
directly uses the 40-hex string as an object name, even if a ref
"refs/<some hierarchy>/<name>" exists. This disambiguation order
@@ -48,7 +69,8 @@ UI, Workflows & Features
* "git rebase" learned "--[no-]autostash" option to save local
changes instead of refusing to run (to which people's normal
- response was to stash them and re-run).
+ response was to stash them and re-run). This introduced a corner
+ case breakage to "git am --abort" but it has been fixed.
* Instead of typing four capital letters "HEAD", you can say "@" now,
e.g. "git log @".
@@ -94,6 +116,16 @@ UI, Workflows & Features
Performance, Internal Implementation, etc.
+ * "git pack-refs" that races with new ref creation or deletion have
+ been susceptible to lossage of refs under right conditions, which
+ has been tightened up.
+
+ * We read loose and packed references in two steps, but after
+ deciding to read a loose ref but before actually opening it to read
+ it, another process racing with us can unlink it, which would cause
+ us to barf. The codepath has been updated to retry when such a
+ race is detected, instead of outright failing.
+
* Uses of the platform fnmatch(3) function (many places in the code,
matching pathspec, .gitignore and .gitattributes to name a few)
have been replaced with wildmatch, allowing "foo/**/bar" that would
@@ -143,11 +175,37 @@ Unless otherwise noted, all the fixes since v1.8.3 in the maintenance
track are contained in this release (see release notes to them for
details).
+ * "git name-rev --refs=tags/v*" were forbidden, which was a bit
+ inconvenient (you had to give a pattern to match refs fully, like
+ --refs=refs/tags/v*).
+ (merge 98c5c4a nk/name-rev-abbreviated-refs later to maint).
+
+ * "git apply" parsed patches that add new files, generated by
+ programs other than Git, incorrectly. This is an old breakage in
+ v1.7.11 and will need to be merged down to the maintenance tracks.
+ (merge 212eb96 tr/maint-apply-non-git-patch-parsefix later to maint).
+
+ * Older cURL wanted the piece of memory we call it with to be stable, but
+ we updated the auth material after handing it to a call.
+ (merge a94cf2c bc/http-keep-memory-given-to-curl later to maint).
+
+ * "git pull" into nothing trashed "local changes" that were in the
+ index, and this avoids it.
+ (merge b4dc085 jk/pull-into-dirty-unborn later to maint).
+
+ * Many "git submodule" operations do not work on a submodule at a
+ path whose name is not in ASCII.
+ (merge bed9470 fg/submodule-non-ascii-path later to maint).
+
+ * "cherry-pick" had a small leak in an error codepath.
+ (merge 706728a fc/sequencer-plug-leak later to maint).
+
* Logic used by git-send-email to suppress cc mishandled names like
"A U. Thor" <author@example.xz>, where the human readable part
needs to be quoted (the user input may not have the double quotes
around the name, and comparison was done between quoted and
- unquoted strings).
+ unquoted strings). It also mishandled names that need RFC2047
+ quoting.
(merge 1495266 mt/send-email-cc-match-fix later to maint).
* Call to discard_cache/discard_index (used when we use different
diff --git a/Documentation/config.txt b/Documentation/config.txt
index 311fcebaf5..1153585aa2 100644
--- a/Documentation/config.txt
+++ b/Documentation/config.txt
@@ -199,6 +199,9 @@ advice.*::
amWorkDir::
Advice that shows the location of the patch file when
linkgit:git-am[1] fails to apply it.
+ rmHints::
+ In case of failure in the output of linkgit:git-rm[1],
+ show directions on how to proceed from the current state.
--
core.fileMode::
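
The new advice.rmHints knob works like the other advice.* settings and
can be switched off once the hint has served its purpose; a minimal
sketch:

    $ git config advice.rmHints false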
diff --git a/Documentation/diff-options.txt b/Documentation/diff-options.txt
index a85288f23e..19f78a7d5c 100644
--- a/Documentation/diff-options.txt
+++ b/Documentation/diff-options.txt
@@ -461,6 +461,9 @@ endif::git-format-patch[]
differences even if one line has whitespace where the other
line has none.
+--ignore-blank-lines::
+ Ignore changes whose lines are all blank.
+
--inter-hunk-context=<lines>::
Show the context between diff hunks, up to the specified number
of lines, thereby fusing hunks that are close to each other.
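
A quick way to try the new option in an existing repository (the file
name here is only for illustration):

    $ printf 'one\ntwo\n' >demo.txt
    $ git add demo.txt
    $ printf 'one\n\ntwo\n' >demo.txt           # the only change is a blank line
    $ git diff demo.txt                         # shows the added blank line
    $ git diff --ignore-blank-lines demo.txt    # prints nothing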
diff --git a/Documentation/git-check-ignore.txt b/Documentation/git-check-ignore.txt
index 8e1f7ab7ea..d2df487aa2 100644
--- a/Documentation/git-check-ignore.txt
+++ b/Documentation/git-check-ignore.txt
@@ -102,7 +102,7 @@ SEE ALSO
--------
linkgit:gitignore[5]
linkgit:gitconfig[5]
-linkgit:git-ls-files[5]
+linkgit:git-ls-files[1]
GIT
---
diff --git a/Documentation/git-config.txt b/Documentation/git-config.txt
index d88a6fcb29..19a7be0856 100644
--- a/Documentation/git-config.txt
+++ b/Documentation/git-config.txt
@@ -114,6 +114,15 @@ rather than from all available files.
+
See also <<FILES>>.
+--local::
+ For writing options: write to the repository .git/config file.
+ This is the default behavior.
++
+For reading options: read only from the repository .git/config rather than
+from all available files.
++
+See also <<FILES>>.
+
-f config-file::
--file config-file::
Use the given config file instead of the one specified by GIT_CONFIG.
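
For illustration, the --local behaviour documented above can be
exercised like this (the e-mail address is only a placeholder):

    $ git config --local user.email you@example.com   # writes to .git/config
    $ git config --local --get user.email             # reads only .git/config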
diff --git a/Documentation/git-diff.txt b/Documentation/git-diff.txt
index a7b46208f6..78d6d50489 100644
--- a/Documentation/git-diff.txt
+++ b/Documentation/git-diff.txt
@@ -18,8 +18,8 @@ SYNOPSIS
DESCRIPTION
-----------
Show changes between the working tree and the index or a tree, changes
-between the index and a tree, changes between two trees, or changes
-between two files on disk.
+between the index and a tree, changes between two trees, changes between
+two blob objects, or changes between two files on disk.
'git diff' [--options] [--] [<path>...]::
@@ -56,11 +56,6 @@ directories. This behavior can be forced by --no-index.
This is to view the changes between two arbitrary
<commit>.
-'git diff' [options] <blob> <blob>::
-
- This form is to view the differences between the raw
- contents of two blob objects.
-
'git diff' [--options] <commit>..<commit> [--] [<path>...]::
This is synonymous to the previous form. If <commit> on
@@ -87,6 +82,11 @@ and the range notations ("<commit>..<commit>" and
"<commit>\...<commit>") do not mean a range as defined in the
"SPECIFYING RANGES" section in linkgit:gitrevisions[7].
+'git diff' [options] <blob> <blob>::
+
+ This form is to view the differences between the raw
+ contents of two blob objects.
+
OPTIONS
-------
:git-diff: 1
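
The blob-vs-blob form described above accepts any two object names that
resolve to blobs; for example (the revisions and paths are arbitrary):

    $ git diff HEAD:Makefile HEAD~5:Makefile
    $ git diff $(git rev-parse HEAD:README) $(git rev-parse HEAD~5:README)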
diff --git a/Documentation/git-log.txt b/Documentation/git-log.txt
index 4687fe8192..2ea79ba168 100644
--- a/Documentation/git-log.txt
+++ b/Documentation/git-log.txt
@@ -128,9 +128,9 @@ Examples
in the "release" branch, along with the list of paths
each commit modifies.
-`git log --follow builtin-rev-list.c`::
+`git log --follow builtin/rev-list.c`::
- Shows the commits that changed builtin-rev-list.c, including
+ Shows the commits that changed builtin/rev-list.c, including
those commits that occurred before the file was given its
present name.
diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt
index 67ca99cd92..8c7f2f66d8 100644
--- a/Documentation/git-merge.txt
+++ b/Documentation/git-merge.txt
@@ -56,8 +56,8 @@ especially if those changes were further modified after the merge
was started), 'git merge --abort' will in some cases be unable to
reconstruct the original (pre-merge) changes. Therefore:
-*Warning*: Running 'git merge' with uncommitted changes is
-discouraged: while possible, it leaves you in a state that is hard to
+*Warning*: Running 'git merge' with non-trivial uncommitted changes is
+discouraged: while possible, it may leave you in a state that is hard to
back out of in the case of a conflict.
diff --git a/Documentation/git-name-rev.txt b/Documentation/git-name-rev.txt
index ad1d1468c9..6b0f1ba75f 100644
--- a/Documentation/git-name-rev.txt
+++ b/Documentation/git-name-rev.txt
@@ -25,7 +25,8 @@ OPTIONS
Do not use branch names, but only tags to name the commits
--refs=<pattern>::
- Only use refs whose names match a given shell pattern.
+ Only use refs whose names match a given shell pattern. The pattern
+ can be one of branch name, tag name or fully qualified ref name.
--all::
List all commits reachable from all refs
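
With the relaxed matching described above, a tag-name pattern is now
enough; previously the pattern had to match the full ref name (e.g.
refs/tags/v1.8.*). The version glob below is only an example:

    $ git name-rev --name-only --refs='tags/v1.8.*' HEAD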
diff --git a/Documentation/git-push.txt b/Documentation/git-push.txt
index df5be268ba..f7dfe48d28 100644
--- a/Documentation/git-push.txt
+++ b/Documentation/git-push.txt
@@ -136,6 +136,15 @@ already exists on the remote side.
not an ancestor of the local ref used to overwrite it.
This flag disables the check. This can cause the
remote repository to lose commits; use it with care.
+ Note that `--force` applies to all the refs that are pushed,
+ hence using it with `push.default` set to `matching` or with
+ multiple push destinations configured with `remote.*.push`
+ may overwrite refs other than the current branch (including
+ local refs that are strictly behind their remote counterpart).
+ To force a push to only one branch, use a `+` in front of the
+ refspec to push (e.g `git push origin +master` to force a push
+ to the `master` branch). See the `<refspec>...` section above
+ for details.
--repo=<repository>::
This option is only relevant if no <repository> argument is
diff --git a/Documentation/git-rev-parse.txt b/Documentation/git-rev-parse.txt
index 947d62fd25..993903c9f1 100644
--- a/Documentation/git-rev-parse.txt
+++ b/Documentation/git-rev-parse.txt
@@ -59,6 +59,22 @@ OPTIONS
If there is no parameter given by the user, use `<arg>`
instead.
+--prefix <arg>::
+ Behave as if 'git rev-parse' was invoked from the `<arg>`
+ subdirectory of the working tree. Any relative filenames are
+ resolved as if they are prefixed by `<arg>` and will be printed
+ in that form.
++
+This can be used to convert arguments to a command run in a subdirectory
+so that they can still be used after moving to the top-level of the
+repository. For example:
++
+----
+prefix=$(git rev-parse --show-prefix)
+cd "$(git rev-parse --show-toplevel)"
+eval "set -- $(git rev-parse --sq --prefix "$prefix" "$@")"
+----
+
--verify::
Verify that exactly one parameter is provided, and that it
can be turned into a raw 20-byte SHA-1 that can be used to
diff --git a/Documentation/git-stash.txt b/Documentation/git-stash.txt
index 711ffe17a7..db7e803038 100644
--- a/Documentation/git-stash.txt
+++ b/Documentation/git-stash.txt
@@ -13,10 +13,11 @@ SYNOPSIS
'git stash' drop [-q|--quiet] [<stash>]
'git stash' ( pop | apply ) [--index] [-q|--quiet] [<stash>]
'git stash' branch <branchname> [<stash>]
-'git stash' [save [--patch] [-k|--[no-]keep-index] [-q|--quiet]
+'git stash' [save [-p|--patch] [-k|--[no-]keep-index] [-q|--quiet]
[-u|--include-untracked] [-a|--all] [<message>]]
'git stash' clear
-'git stash' create
+'git stash' create [<message>]
+'git stash' store [-m|--message <message>] [-q|--quiet] <commit>
DESCRIPTION
-----------
@@ -151,7 +152,15 @@ create::
Create a stash (which is a regular commit object) and return its
object name, without storing it anywhere in the ref namespace.
+ This is intended to be useful for scripts. It is probably not
+ the command you want to use; see "save" above.
+store::
+
+ Store a given stash created via 'git stash create' (which is a
+ dangling merge commit) in the stash ref, updating the stash
+ reflog. This is intended to be useful for scripts. It is
+ probably not the command you want to use; see "save" above.
DISCUSSION
----------
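
The intended scripted use of the two plumbing subcommands might look
like this (the stash message is arbitrary):

    $ stash=$(git stash create "wip before risky cleanup")
    $ # "create" prints nothing when there is nothing to stash
    $ test -n "$stash" && git stash store -m "wip before risky cleanup" "$stash"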
diff --git a/Documentation/git-web--browse.txt b/Documentation/git-web--browse.txt
index ba79cb4f35..5aec4ecffb 100644
--- a/Documentation/git-web--browse.txt
+++ b/Documentation/git-web--browse.txt
@@ -34,6 +34,7 @@ The following browsers (or commands) are currently supported:
* dillo
* open (this is the default under Mac OS X GUI)
* start (this is the default under MinGW)
+* cygstart (this is the default under Cygwin)
Custom commands may also be specified.
diff --git a/Documentation/git.txt b/Documentation/git.txt
index 894454609f..b738a40e6b 100644
--- a/Documentation/git.txt
+++ b/Documentation/git.txt
@@ -43,9 +43,10 @@ unreleased) version of Git, that is available from 'master'
branch of the `git.git` repository.
Documentation for older releases are available here:
-* link:v1.8.3.1/git.html[documentation for release 1.8.3.1]
+* link:v1.8.3.2/git.html[documentation for release 1.8.3.2]
* release notes for
+ link:RelNotes/1.8.3.2.txt[1.8.3.2],
link:RelNotes/1.8.3.1.txt[1.8.3.1],
link:RelNotes/1.8.3.txt[1.8.3].
diff --git a/Documentation/rev-list-options.txt b/Documentation/rev-list-options.txt
index b462f17f62..e157ec3fe7 100644
--- a/Documentation/rev-list-options.txt
+++ b/Documentation/rev-list-options.txt
@@ -625,6 +625,10 @@ By default, the commits are shown in reverse chronological order.
Show no parents before all of its children are shown, but
otherwise show commits in the commit timestamp order.
+--author-date-order::
+ Show no parents before all of its children are shown, but
+ otherwise show commits in the author timestamp order.
+
--topo-order::
Show no parents before all of its children are shown, and
avoid showing commits on multiple lines of history
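
Both orderings are accepted anywhere these rev-list options apply, for
instance:

    $ git log --oneline --date-order
    $ git log --oneline --author-date-order   # new: order by author date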
diff --git a/Documentation/technical/api-builtin.txt b/Documentation/technical/api-builtin.txt
index 4a4228b896..f3c1357b7c 100644
--- a/Documentation/technical/api-builtin.txt
+++ b/Documentation/technical/api-builtin.txt
@@ -39,7 +39,7 @@ where options is the bitwise-or of:
on bare repositories.
This only makes sense when `RUN_SETUP` is also set.
-. Add `builtin-foo.o` to `BUILTIN_OBJS` in `Makefile`.
+. Add `builtin/foo.o` to `BUILTIN_OBJS` in `Makefile`.
Additionally, if `foo` is a new command, there are 3 more things to do:
diff --git a/Documentation/technical/api-parse-options.txt b/Documentation/technical/api-parse-options.txt
index 1317db4d6c..0be2b5159f 100644
--- a/Documentation/technical/api-parse-options.txt
+++ b/Documentation/technical/api-parse-options.txt
@@ -275,10 +275,10 @@ Examples
--------
See `test-parse-options.c` and
-`builtin-add.c`,
-`builtin-clone.c`,
-`builtin-commit.c`,
-`builtin-fetch.c`,
-`builtin-fsck.c`,
-`builtin-rm.c`
+`builtin/add.c`,
+`builtin/clone.c`,
+`builtin/commit.c`,
+`builtin/fetch.c`,
+`builtin/fsck.c`,
+`builtin/rm.c`
for real-world examples.
diff --git a/Documentation/user-manual.txt b/Documentation/user-manual.txt
index a13de937c4..1bcf11b397 100644
--- a/Documentation/user-manual.txt
+++ b/Documentation/user-manual.txt
@@ -57,10 +57,10 @@ download a copy of an existing repository. If you don't already have a
project in mind, here are some interesting examples:
------------------------------------------------
- # Git itself (approx. 10MB download):
+ # Git itself (approx. 40MB download):
$ git clone git://git.kernel.org/pub/scm/git/git.git
- # the Linux kernel (approx. 150MB download):
-$ git clone git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux-2.6.git
+ # the Linux kernel (approx. 640MB download):
+$ git clone git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
------------------------------------------------
The initial clone may be time-consuming for a large project, but you
@@ -4262,15 +4262,16 @@ no longer need to call `setup_pager()` directly).
Nowadays, `git log` is a builtin, which means that it is _contained_ in the
command `git`. The source side of a builtin is
-- a function called `cmd_<bla>`, typically defined in `builtin-<bla>.c`,
- and declared in `builtin.h`,
+- a function called `cmd_<bla>`, typically defined in `builtin/<bla>.c`
+ (note that older versions of Git used to have it in `builtin-<bla>.c`
+ instead), and declared in `builtin.h`.
- an entry in the `commands[]` array in `git.c`, and
- an entry in `BUILTIN_OBJECTS` in the `Makefile`.
Sometimes, more than one builtin is contained in one source file. For
-example, `cmd_whatchanged()` and `cmd_log()` both reside in `builtin-log.c`,
+example, `cmd_whatchanged()` and `cmd_log()` both reside in `builtin/log.c`,
since they share quite a bit of code. In that case, the commands which are
_not_ named like the `.c` file in which they live have to be listed in
`BUILT_INS` in the `Makefile`.
@@ -4293,10 +4294,10 @@ For the sake of clarity, let's stay with `git cat-file`, because it
- is plumbing, and
- was around even in the initial commit (it literally went only through
- some 20 revisions as `cat-file.c`, was renamed to `builtin-cat-file.c`
+ some 20 revisions as `cat-file.c`, was renamed to `builtin/cat-file.c`
when made a builtin, and then saw less than 10 versions).
-So, look into `builtin-cat-file.c`, search for `cmd_cat_file()` and look what
+So, look into `builtin/cat-file.c`, search for `cmd_cat_file()` and look what
it does.
------------------------------------------------------------------
@@ -4372,7 +4373,7 @@ Another example: Find out what to do in order to make some script a
builtin:
-------------------------------------------------
-$ git log --no-merges --diff-filter=A builtin-*.c
+$ git log --no-merges --diff-filter=A builtin/*.c
-------------------------------------------------
You see, Git is actually the best tool to find out about the source of Git
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index 390782fa12..b4d4e5045f 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -11,7 +11,7 @@ LF='
if test -f version
then
VN=$(cat version) || VN="$DEF_VER"
-elif test -d .git -o -f .git &&
+elif test -d ${GIT_DIR:-.git} -o -f .git &&
VN=$(git describe --match "v[0-9]*" --abbrev=7 HEAD 2>/dev/null) &&
case "$VN" in
*$LF*) (exit 1) ;;
diff --git a/Makefile b/Makefile
index 79f961ee4b..5a68fe5431 100644
--- a/Makefile
+++ b/Makefile
@@ -569,6 +569,7 @@ TEST_PROGRAMS_NEED_X += test-mergesort
TEST_PROGRAMS_NEED_X += test-mktemp
TEST_PROGRAMS_NEED_X += test-parse-options
TEST_PROGRAMS_NEED_X += test-path-utils
+TEST_PROGRAMS_NEED_X += test-prio-queue
TEST_PROGRAMS_NEED_X += test-read-cache
TEST_PROGRAMS_NEED_X += test-regex
TEST_PROGRAMS_NEED_X += test-revision-walking
@@ -696,6 +697,7 @@ LIB_H += merge-recursive.h
LIB_H += mergesort.h
LIB_H += notes-cache.h
LIB_H += notes-merge.h
+LIB_H += notes-utils.h
LIB_H += notes.h
LIB_H += object.h
LIB_H += pack-revindex.h
@@ -704,6 +706,7 @@ LIB_H += parse-options.h
LIB_H += patch-ids.h
LIB_H += pathspec.h
LIB_H += pkt-line.h
+LIB_H += prio-queue.h
LIB_H += progress.h
LIB_H += prompt.h
LIB_H += quote.h
@@ -830,6 +833,7 @@ LIB_OBJS += name-hash.o
LIB_OBJS += notes.o
LIB_OBJS += notes-cache.o
LIB_OBJS += notes-merge.o
+LIB_OBJS += notes-utils.o
LIB_OBJS += object.o
LIB_OBJS += pack-check.o
LIB_OBJS += pack-revindex.o
@@ -844,6 +848,7 @@ LIB_OBJS += pathspec.o
LIB_OBJS += pkt-line.o
LIB_OBJS += preload-index.o
LIB_OBJS += pretty.o
+LIB_OBJS += prio-queue.o
LIB_OBJS += progress.o
LIB_OBJS += prompt.o
LIB_OBJS += quote.o
diff --git a/advice.c b/advice.c
index 54315cbd0a..2a52098a29 100644
--- a/advice.c
+++ b/advice.c
@@ -15,6 +15,7 @@ int advice_implicit_identity = 1;
int advice_detached_head = 1;
int advice_set_upstream_failure = 1;
int advice_object_name_warning = 1;
+int advice_rm_hints = 1;
static struct {
const char *name;
@@ -35,6 +36,7 @@ static struct {
{ "detachedhead", &advice_detached_head },
{ "setupstreamfailure", &advice_set_upstream_failure },
{ "object_name_warning", &advice_object_name_warning },
+ { "rmhints", &advice_rm_hints },
/* make this an alias for backward compatibility */
{ "pushnonfastforward", &advice_push_update_rejected }
diff --git a/advice.h b/advice.h
index fefe39ac5c..93a7d110ea 100644
--- a/advice.h
+++ b/advice.h
@@ -18,6 +18,7 @@ extern int advice_implicit_identity;
extern int advice_detached_head;
extern int advice_set_upstream_failure;
extern int advice_object_name_warning;
+extern int advice_rm_hints;
int git_default_advice_config(const char *var, const char *value);
void advise(const char *advice, ...);
diff --git a/builtin.h b/builtin.h
index 64bab6bf54..1ed8edb0cb 100644
--- a/builtin.h
+++ b/builtin.h
@@ -5,7 +5,6 @@
#include "strbuf.h"
#include "cache.h"
#include "commit.h"
-#include "notes.h"
#define DEFAULT_MERGE_LOG_LEN 20
@@ -26,21 +25,6 @@ struct fmt_merge_msg_opts {
extern int fmt_merge_msg(struct strbuf *in, struct strbuf *out,
struct fmt_merge_msg_opts *);
-struct notes_rewrite_cfg {
- struct notes_tree **trees;
- const char *cmd;
- int enabled;
- combine_notes_fn combine;
- struct string_list *refs;
- int refs_from_env;
- int mode_from_env;
-};
-
-struct notes_rewrite_cfg *init_copy_notes_for_rewrite(const char *cmd);
-int copy_note_for_rewrite(struct notes_rewrite_cfg *c,
- const unsigned char *from_obj, const unsigned char *to_obj);
-void finish_copy_notes_for_rewrite(struct notes_rewrite_cfg *c);
-
extern int textconv_object(const char *path, unsigned mode, const unsigned char *sha1, int sha1_valid, char **buf, unsigned long *buf_size);
extern int cmd_add(int argc, const char **argv, const char *prefix);
diff --git a/builtin/apply.c b/builtin/apply.c
index faf8e30883..0e9b631db6 100644
--- a/builtin/apply.c
+++ b/builtin/apply.c
@@ -906,7 +906,7 @@ static void parse_traditional_patch(const char *first, const char *second, struc
patch->old_name = name;
} else {
patch->old_name = name;
- patch->new_name = xstrdup(name);
+ patch->new_name = null_strdup(name);
}
}
if (!name)
diff --git a/builtin/checkout.c b/builtin/checkout.c
index f5b50e520f..3be0018821 100644
--- a/builtin/checkout.c
+++ b/builtin/checkout.c
@@ -838,13 +838,16 @@ static int check_tracking_name(struct remote *remote, void *cb_data)
memset(&query, 0, sizeof(struct refspec));
query.src = cb->src_ref;
if (remote_find_tracking(remote, &query) ||
- get_sha1(query.dst, cb->dst_sha1))
+ get_sha1(query.dst, cb->dst_sha1)) {
+ free(query.dst);
return 0;
+ }
if (cb->dst_ref) {
+ free(query.dst);
cb->unique = 0;
return 0;
}
- cb->dst_ref = xstrdup(query.dst);
+ cb->dst_ref = query.dst;
return 0;
}
diff --git a/builtin/clone.c b/builtin/clone.c
index 66bff5700f..14b1323568 100644
--- a/builtin/clone.c
+++ b/builtin/clone.c
@@ -493,13 +493,16 @@ static void write_remote_refs(const struct ref *local_refs)
{
const struct ref *r;
+ lock_packed_refs(LOCK_DIE_ON_ERROR);
+
for (r = local_refs; r; r = r->next) {
if (!r->peer_ref)
continue;
add_packed_ref(r->peer_ref->name, r->old_sha1);
}
- pack_refs(PACK_REFS_ALL);
+ if (commit_packed_refs())
+ die_errno("unable to overwrite old ref-pack file");
}
static void write_followtags(const struct ref *refs, const char *msg)
diff --git a/builtin/commit.c b/builtin/commit.c
index 1621dfcd40..6b693c16d8 100644
--- a/builtin/commit.c
+++ b/builtin/commit.c
@@ -29,6 +29,7 @@
#include "gpg-interface.h"
#include "column.h"
#include "sequencer.h"
+#include "notes-utils.h"
static const char * const builtin_commit_usage[] = {
N_("git commit [options] [--] <pathspec>..."),
@@ -1593,7 +1594,7 @@ int cmd_commit(int argc, const char **argv, const char *prefix)
if (cfg) {
/* we are amending, so current_head is not NULL */
copy_note_for_rewrite(cfg, current_head->object.sha1, sha1);
- finish_copy_notes_for_rewrite(cfg);
+ finish_copy_notes_for_rewrite(cfg, "Notes added by 'git commit --amend'");
}
run_rewrite_hook(current_head->object.sha1, sha1);
}
diff --git a/builtin/help.c b/builtin/help.c
index 062957f629..f1e236b912 100644
--- a/builtin/help.c
+++ b/builtin/help.c
@@ -1,6 +1,4 @@
/*
- * builtin-help.c
- *
* Builtin help command
*/
#include "cache.h"
diff --git a/builtin/log.c b/builtin/log.c
index 9e2123295f..e3222ed9f9 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -237,7 +237,7 @@ static void log_show_early(struct rev_info *revs, struct commit_list *list)
int i = revs->early_output;
int show_header = 1;
- sort_in_topological_order(&list, revs->lifo);
+ sort_in_topological_order(&list, revs->sort_order);
while (list && i) {
struct commit *commit = list->item;
switch (simplify_commit(revs, commit)) {
diff --git a/builtin/ls-files.c b/builtin/ls-files.c
index 87f3b331ca..3a410c35d9 100644
--- a/builtin/ls-files.c
+++ b/builtin/ls-files.c
@@ -165,11 +165,13 @@ static void show_ce_entry(const char *tag, struct cache_entry *ce)
}
write_name(ce->name, ce_namelen(ce));
if (debug_mode) {
- printf(" ctime: %d:%d\n", ce->ce_ctime.sec, ce->ce_ctime.nsec);
- printf(" mtime: %d:%d\n", ce->ce_mtime.sec, ce->ce_mtime.nsec);
- printf(" dev: %d\tino: %d\n", ce->ce_dev, ce->ce_ino);
- printf(" uid: %d\tgid: %d\n", ce->ce_uid, ce->ce_gid);
- printf(" size: %d\tflags: %x\n", ce->ce_size, ce->ce_flags);
+ struct stat_data *sd = &ce->ce_stat_data;
+
+ printf(" ctime: %d:%d\n", sd->sd_ctime.sec, sd->sd_ctime.nsec);
+ printf(" mtime: %d:%d\n", sd->sd_mtime.sec, sd->sd_mtime.nsec);
+ printf(" dev: %d\tino: %d\n", sd->sd_dev, sd->sd_ino);
+ printf(" uid: %d\tgid: %d\n", sd->sd_uid, sd->sd_gid);
+ printf(" size: %d\tflags: %x\n", sd->sd_size, ce->ce_flags);
}
}
diff --git a/builtin/name-rev.c b/builtin/name-rev.c
index 6238247974..87d485496f 100644
--- a/builtin/name-rev.c
+++ b/builtin/name-rev.c
@@ -82,6 +82,20 @@ copy_data:
}
}
+static int subpath_matches(const char *path, const char *filter)
+{
+ const char *subpath = path;
+
+ while (subpath) {
+ if (!fnmatch(filter, subpath, 0))
+ return subpath - path;
+ subpath = strchr(subpath, '/');
+ if (subpath)
+ subpath++;
+ }
+ return -1;
+}
+
struct name_ref_data {
int tags_only;
int name_only;
@@ -92,13 +106,23 @@ static int name_ref(const char *path, const unsigned char *sha1, int flags, void
{
struct object *o = parse_object(sha1);
struct name_ref_data *data = cb_data;
+ int can_abbreviate_output = data->tags_only && data->name_only;
int deref = 0;
if (data->tags_only && prefixcmp(path, "refs/tags/"))
return 0;
- if (data->ref_filter && fnmatch(data->ref_filter, path, 0))
- return 0;
+ if (data->ref_filter) {
+ switch (subpath_matches(path, data->ref_filter)) {
+ case -1: /* did not match */
+ return 0;
+ case 0: /* matched fully */
+ break;
+ default: /* matched subpath */
+ can_abbreviate_output = 1;
+ break;
+ }
+ }
while (o && o->type == OBJ_TAG) {
struct tag *t = (struct tag *) o;
@@ -110,12 +134,10 @@ static int name_ref(const char *path, const unsigned char *sha1, int flags, void
if (o && o->type == OBJ_COMMIT) {
struct commit *commit = (struct commit *)o;
- if (!prefixcmp(path, "refs/heads/"))
+ if (can_abbreviate_output)
+ path = shorten_unambiguous_ref(path, 0);
+ else if (!prefixcmp(path, "refs/heads/"))
path = path + 11;
- else if (data->tags_only
- && data->name_only
- && !prefixcmp(path, "refs/tags/"))
- path = path + 10;
else if (!prefixcmp(path, "refs/"))
path = path + 5;
diff --git a/builtin/notes.c b/builtin/notes.c
index 57748a6fb6..e4100c4982 100644
--- a/builtin/notes.c
+++ b/builtin/notes.c
@@ -4,7 +4,7 @@
* Copyright (c) 2010 Johan Herland <johan@herland.net>
*
* Based on git-notes.sh by Johannes Schindelin,
- * and builtin-tag.c by Kristian Høgsberg and Carlos Rica.
+ * and builtin/tag.c by Kristian Høgsberg and Carlos Rica.
*/
#include "cache.h"
@@ -18,9 +18,7 @@
#include "parse-options.h"
#include "string-list.h"
#include "notes-merge.h"
-
-static void commit_notes(struct notes_tree *t, const char *msg);
-static combine_notes_fn parse_combine_notes_fn(const char *v);
+#include "notes-utils.h"
static const char * const git_notes_usage[] = {
N_("git notes [--ref <notes_ref>] [list [<object>]]"),
@@ -287,139 +285,13 @@ static int parse_reedit_arg(const struct option *opt, const char *arg, int unset
return parse_reuse_arg(opt, arg, unset);
}
-static void commit_notes(struct notes_tree *t, const char *msg)
-{
- struct strbuf buf = STRBUF_INIT;
- unsigned char commit_sha1[20];
-
- if (!t)
- t = &default_notes_tree;
- if (!t->initialized || !t->ref || !*t->ref)
- die(_("Cannot commit uninitialized/unreferenced notes tree"));
- if (!t->dirty)
- return; /* don't have to commit an unchanged tree */
-
- /* Prepare commit message and reflog message */
- strbuf_addstr(&buf, msg);
- if (buf.buf[buf.len - 1] != '\n')
- strbuf_addch(&buf, '\n'); /* Make sure msg ends with newline */
-
- create_notes_commit(t, NULL, &buf, commit_sha1);
- strbuf_insert(&buf, 0, "notes: ", 7); /* commit message starts at index 7 */
- update_ref(buf.buf, t->ref, commit_sha1, NULL, 0, DIE_ON_ERR);
-
- strbuf_release(&buf);
-}
-
-static combine_notes_fn parse_combine_notes_fn(const char *v)
-{
- if (!strcasecmp(v, "overwrite"))
- return combine_notes_overwrite;
- else if (!strcasecmp(v, "ignore"))
- return combine_notes_ignore;
- else if (!strcasecmp(v, "concatenate"))
- return combine_notes_concatenate;
- else if (!strcasecmp(v, "cat_sort_uniq"))
- return combine_notes_cat_sort_uniq;
- else
- return NULL;
-}
-
-static int notes_rewrite_config(const char *k, const char *v, void *cb)
-{
- struct notes_rewrite_cfg *c = cb;
- if (!prefixcmp(k, "notes.rewrite.") && !strcmp(k+14, c->cmd)) {
- c->enabled = git_config_bool(k, v);
- return 0;
- } else if (!c->mode_from_env && !strcmp(k, "notes.rewritemode")) {
- if (!v)
- config_error_nonbool(k);
- c->combine = parse_combine_notes_fn(v);
- if (!c->combine) {
- error(_("Bad notes.rewriteMode value: '%s'"), v);
- return 1;
- }
- return 0;
- } else if (!c->refs_from_env && !strcmp(k, "notes.rewriteref")) {
- /* note that a refs/ prefix is implied in the
- * underlying for_each_glob_ref */
- if (!prefixcmp(v, "refs/notes/"))
- string_list_add_refs_by_glob(c->refs, v);
- else
- warning(_("Refusing to rewrite notes in %s"
- " (outside of refs/notes/)"), v);
- return 0;
- }
-
- return 0;
-}
-
-
-struct notes_rewrite_cfg *init_copy_notes_for_rewrite(const char *cmd)
-{
- struct notes_rewrite_cfg *c = xmalloc(sizeof(struct notes_rewrite_cfg));
- const char *rewrite_mode_env = getenv(GIT_NOTES_REWRITE_MODE_ENVIRONMENT);
- const char *rewrite_refs_env = getenv(GIT_NOTES_REWRITE_REF_ENVIRONMENT);
- c->cmd = cmd;
- c->enabled = 1;
- c->combine = combine_notes_concatenate;
- c->refs = xcalloc(1, sizeof(struct string_list));
- c->refs->strdup_strings = 1;
- c->refs_from_env = 0;
- c->mode_from_env = 0;
- if (rewrite_mode_env) {
- c->mode_from_env = 1;
- c->combine = parse_combine_notes_fn(rewrite_mode_env);
- if (!c->combine)
- /* TRANSLATORS: The first %s is the name of the
- environment variable, the second %s is its value */
- error(_("Bad %s value: '%s'"), GIT_NOTES_REWRITE_MODE_ENVIRONMENT,
- rewrite_mode_env);
- }
- if (rewrite_refs_env) {
- c->refs_from_env = 1;
- string_list_add_refs_from_colon_sep(c->refs, rewrite_refs_env);
- }
- git_config(notes_rewrite_config, c);
- if (!c->enabled || !c->refs->nr) {
- string_list_clear(c->refs, 0);
- free(c->refs);
- free(c);
- return NULL;
- }
- c->trees = load_notes_trees(c->refs);
- string_list_clear(c->refs, 0);
- free(c->refs);
- return c;
-}
-
-int copy_note_for_rewrite(struct notes_rewrite_cfg *c,
- const unsigned char *from_obj, const unsigned char *to_obj)
-{
- int ret = 0;
- int i;
- for (i = 0; c->trees[i]; i++)
- ret = copy_note(c->trees[i], from_obj, to_obj, 1, c->combine) || ret;
- return ret;
-}
-
-void finish_copy_notes_for_rewrite(struct notes_rewrite_cfg *c)
-{
- int i;
- for (i = 0; c->trees[i]; i++) {
- commit_notes(c->trees[i], "Notes added by 'git notes copy'");
- free_notes(c->trees[i]);
- }
- free(c->trees);
- free(c);
-}
-
static int notes_copy_from_stdin(int force, const char *rewrite_cmd)
{
struct strbuf buf = STRBUF_INIT;
struct notes_rewrite_cfg *c = NULL;
struct notes_tree *t = NULL;
int ret = 0;
+ const char *msg = "Notes added by 'git notes copy'";
if (rewrite_cmd) {
c = init_copy_notes_for_rewrite(rewrite_cmd);
@@ -461,10 +333,10 @@ static int notes_copy_from_stdin(int force, const char *rewrite_cmd)
}
if (!rewrite_cmd) {
- commit_notes(t, "Notes added by 'git notes copy'");
+ commit_notes(t, msg);
free_notes(t);
} else {
- finish_copy_notes_for_rewrite(c);
+ finish_copy_notes_for_rewrite(c, msg);
}
return ret;
}
diff --git a/builtin/replace.c b/builtin/replace.c
index 398ccd5eaa..59d31152d0 100644
--- a/builtin/replace.c
+++ b/builtin/replace.c
@@ -3,7 +3,7 @@
*
* Copyright (c) 2008 Christian Couder <chriscool@tuxfamily.org>
*
- * Based on builtin-tag.c by Kristian Høgsberg <krh@redhat.com>
+ * Based on builtin/tag.c by Kristian Høgsberg <krh@redhat.com>
* and Carlos Rica <jasampler@gmail.com> that was itself based on
* git-tag.sh and mktag.c by Linus Torvalds.
*/
diff --git a/builtin/rev-parse.c b/builtin/rev-parse.c
index f267a1d3b5..de894c7577 100644
--- a/builtin/rev-parse.c
+++ b/builtin/rev-parse.c
@@ -212,11 +212,17 @@ static void show_datestring(const char *flag, const char *datestr)
show(buffer);
}
-static int show_file(const char *arg)
+static int show_file(const char *arg, int output_prefix)
{
show_default();
if ((filter & (DO_NONFLAGS|DO_NOREV)) == (DO_NONFLAGS|DO_NOREV)) {
- show(arg);
+ if (output_prefix) {
+ const char *prefix = startup_info->prefix;
+ show(prefix_filename(prefix,
+ prefix ? strlen(prefix) : 0,
+ arg));
+ } else
+ show(arg);
return 1;
}
return 0;
@@ -470,6 +476,7 @@ N_("git rev-parse --parseopt [options] -- [<args>...]\n"
int cmd_rev_parse(int argc, const char **argv, const char *prefix)
{
int i, as_is = 0, verify = 0, quiet = 0, revs_count = 0, type = 0;
+ int output_prefix = 0;
unsigned char sha1[20];
const char *name = NULL;
@@ -503,7 +510,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
const char *arg = argv[i];
if (as_is) {
- if (show_file(arg) && as_is < 2)
+ if (show_file(arg, output_prefix) && as_is < 2)
verify_filename(prefix, arg, 0);
continue;
}
@@ -527,7 +534,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
as_is = 2;
/* Pass on the "--" if we show anything but files.. */
if (filter & (DO_FLAGS | DO_REVS))
- show_file(arg);
+ show_file(arg, 0);
continue;
}
if (!strcmp(arg, "--default")) {
@@ -535,6 +542,13 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
i++;
continue;
}
+ if (!strcmp(arg, "--prefix")) {
+ prefix = argv[i+1];
+ startup_info->prefix = prefix;
+ output_prefix = 1;
+ i++;
+ continue;
+ }
if (!strcmp(arg, "--revs-only")) {
filter &= ~DO_NOREV;
continue;
@@ -754,7 +768,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
if (verify)
die_no_single_rev(quiet);
as_is = 1;
- if (!show_file(arg))
+ if (!show_file(arg, output_prefix))
continue;
verify_filename(prefix, arg, 1);
}
diff --git a/builtin/rm.c b/builtin/rm.c
index 7b91d52f39..06025a2e75 100644
--- a/builtin/rm.c
+++ b/builtin/rm.c
@@ -9,6 +9,7 @@
#include "cache-tree.h"
#include "tree-walk.h"
#include "parse-options.h"
+#include "string-list.h"
#include "submodule.h"
static const char * const builtin_rm_usage[] = {
@@ -36,10 +37,32 @@ static int get_ours_cache_pos(const char *path, int pos)
return -1;
}
+static void print_error_files(struct string_list *files_list,
+ const char *main_msg,
+ const char *hints_msg,
+ int *errs)
+{
+ if (files_list->nr) {
+ int i;
+ struct strbuf err_msg = STRBUF_INIT;
+
+ strbuf_addstr(&err_msg, main_msg);
+ for (i = 0; i < files_list->nr; i++)
+ strbuf_addf(&err_msg,
+ "\n %s",
+ files_list->items[i].string);
+ if (advice_rm_hints)
+ strbuf_addstr(&err_msg, hints_msg);
+ *errs = error("%s", err_msg.buf);
+ strbuf_release(&err_msg);
+ }
+}
+
static int check_submodules_use_gitfiles(void)
{
int i;
int errs = 0;
+ struct string_list files = STRING_LIST_INIT_NODUP;
for (i = 0; i < list.nr; i++) {
const char *name = list.entry[i].name;
@@ -61,11 +84,18 @@ static int check_submodules_use_gitfiles(void)
continue;
if (!submodule_uses_gitfile(name))
- errs = error(_("submodule '%s' (or one of its nested "
- "submodules) uses a .git directory\n"
- "(use 'rm -rf' if you really want to remove "
- "it including all of its history)"), name);
+ string_list_append(&files, name);
}
+ print_error_files(&files,
+ Q_("the following submodule (or one of its nested "
+ "submodules)\n uses a .git directory:",
+ "the following submodules (or one of its nested "
+ "submodules)\n use a .git directory:",
+ files.nr),
+ _("\n(use 'rm -rf' if you really want to remove "
+ "it including all of its history)"),
+ &errs);
+ string_list_clear(&files, 0);
return errs;
}
@@ -81,6 +111,10 @@ static int check_local_mod(unsigned char *head, int index_only)
*/
int i, no_head;
int errs = 0;
+ struct string_list files_staged = STRING_LIST_INIT_NODUP;
+ struct string_list files_cached = STRING_LIST_INIT_NODUP;
+ struct string_list files_submodule = STRING_LIST_INIT_NODUP;
+ struct string_list files_local = STRING_LIST_INIT_NODUP;
no_head = is_null_sha1(head);
for (i = 0; i < list.nr; i++) {
@@ -171,29 +205,58 @@ static int check_local_mod(unsigned char *head, int index_only)
*/
if (local_changes && staged_changes) {
if (!index_only || !(ce->ce_flags & CE_INTENT_TO_ADD))
- errs = error(_("'%s' has staged content different "
- "from both the file and the HEAD\n"
- "(use -f to force removal)"), name);
+ string_list_append(&files_staged, name);
}
else if (!index_only) {
if (staged_changes)
- errs = error(_("'%s' has changes staged in the index\n"
- "(use --cached to keep the file, "
- "or -f to force removal)"), name);
+ string_list_append(&files_cached, name);
if (local_changes) {
if (S_ISGITLINK(ce->ce_mode) &&
- !submodule_uses_gitfile(name)) {
- errs = error(_("submodule '%s' (or one of its nested "
- "submodules) uses a .git directory\n"
- "(use 'rm -rf' if you really want to remove "
- "it including all of its history)"), name);
- } else
- errs = error(_("'%s' has local modifications\n"
- "(use --cached to keep the file, "
- "or -f to force removal)"), name);
+ !submodule_uses_gitfile(name))
+ string_list_append(&files_submodule, name);
+ else
+ string_list_append(&files_local, name);
}
}
}
+ print_error_files(&files_staged,
+ Q_("the following file has staged content different "
+ "from both the\nfile and the HEAD:",
+ "the following files have staged content different"
+ " from both the\nfile and the HEAD:",
+ files_staged.nr),
+ _("\n(use -f to force removal)"),
+ &errs);
+ string_list_clear(&files_staged, 0);
+ print_error_files(&files_cached,
+ Q_("the following file has changes "
+ "staged in the index:",
+ "the following files have changes "
+ "staged in the index:", files_cached.nr),
+ _("\n(use --cached to keep the file,"
+ " or -f to force removal)"),
+ &errs);
+ string_list_clear(&files_cached, 0);
+ print_error_files(&files_submodule,
+ Q_("the following submodule (or one of its nested "
+ "submodule)\nuses a .git directory:",
+ "the following submodules (or one of its nested "
+ "submodule)\nuse a .git directory:",
+ files_submodule.nr),
+ _("\n(use 'rm -rf' if you really "
+ "want to remove it including all "
+ "of its history)"),
+ &errs);
+ string_list_clear(&files_submodule, 0);
+ print_error_files(&files_local,
+ Q_("the following file has local modifications:",
+ "the following files have local modifications:",
+ files_local.nr),
+ _("\n(use --cached to keep the file,"
+ " or -f to force removal)"),
+ &errs);
+ string_list_clear(&files_local, 0);
+
return errs;
}
diff --git a/builtin/show-branch.c b/builtin/show-branch.c
index 90fc6b1b9d..99ec4af224 100644
--- a/builtin/show-branch.c
+++ b/builtin/show-branch.c
@@ -630,7 +630,7 @@ int cmd_show_branch(int ac, const char **av, const char *prefix)
int num_rev, i, extra = 0;
int all_heads = 0, all_remotes = 0;
int all_mask, all_revs;
- int lifo = 1;
+ enum rev_sort_order sort_order = REV_SORT_IN_GRAPH_ORDER;
char head[128];
const char *head_p;
int head_len;
@@ -665,15 +665,17 @@ int cmd_show_branch(int ac, const char **av, const char *prefix)
N_("show possible merge bases")),
OPT_BOOLEAN(0, "independent", &independent,
N_("show refs unreachable from any other ref")),
- OPT_BOOLEAN(0, "topo-order", &lifo,
- N_("show commits in topological order")),
+ OPT_SET_INT(0, "topo-order", &sort_order,
+ N_("show commits in topological order"),
+ REV_SORT_IN_GRAPH_ORDER),
OPT_BOOLEAN(0, "topics", &topics,
N_("show only commits not on the first branch")),
OPT_SET_INT(0, "sparse", &dense,
N_("show merges reachable from only one tip"), 0),
- OPT_SET_INT(0, "date-order", &lifo,
+ OPT_SET_INT(0, "date-order", &sort_order,
N_("show commits where no parent comes before its "
- "children"), 0),
+ "children"),
+ REV_SORT_BY_COMMIT_DATE),
{ OPTION_CALLBACK, 'g', "reflog", &reflog_base, N_("<n>[,<base>]"),
N_("show <n> most recent ref-log entries starting at "
"base"),
@@ -900,7 +902,7 @@ int cmd_show_branch(int ac, const char **av, const char *prefix)
exit(0);
/* Sort topologically */
- sort_in_topological_order(&seen, lifo);
+ sort_in_topological_order(&seen, sort_order);
/* Give names to commits */
if (!sha1_name && !no_name)
diff --git a/cache.h b/cache.h
index ec8240f62a..dd0fb33a15 100644
--- a/cache.h
+++ b/cache.h
@@ -119,15 +119,19 @@ struct cache_time {
unsigned int nsec;
};
+struct stat_data {
+ struct cache_time sd_ctime;
+ struct cache_time sd_mtime;
+ unsigned int sd_dev;
+ unsigned int sd_ino;
+ unsigned int sd_uid;
+ unsigned int sd_gid;
+ unsigned int sd_size;
+};
+
struct cache_entry {
- struct cache_time ce_ctime;
- struct cache_time ce_mtime;
- unsigned int ce_dev;
- unsigned int ce_ino;
+ struct stat_data ce_stat_data;
unsigned int ce_mode;
- unsigned int ce_uid;
- unsigned int ce_gid;
- unsigned int ce_size;
unsigned int ce_flags;
unsigned int ce_namelen;
unsigned char sha1[20];
@@ -511,6 +515,21 @@ extern int limit_pathspec_to_literal(void);
#define HASH_FORMAT_CHECK 2
extern int index_fd(unsigned char *sha1, int fd, struct stat *st, enum object_type type, const char *path, unsigned flags);
extern int index_path(unsigned char *sha1, const char *path, struct stat *st, unsigned flags);
+
+/*
+ * Record to sd the data from st that we use to check whether a file
+ * might have changed.
+ */
+extern void fill_stat_data(struct stat_data *sd, struct stat *st);
+
+/*
+ * Return 0 if st is consistent with a file not having been changed
+ * since sd was filled. If there are differences, return a
+ * combination of MTIME_CHANGED, CTIME_CHANGED, OWNER_CHANGED,
+ * INODE_CHANGED, and DATA_CHANGED.
+ */
+extern int match_stat_data(const struct stat_data *sd, struct stat *st);
+
extern void fill_stat_cache_info(struct cache_entry *ce, struct stat *st);
#define REFRESH_REALLY 0x0001 /* ignore_valid */
@@ -1338,4 +1357,31 @@ int checkout_fast_forward(const unsigned char *from,
int sane_execvp(const char *file, char *const argv[]);
+/*
+ * A struct to encapsulate the concept of whether a file has changed
+ * since we last checked it. This uses criteria similar to those used
+ * for the index.
+ */
+struct stat_validity {
+ struct stat_data *sd;
+};
+
+void stat_validity_clear(struct stat_validity *sv);
+
+/*
+ * Returns 1 if the path is a regular file (or a symlink to a regular
+ * file) and matches the saved stat_validity, 0 otherwise. A missing
+ * or inaccessible file is considered a match if the struct was just
+ * initialized, or if the previous update found an inaccessible file.
+ */
+int stat_validity_check(struct stat_validity *sv, const char *path);
+
+/*
+ * Update the stat_validity from a file opened at descriptor fd. If
+ * the file is missing, inaccessible, or not a regular file, then
+ * future calls to stat_validity_check will match iff one of those
+ * conditions continues to be true.
+ */
+void stat_validity_update(struct stat_validity *sv, int fd);
+
#endif /* CACHE_H */
diff --git a/commit-slab.h b/commit-slab.h
new file mode 100644
index 0000000000..7d481638af
--- /dev/null
+++ b/commit-slab.h
@@ -0,0 +1,98 @@
+#ifndef COMMIT_SLAB_H
+#define COMMIT_SLAB_H
+
+/*
+ * define_commit_slab(slabname, elemtype) creates boilerplate code to define
+ * a new struct (struct slabname) that is used to associate a piece of data
+ * of elemtype to commits, and a few functions to use that struct.
+ *
+ * After including this header file, using:
+ *
+ * define_commit_slab(indegree, int);
+ *
+ * will let you call the following functions:
+ *
+ * - int *indegree_at(struct indegree *, struct commit *);
+ *
+ * This function locates the data associated with the given commit in
+ * the indegree slab, and returns the pointer to it.
+ *
+ * - void init_indegree(struct indegree *);
+ * void init_indegree_with_stride(struct indegree *, int);
+ *
+ * Initializes the indegree slab that associates an array of integers
+ * to each commit. 'stride' specifies how big each array is. The slab
+ * that is initialized by the variant without "_with_stride" associates
+ * each commit with an array of one integer.
+ */
+
+/* allocate ~512kB at once, allowing for malloc overhead */
+#ifndef COMMIT_SLAB_SIZE
+#define COMMIT_SLAB_SIZE (512*1024-32)
+#endif
+
+#define define_commit_slab(slabname, elemtype) \
+ \
+struct slabname { \
+ unsigned slab_size; \
+ unsigned stride; \
+ unsigned slab_count; \
+ elemtype **slab; \
+}; \
+static int stat_ ##slabname## realloc; \
+ \
+static void init_ ##slabname## _with_stride(struct slabname *s, \
+ unsigned stride) \
+{ \
+ unsigned int elem_size; \
+ if (!stride) \
+ stride = 1; \
+ s->stride = stride; \
+ elem_size = sizeof(struct slabname) * stride; \
+ s->slab_size = COMMIT_SLAB_SIZE / elem_size; \
+ s->slab_count = 0; \
+ s->slab = NULL; \
+} \
+ \
+static void init_ ##slabname(struct slabname *s) \
+{ \
+ init_ ##slabname## _with_stride(s, 1); \
+} \
+ \
+static void clear_ ##slabname(struct slabname *s) \
+{ \
+ int i; \
+ for (i = 0; i < s->slab_count; i++) \
+ free(s->slab[i]); \
+ s->slab_count = 0; \
+ free(s->slab); \
+ s->slab = NULL; \
+} \
+ \
+static elemtype *slabname## _at(struct slabname *s, \
+ const struct commit *c) \
+{ \
+ int nth_slab, nth_slot, ix; \
+ \
+ ix = c->index * s->stride; \
+ nth_slab = ix / s->slab_size; \
+ nth_slot = ix % s->slab_size; \
+ \
+ if (s->slab_count <= nth_slab) { \
+ int i; \
+ s->slab = xrealloc(s->slab, \
+ (nth_slab + 1) * sizeof(s->slab)); \
+ stat_ ##slabname## realloc++; \
+ for (i = s->slab_count; i <= nth_slab; i++) \
+ s->slab[i] = NULL; \
+ s->slab_count = nth_slab + 1; \
+ } \
+ if (!s->slab[nth_slab]) \
+ s->slab[nth_slab] = xcalloc(s->slab_size, \
+ sizeof(**s->slab)); \
+ return &s->slab[nth_slab][nth_slot]; \
+} \
+ \
+static int stat_ ##slabname## realloc
+
+#endif /* COMMIT_SLAB_H */
diff --git a/commit.c b/commit.c
index 888e02ae2f..521e49c309 100644
--- a/commit.c
+++ b/commit.c
@@ -8,12 +8,15 @@
#include "notes.h"
#include "gpg-interface.h"
#include "mergesort.h"
+#include "commit-slab.h"
+#include "prio-queue.h"
static struct commit_extra_header *read_commit_extra_header_lines(const char *buf, size_t len, const char **);
int save_commit_buffer = 1;
const char *commit_type = "commit";
+static int commit_count;
static struct commit *check_commit(struct object *obj,
const unsigned char *sha1,
@@ -58,8 +61,11 @@ struct commit *lookup_commit_or_die(const unsigned char *sha1, const char *ref_n
struct commit *lookup_commit(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
- if (!obj)
- return create_object(sha1, OBJ_COMMIT, alloc_commit_node());
+ if (!obj) {
+ struct commit *c = alloc_commit_node();
+ c->index = commit_count++;
+ return create_object(sha1, OBJ_COMMIT, c);
+ }
if (!obj->type)
obj->type = OBJ_COMMIT;
return check_commit(obj, sha1, 0);
@@ -507,32 +513,136 @@ struct commit *pop_commit(struct commit_list **stack)
}
/*
+ * Topological sort support
+ */
+
+/* count number of children that have not been emitted */
+define_commit_slab(indegree_slab, int);
+
+/* record author-date for each commit object */
+define_commit_slab(author_date_slab, unsigned long);
+
+static void record_author_date(struct author_date_slab *author_date,
+ struct commit *commit)
+{
+ const char *buf, *line_end;
+ char *buffer = NULL;
+ struct ident_split ident;
+ char *date_end;
+ unsigned long date;
+
+ if (!commit->buffer) {
+ unsigned long size;
+ enum object_type type;
+ buffer = read_sha1_file(commit->object.sha1, &type, &size);
+ if (!buffer)
+ return;
+ }
+
+ for (buf = commit->buffer ? commit->buffer : buffer;
+ buf;
+ buf = line_end + 1) {
+ line_end = strchrnul(buf, '\n');
+ if (prefixcmp(buf, "author ")) {
+ if (!line_end[0] || line_end[1] == '\n')
+ return; /* end of header */
+ continue;
+ }
+ if (split_ident_line(&ident,
+ buf + strlen("author "),
+ line_end - (buf + strlen("author "))) ||
+ !ident.date_begin || !ident.date_end)
+ goto fail_exit; /* malformed "author" line */
+ break;
+ }
+
+ date = strtoul(ident.date_begin, &date_end, 10);
+ if (date_end != ident.date_end)
+ goto fail_exit; /* malformed date */
+ *(author_date_slab_at(author_date, commit)) = date;
+
+fail_exit:
+ free(buffer);
+}
+
+static int compare_commits_by_author_date(const void *a_, const void *b_,
+ void *cb_data)
+{
+ const struct commit *a = a_, *b = b_;
+ struct author_date_slab *author_date = cb_data;
+ unsigned long a_date = *(author_date_slab_at(author_date, a));
+ unsigned long b_date = *(author_date_slab_at(author_date, b));
+
+ /* newer commits with larger date first */
+ if (a_date < b_date)
+ return 1;
+ else if (a_date > b_date)
+ return -1;
+ return 0;
+}
+
+static int compare_commits_by_commit_date(const void *a_, const void *b_, void *unused)
+{
+ const struct commit *a = a_, *b = b_;
+ /* newer commits with larger date first */
+ if (a->date < b->date)
+ return 1;
+ else if (a->date > b->date)
+ return -1;
+ return 0;
+}
+
+/*
* Performs an in-place topological sort on the list supplied.
*/
-void sort_in_topological_order(struct commit_list ** list, int lifo)
+void sort_in_topological_order(struct commit_list **list, enum rev_sort_order sort_order)
{
struct commit_list *next, *orig = *list;
- struct commit_list *work, **insert;
struct commit_list **pptr;
+ struct indegree_slab indegree;
+ struct prio_queue queue;
+ struct commit *commit;
+ struct author_date_slab author_date;
if (!orig)
return;
*list = NULL;
+ init_indegree_slab(&indegree);
+ memset(&queue, '\0', sizeof(queue));
+
+ switch (sort_order) {
+ default: /* REV_SORT_IN_GRAPH_ORDER */
+ queue.compare = NULL;
+ break;
+ case REV_SORT_BY_COMMIT_DATE:
+ queue.compare = compare_commits_by_commit_date;
+ break;
+ case REV_SORT_BY_AUTHOR_DATE:
+ init_author_date_slab(&author_date);
+ queue.compare = compare_commits_by_author_date;
+ queue.cb_data = &author_date;
+ break;
+ }
+
/* Mark them and clear the indegree */
for (next = orig; next; next = next->next) {
struct commit *commit = next->item;
- commit->indegree = 1;
+ *(indegree_slab_at(&indegree, commit)) = 1;
+ /* also record the author dates, if needed */
+ if (sort_order == REV_SORT_BY_AUTHOR_DATE)
+ record_author_date(&author_date, commit);
}
/* update the indegree */
for (next = orig; next; next = next->next) {
- struct commit_list * parents = next->item->parents;
+ struct commit_list *parents = next->item->parents;
while (parents) {
struct commit *parent = parents->item;
+ int *pi = indegree_slab_at(&indegree, parent);
- if (parent->indegree)
- parent->indegree++;
+ if (*pi)
+ (*pi)++;
parents = parents->next;
}
}
@@ -544,34 +654,33 @@ void sort_in_topological_order(struct commit_list ** list, int lifo)
*
* the tips serve as a starting set for the work queue.
*/
- work = NULL;
- insert = &work;
for (next = orig; next; next = next->next) {
struct commit *commit = next->item;
- if (commit->indegree == 1)
- insert = &commit_list_insert(commit, insert)->next;
+ if (*(indegree_slab_at(&indegree, commit)) == 1)
+ prio_queue_put(&queue, commit);
}
- /* process the list in topological order */
- if (!lifo)
- commit_list_sort_by_date(&work);
+ /*
+ * This is unfortunate; the initial tips need to be shown
+ * in the order given by the revision traversal machinery.
+ */
+ if (sort_order == REV_SORT_IN_GRAPH_ORDER)
+ prio_queue_reverse(&queue);
+
+ /* We no longer need the commit list */
+ free_commit_list(orig);
pptr = list;
*list = NULL;
- while (work) {
- struct commit *commit;
- struct commit_list *parents, *work_item;
-
- work_item = work;
- work = work_item->next;
- work_item->next = NULL;
+ while ((commit = prio_queue_get(&queue)) != NULL) {
+ struct commit_list *parents;
- commit = work_item->item;
for (parents = commit->parents; parents ; parents = parents->next) {
struct commit *parent = parents->item;
+ int *pi = indegree_slab_at(&indegree, parent);
- if (!parent->indegree)
+ if (!*pi)
continue;
/*
@@ -579,21 +688,22 @@ void sort_in_topological_order(struct commit_list ** list, int lifo)
* when all their children have been emitted thereby
* guaranteeing topological order.
*/
- if (--parent->indegree == 1) {
- if (!lifo)
- commit_list_insert_by_date(parent, &work);
- else
- commit_list_insert(parent, &work);
- }
+ if (--(*pi) == 1)
+ prio_queue_put(&queue, parent);
}
/*
- * work_item is a commit all of whose children
- * have already been emitted. we can emit it now.
+ * all children of commit have already been
+ * emitted. we can emit it now.
*/
- commit->indegree = 0;
- *pptr = work_item;
- pptr = &work_item->next;
+ *(indegree_slab_at(&indegree, commit)) = 0;
+
+ pptr = &commit_list_insert(commit, pptr)->next;
}
+
+ clear_indegree_slab(&indegree);
+ clear_prio_queue(&queue);
+ if (sort_order == REV_SORT_BY_AUTHOR_DATE)
+ clear_author_date_slab(&author_date);
}
/* merge-base stuff */
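
The rewrite above moves the per-commit bookkeeping into a slab and turns the set of ready-to-emit commits into a prio_queue whose compare callback selects the secondary ordering. A minimal sketch of that queue pattern on its own, using only the calls visible in this hunk (prio_queue_put/get, clear_prio_queue, the compare member) and the same comparison convention as compare_commits_by_commit_date; the functions and the input list are hypothetical:

    #include "cache.h"
    #include "commit.h"
    #include "prio-queue.h"

    /* Same convention as above: return negative when 'a' should come out earlier. */
    static int newer_first(const void *a_, const void *b_, void *unused)
    {
            const struct commit *a = a_, *b = b_;
            if (a->date < b->date)
                    return 1;
            if (a->date > b->date)
                    return -1;
            return 0;
    }

    /* Drain a set of commits newest-first by committer date. */
    static void emit_by_commit_date(struct commit_list *list)
    {
            struct prio_queue queue;
            struct commit *commit;

            memset(&queue, 0, sizeof(queue));
            queue.compare = newer_first;
            for (; list; list = list->next)
                    prio_queue_put(&queue, list->item);
            while ((commit = prio_queue_get(&queue)) != NULL)
                    printf("%s\n", sha1_to_hex(commit->object.sha1));
            clear_prio_queue(&queue);
    }

With compare left NULL the queue appears to degrade to a simple LIFO stack, which is why the graph-order path above calls prio_queue_reverse() to restore the tips' original order.
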
diff --git a/commit.h b/commit.h
index 6e9c7cd9d5..4d452dc96d 100644
--- a/commit.h
+++ b/commit.h
@@ -15,7 +15,7 @@ struct commit_list {
struct commit {
struct object object;
void *util;
- unsigned int indegree;
+ unsigned int index;
unsigned long date;
struct commit_list *parents;
struct tree *tree;
@@ -142,15 +142,24 @@ void clear_commit_marks(struct commit *commit, unsigned int mark);
void clear_commit_marks_many(int nr, struct commit **commit, unsigned int mark);
void clear_commit_marks_for_object_array(struct object_array *a, unsigned mark);
+
+enum rev_sort_order {
+ REV_SORT_IN_GRAPH_ORDER = 0,
+ REV_SORT_BY_COMMIT_DATE,
+ REV_SORT_BY_AUTHOR_DATE
+};
+
/*
* Performs an in-place topological sort of list supplied.
*
* invariant of resulting list is:
* a reachable from b => ord(b) < ord(a)
- * in addition, when lifo == 0, commits on parallel tracks are
- * sorted in the dates order.
+ * sort_order further specifies:
+ * REV_SORT_IN_GRAPH_ORDER: try to show commits on a single-parent
+ * chain together.
+ * REV_SORT_BY_COMMIT_DATE: show eligible commits in committer-date order.
*/
-void sort_in_topological_order(struct commit_list ** list, int lifo);
+void sort_in_topological_order(struct commit_list **, enum rev_sort_order);
struct commit_graft {
unsigned char sha1[20];
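
With the enum in place, callers pick the tie-breaking order by name instead of passing a lifo flag. A minimal, hypothetical call site; the list is assumed to have been filled by revision traversal and the commits already parsed:

    #include "cache.h"
    #include "commit.h"

    static void print_topo_by_commit_date(struct commit_list **list)
    {
            struct commit_list *p;

            sort_in_topological_order(list, REV_SORT_BY_COMMIT_DATE);
            for (p = *list; p; p = p->next)
                    printf("%s\n", sha1_to_hex(p->item->object.sha1));
    }
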
diff --git a/configure.ac b/configure.ac
index f3462d9c81..2f433939dc 100644
--- a/configure.ac
+++ b/configure.ac
@@ -193,7 +193,7 @@ AC_ARG_ENABLE([pthreads],
[FLAGS is the value to pass to the compiler to enable POSIX Threads.]
[The default if FLAGS is not specified is to try first -pthread]
[and then -lpthread.]
- [--without-pthreads will disable threading.])],
+ [--disable-pthreads will disable threading.])],
[
if test "x$enableval" = "xyes"; then
AC_MSG_NOTICE([Will try -pthread then -lpthread to enable POSIX Threads])
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
index 86a4f3fa49..07a6218d10 100644
--- a/contrib/completion/git-prompt.sh
+++ b/contrib/completion/git-prompt.sh
@@ -347,9 +347,9 @@ __git_ps1 ()
local step=""
local total=""
if [ -d "$g/rebase-merge" ]; then
- b="$(cat "$g/rebase-merge/head-name")"
- step=$(cat "$g/rebase-merge/msgnum")
- total=$(cat "$g/rebase-merge/end")
+ b="$(cat "$g/rebase-merge/head-name" 2>/dev/null)"
+ step=$(cat "$g/rebase-merge/msgnum" 2>/dev/null)
+ total=$(cat "$g/rebase-merge/end" 2>/dev/null)
if [ -f "$g/rebase-merge/interactive" ]; then
r="|REBASE-i"
else
@@ -357,10 +357,10 @@ __git_ps1 ()
fi
else
if [ -d "$g/rebase-apply" ]; then
- step=$(cat "$g/rebase-apply/next")
- total=$(cat "$g/rebase-apply/last")
+ step=$(cat "$g/rebase-apply/next" 2>/dev/null)
+ total=$(cat "$g/rebase-apply/last" 2>/dev/null)
if [ -f "$g/rebase-apply/rebasing" ]; then
- b="$(cat "$g/rebase-apply/head-name")"
+ b="$(cat "$g/rebase-apply/head-name" 2>/dev/null)"
r="|REBASE"
elif [ -f "$g/rebase-apply/applying" ]; then
r="|AM"
diff --git a/contrib/mw-to-git/.perlcriticrc b/contrib/mw-to-git/.perlcriticrc
new file mode 100644
index 0000000000..5a9955d757
--- /dev/null
+++ b/contrib/mw-to-git/.perlcriticrc
@@ -0,0 +1,28 @@
+# These 3 rules demand adding the s, m and x flags to *every* regexp. This is
+# overkill and would harm readability.
+[-RegularExpressions::RequireExtendedFormatting]
+[-RegularExpressions::RequireDotMatchAnything]
+[-RegularExpressions::RequireLineBoundaryMatching]
+
+# This rule says that builtin functions should not be called with parentheses
+# e.g.: (taken from CPAN's documentation)
+# open($handle, '>', $filename); #not ok
+# open $handle, '>', $filename; #ok
+# Applying such a rule would mean modifying a huge number of lines for a
+# question of style.
+[-CodeLayout::ProhibitParensWithBuiltins]
+
+# This rule states that each system call should have its return value checked.
+# The problem is that it includes the print call. Checking every print call's
+# return value would be harmful to the code readability.
+# This configuration keeps all default functions but print.
+[InputOutput::RequireCheckedSyscalls]
+functions = open say close
+
+# This rule demands adding a dependency on the Readonly module. This is not
+# desired.
+[-ValuesAndExpressions::ProhibitConstantPragma]
+
+# This rule is not really useful (it is rather a question of style) and produces
+# many warnings throughout the code.
+[-ValuesAndExpressions::ProhibitNoisyQuotes]
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
index f14971987c..1fb2424481 100644
--- a/contrib/mw-to-git/Makefile
+++ b/contrib/mw-to-git/Makefile
@@ -15,3 +15,5 @@ all: build
build install clean:
$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL=$(SCRIPT_PERL_FULL) \
$@-perl-script
+perlcritic:
+ perlcritic -2 *.perl
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index c1a967b3d1..71baf8ace8 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -15,18 +15,16 @@ use strict;
use MediaWiki::API;
use Git;
use DateTime::Format::ISO8601;
+use warnings;
# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ":utf8";
-binmode STDOUT, ":utf8";
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
use URI::Escape;
-use IPC::Open2;
-
-use warnings;
# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
-use constant SLASH_REPLACEMENT => "%2F";
+use constant SLASH_REPLACEMENT => '%2F';
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
@@ -37,11 +35,23 @@ use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+use constant EMPTY => q{};
+
+# Number of pages taken into account at once in the get_mw_page_list subroutine
+use constant SLICE_SIZE => 50;
+
+# Number of linked mediafiles to get at once in get_linked_mediafiles.
+# The query is split into small batches because of the MW API limit on
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+use constant HTTP_CODE_OK => 200;
+
if (@ARGV != 2) {
exit_error_usage();
}
@@ -51,35 +61,35 @@ my $url = $ARGV[1];
# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
-my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
-my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
-$import_media = ($import_media eq "true");
+$import_media = ($import_media eq 'true');
# Export media files on push
-my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
-$export_media = !($export_media eq "false");
+$export_media = !($export_media eq 'false');
-my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discourraged. Use the credential system instead.
-my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
-my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
-$shallow_import = ($shallow_import eq "true");
+$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
@@ -87,15 +97,18 @@ $shallow_import = ($shallow_import eq "true");
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
-unless ($fetch_strategy) {
- $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+ $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
-unless ($fetch_strategy) {
- $fetch_strategy = "by_page";
+if (!$fetch_strategy) {
+ $fetch_strategy = 'by_page';
}
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
@@ -110,48 +123,25 @@ unless ($fetch_strategy) {
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
-my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
-unless ($dumb_push) {
- $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+ $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
-$dumb_push = ($dumb_push eq "true");
+$dumb_push = ($dumb_push eq 'true');
my $wiki_name = $url;
-$wiki_name =~ s/[^\/]*:\/\///;
+$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
# Commands parser
-my $entry;
-my @cmd;
while (<STDIN>) {
chomp;
- @cmd = split(/ /);
- if (defined($cmd[0])) {
- # Line not blank
- if ($cmd[0] eq "capabilities") {
- die("Too many arguments for capabilities") unless (!defined($cmd[1]));
- mw_capabilities();
- } elsif ($cmd[0] eq "list") {
- die("Too many arguments for list") unless (!defined($cmd[2]));
- mw_list($cmd[1]);
- } elsif ($cmd[0] eq "import") {
- die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
- mw_import($cmd[1]);
- } elsif ($cmd[0] eq "option") {
- die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
- mw_option($cmd[1],$cmd[2]);
- } elsif ($cmd[0] eq "push") {
- mw_push($cmd[1]);
- } else {
- print STDERR "Unknown command. Aborting...\n";
- last;
- }
- } else {
- # blank line: we should terminate
+
+ if (!parse_command($_)) {
last;
}
@@ -172,6 +162,40 @@ sub exit_error_usage {
"Then, use git commit, push and pull as with every normal git repository.\n";
}
+sub parse_command {
+ my ($line) = @_;
+ my @cmd = split(/ /, $line);
+ if (!defined $cmd[0]) {
+ return 0;
+ }
+ if ($cmd[0] eq 'capabilities') {
+ die("Too many arguments for capabilities\n")
+ if (defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq 'list') {
+ die("Too many arguments for list\n") if (defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq 'import') {
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+ die("Too many arguments for option\n")
+ if (defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq 'push') {
+ mw_push($cmd[1]);
+ } else {
+ print {*STDERR} "Unknown command. Aborting...\n";
+ return 0;
+ }
+ return 1;
+}
+
# MediaWiki API instance, created lazily.
my $mediawiki;
@@ -180,7 +204,7 @@ sub mw_connect_maybe {
return;
}
$mediawiki = MediaWiki::API->new;
- $mediawiki->{config}->{api_url} = "$url/api.php";
+ $mediawiki->{config}->{api_url} = "${url}/api.php";
if ($wiki_login) {
my %credential = (
'url' => $url,
@@ -193,16 +217,17 @@ sub mw_connect_maybe {
lgdomain => $wiki_domain};
if ($mediawiki->login($request)) {
Git::credential(\%credential, 'approve');
- print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
+ print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
} else {
- print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
- print STDERR " (error " .
+ print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
+ print {*STDERR} ' (error ' .
$mediawiki->{error}->{code} . ': ' .
$mediawiki->{error}->{details} . ")\n";
Git::credential(\%credential, 'reject');
exit 1;
}
}
+ return;
}
sub fatal_mw_error {
@@ -225,21 +250,23 @@ sub fatal_mw_error {
sub get_mw_tracked_pages {
my $pages = shift;
get_mw_page_list(\@tracked_pages, $pages);
+ return;
}
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
+ my $last_page = SLICE_SIZE;
+ if ($#some_pages < $last_page) {
+ $last_page = $#some_pages;
}
- my @slice = @some_pages[0..$last];
+ my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[51..$#some_pages];
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
+ return;
}
sub get_mw_tracked_categories {
@@ -249,7 +276,7 @@ sub get_mw_tracked_categories {
# Mediawiki requires the Category
# prefix, but let's not force the user
# to specify it.
- $category = "Category:" . $category;
+ $category = "Category:${category}";
}
my $mw_pages = $mediawiki->list( {
action => 'query',
@@ -257,11 +284,12 @@ sub get_mw_tracked_categories {
cmtitle => $category,
cmlimit => 'max' } )
|| die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
sub get_mw_all_pages {
@@ -278,6 +306,7 @@ sub get_mw_all_pages {
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
# queries the wiki for a set of pages. Meant to be used within a loop
@@ -300,18 +329,19 @@ sub get_mw_first_pages {
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
- print STDERR "Warning: page $page->{title} not found on wiki\n";
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
mw_connect_maybe();
- print STDERR "Listing pages on remote wiki...\n";
+ print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
@@ -329,14 +359,14 @@ sub get_mw_pages {
get_mw_all_pages(\%pages);
}
if ($import_media) {
- print STDERR "Getting media files for selected pages...\n";
+ print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
- print STDERR (scalar keys %pages) . " pages found.\n";
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
@@ -344,9 +374,13 @@ sub get_mw_pages {
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
my $args = shift;
- my $encoding = (shift || "encoding(UTF-8)");
- open(my $git, "-|:$encoding", "git " . $args);
- my $res = do { local $/; <$git> };
+ my $encoding = (shift || 'encoding(UTF-8)');
+ open(my $git, "-|:${encoding}", "git ${args}")
+ or die "Unable to fork: $!\n";
+ my $res = do {
+ local $/ = undef;
+ <$git>
+ };
close($git);
return $res;
@@ -361,27 +395,26 @@ sub get_all_mediafiles {
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
- apnamespace => get_mw_namespace_id("File"),
+ apnamespace => get_mw_namespace_id('File'),
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of pages for media files.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
sub get_linked_mediafiles {
my $pages = shift;
- my @titles = map $_->{title}, values(%{$pages});
+ my @titles = map { $_->{title} } values(%{$pages});
- # The query is split in small batches because of the MW API limit of
- # the number of links to be returned (500 links max).
- my $batch = 10;
+ my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
@@ -397,7 +430,7 @@ sub get_linked_mediafiles {
action => 'query',
prop => 'links|images',
titles => $mw_titles,
- plnamespace => get_mw_namespace_id("File"),
+ plnamespace => get_mw_namespace_id('File'),
pllimit => 'max'
};
my $result = $mediawiki->api($query);
@@ -405,11 +438,13 @@ sub get_linked_mediafiles {
while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
my @media_titles;
if (defined($page->{links})) {
- my @link_titles = map $_->{title}, @{$page->{links}};
+ my @link_titles
+ = map { $_->{title} } @{$page->{links}};
push(@media_titles, @link_titles);
}
if (defined($page->{images})) {
- my @image_titles = map $_->{title}, @{$page->{images}};
+ my @image_titles
+ = map { $_->{title} } @{$page->{images}};
push(@media_titles, @image_titles);
}
if (@media_titles) {
@@ -419,6 +454,7 @@ sub get_linked_mediafiles {
@titles = @titles[($batch+1)..$#titles];
}
+ return;
}
sub get_mw_mediafile_for_page_revision {
@@ -432,7 +468,7 @@ sub get_mw_mediafile_for_page_revision {
my $query = {
action => 'query',
prop => 'imageinfo',
- titles => "File:" . $filename,
+ titles => "File:${filename}",
iistart => $timestamp,
iiend => $timestamp,
iiprop => 'timestamp|archivename|url',
@@ -450,47 +486,44 @@ sub get_mw_mediafile_for_page_revision {
$mediafile{timestamp} = $fileinfo->{timestamp};
# Mediawiki::API's download function doesn't support https URLs
# and can't download old versions of files.
- print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
}
sub download_mw_mediafile {
- my $url = shift;
+ my $download_url = shift;
- my $response = $mediawiki->{ua}->get($url);
- if ($response->code == 200) {
+ my $response = $mediawiki->{ua}->get($download_url);
+ if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
- print STDERR "Error downloading mediafile from :\n";
- print STDERR "URL: $url\n";
- print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ print {*STDERR} "Error downloading mediafile from :\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
sub get_last_local_revision {
# Get note regarding last mediawiki revision
- my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
my @note_info = split(/ /, $note);
my $lastrevision_number;
- if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
- print STDERR "No previous mediawiki revision found";
+ if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+ print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted : mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
- print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
# Get the last remote revision without taking in account which pages are
# tracked or not. This function makes a single request to the wiki thus
# avoid a loop onto all tracked pages. This is useful for the fetch-by-rev
@@ -519,7 +552,7 @@ sub get_last_remote_revision {
my $max_rev_num = 0;
- print STDERR "Getting last revision id on tracked pages...\n";
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -540,7 +573,7 @@ sub get_last_remote_revision {
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
- print STDERR "Last remote revision found is $max_rev_num.\n";
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
@@ -551,7 +584,7 @@ sub mediawiki_clean {
# Mediawiki does not allow blank space at the end of a page and ends with a single \n.
# This function right trims a string and adds a \n at the end to follow this rule
$string =~ s/\s+$//;
- if ($string eq "" && $page_created) {
+ if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
@@ -562,15 +595,15 @@ sub mediawiki_clean {
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
- $string = "";
+ $string = EMPTY;
}
# This \n is important. This is due to mediawiki's way to handle end of files.
- return $string."\n";
+ return "${string}\n";
}
sub mediawiki_clean_filename {
my $filename = shift;
- $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
+ $filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
# Do a variant of URL-encoding, i.e. looks like URL-encoding,
# but with _ added to prevent MediaWiki from thinking this is
@@ -584,16 +617,17 @@ sub mediawiki_clean_filename {
sub mediawiki_smudge_filename {
my $filename = shift;
- $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
+ $filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
$filename =~ s/ /_/g;
# Decode forbidden characters encoded in mediawiki_clean_filename
- $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
return $filename;
}
sub literal_data {
my ($content) = @_;
- print STDOUT "data ", bytes::length($content), "\n", $content;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ return;
}
sub literal_data_raw {
@@ -601,33 +635,37 @@ sub literal_data_raw {
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
- binmode STDOUT, ":raw";
- print STDOUT "data ", bytes::length($content), "\n", $content;
- binmode STDOUT, ":utf8";
+ binmode {*STDOUT}, ':raw';
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ binmode {*STDOUT}, ':encoding(UTF-8)';
+ return;
}
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
- print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
- print STDOUT "import\n";
- print STDOUT "list\n";
- print STDOUT "push\n";
- print STDOUT "\n";
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_list {
# MediaWiki do not have branches, we consider one branch arbitrarily
# called master, and HEAD pointing to it.
- print STDOUT "? refs/heads/master\n";
- print STDOUT "\@refs/heads/master HEAD\n";
- print STDOUT "\n";
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_option {
- print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
- print STDOUT "unsupported\n";
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
+ return;
}
sub fetch_mw_revisions_for_page {
@@ -658,15 +696,15 @@ sub fetch_mw_revisions_for_page {
push(@page_revs, $page_rev_ids);
$revnum++;
}
- last unless $result->{'query-continue'};
+ last if (!$result->{'query-continue'});
$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
}
if ($shallow_import && @page_revs) {
- print STDERR " Found 1 revision (shallow import).\n";
+ print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
- print STDERR " Found ", $revnum, " revision(s).\n";
+ print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
@@ -678,8 +716,7 @@ sub fetch_mw_revisions {
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
-
- print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
@@ -693,7 +730,7 @@ sub fe_escape_path {
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
- return '"' . $path . '"';
+ return qq("${path}");
}
sub import_file_revision {
@@ -713,42 +750,43 @@ sub import_file_revision {
my $author = $commit{author};
my $date = $commit{date};
- print STDOUT "commit refs/mediawiki/$remotename/master\n";
- print STDOUT "mark :$n\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
if (!$full_import && $n == 1) {
- print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
- print STDOUT "M 644 inline " .
- fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'M 644 inline ' .
+ fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
- print STDOUT "M 644 inline "
+ print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
} else {
- print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# mediawiki revision number in the git note
if ($full_import && $n == 1) {
- print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
- print STDOUT "commit refs/notes/$remotename/mediawiki\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
- literal_data("Note added by git-mediawiki during import");
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
- print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
- print STDOUT "N inline :$n\n";
- literal_data("mediawiki_revision: " . $commit{mw_revision});
- print STDOUT "\n\n";
+ print {*STDOUT} "N inline :${n}\n";
+ literal_data("mediawiki_revision: $commit{mw_revision}");
+ print {*STDOUT} "\n\n";
+ return;
}
# parse a sequence of
@@ -761,23 +799,25 @@ sub get_more_refs {
my @refs;
while (1) {
my $line = <STDIN>;
- if ($line =~ m/^$cmd (.*)$/) {
+ if ($line =~ /^$cmd (.*)$/) {
push(@refs, $1);
} elsif ($line eq "\n") {
return @refs;
} else {
- die("Invalid command in a '$cmd' batch: ". $_);
+ die("Invalid command in a '$cmd' batch: $_\n");
}
}
+ return;
}
sub mw_import {
# multiple import commands can follow each other.
- my @refs = (shift, get_more_refs("import"));
+ my @refs = (shift, get_more_refs('import'));
foreach my $ref (@refs) {
mw_import_ref($ref);
}
- print STDOUT "done\n";
+ print {*STDOUT} "done\n";
+ return;
}
sub mw_import_ref {
@@ -787,40 +827,41 @@ sub mw_import_ref {
# Since HEAD is a symbolic ref to master (by convention,
# followed by the output of the command "list" that we gave),
# we don't need to do anything in this case.
- if ($ref eq "HEAD") {
+ if ($ref eq 'HEAD') {
return;
}
mw_connect_maybe();
- print STDERR "Searching revisions...\n";
+ print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
- print STDERR ", fetching from beginning.\n";
+ print {*STDERR} ", fetching from beginning.\n";
} else {
- print STDERR ", fetching from here.\n";
+ print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
- if ($fetch_strategy eq "by_rev") {
- print STDERR "Fetching & writing export data by revs...\n";
+ if ($fetch_strategy eq 'by_rev') {
+ print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
- } elsif ($fetch_strategy eq "by_page") {
- print STDERR "Fetching & writing export data by pages...\n";
+ } elsif ($fetch_strategy eq 'by_page') {
+ print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
- print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
- print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done remote-helper side. If nothing is done, an error is
# thrown saying that HEAD is referring to unknown object 0000000000000000000
# and the clone fails.
}
+ return;
}
sub mw_import_ref_by_pages {
@@ -832,7 +873,7 @@ sub mw_import_ref_by_pages {
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
- my @revision_ids = map $_->{revid}, @revisions;
+ my @revision_ids = map { $_->{revid} } @revisions;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
@@ -859,7 +900,7 @@ sub mw_import_revids {
my $n_actual = 0;
my $last_timestamp = 0; # Placeholer in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
@@ -875,7 +916,7 @@ sub mw_import_revids {
my $result = $mediawiki->api($query);
if (!$result) {
- die "Failed to retrieve modified page for revision $pagerevid";
+ die "Failed to retrieve modified page for revision $pagerevid\n";
}
if (defined($result->{query}->{badrevids}->{$pagerevid})) {
@@ -884,7 +925,7 @@ sub mw_import_revids {
}
if (!defined($result->{query}->{pages})) {
- die "Invalid revision $pagerevid.";
+ die "Invalid revision ${pagerevid}.\n";
}
my @result_pages = values(%{$result->{query}->{pages}});
@@ -894,8 +935,8 @@ sub mw_import_revids {
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print STDERR "$n/", scalar(@$revision_ids),
- ": Skipping revision #$rev->{revid} of $page_title\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+ ": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
@@ -920,14 +961,14 @@ sub mw_import_revids {
my %mediafile;
if ($namespace) {
my $id = get_mw_namespace_id($namespace);
- if ($id && $id == get_mw_namespace_id("File")) {
+ if ($id && $id == get_mw_namespace_id('File')) {
%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
}
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
@@ -935,17 +976,17 @@ sub mw_import_revids {
}
sub error_non_fast_forward {
- my $advice = run_git("config --bool advice.pushNonFastForward");
+ my $advice = run_git('config --bool advice.pushNonFastForward');
chomp($advice);
- if ($advice ne "false") {
+ if ($advice ne 'false') {
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
- print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
- print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
@@ -956,11 +997,11 @@ sub mw_upload_file {
my $file_deleted = shift;
my $summary = shift;
my $newrevid;
- my $path = "File:" . $complete_file_name;
+ my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
- print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
- print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a priviledged user
@@ -972,18 +1013,18 @@ sub mw_upload_file {
reason => $summary
};
if (!$mediawiki->edit($query)) {
- print STDERR "Failed to delete file on remote wiki\n";
- print STDERR "Check your permissions on the remote site. Error code:\n";
- print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
- my $content = run_git("cat-file blob $new_sha1", "raw");
- if ($content ne "") {
+ my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+ if ($content ne EMPTY) {
mw_connect_maybe();
$mediawiki->{config}->{upload_url} =
- "$url/index.php/Special:Upload";
+ "${url}/index.php/Special:Upload";
$mediawiki->edit({
action => 'upload',
filename => $complete_file_name,
@@ -995,12 +1036,12 @@ sub mw_upload_file {
}, {
skip_encoding => 1
} ) || die $mediawiki->{error}->{code} . ':'
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
- print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
- print STDERR "Empty file $complete_file_name not pushed.\n";
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
@@ -1022,7 +1063,7 @@ sub mw_push_file {
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
- $summary = '';
+ $summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
@@ -1033,13 +1074,13 @@ sub mw_push_file {
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
- $extension = "";
+ $extension = EMPTY;
}
- if ($extension eq "mw") {
+ if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
- if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
- print STDERR "Ignoring media file related page: $complete_file_name\n";
- return ($oldrevid, "ok");
+ if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+ return ($oldrevid, 'ok');
}
my $file_content;
if ($page_deleted) {
@@ -1049,7 +1090,7 @@ sub mw_push_file {
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
- $file_content = run_git("cat-file blob $new_sha1");
+ $file_content = run_git("cat-file blob ${new_sha1}");
}
mw_connect_maybe();
@@ -1066,49 +1107,49 @@ sub mw_push_file {
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
- print STDERR 'Warning: Error ' .
+ print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($oldrevid, "non-fast-forward");
+ return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details};
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
- print STDERR "Pushed file: $new_sha1 - $title\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
- print STDERR "Ignoring media file $title\n";
+ print {*STDERR} "Ignoring media file ${title}\n";
}
$newrevid = ($newrevid or $oldrevid);
- return ($newrevid, "ok");
+ return ($newrevid, 'ok');
}
sub mw_push {
# multiple push statements can follow each other
- my @refsspecs = (shift, get_more_refs("push"));
+ my @refsspecs = (shift, get_more_refs('push'));
my $pushed;
for my $refspec (@refsspecs) {
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
- or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
- print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
- if ($local eq "") {
- print STDERR "Cannot delete remote branch on a MediaWiki\n";
- print STDOUT "error $remote cannot delete\n";
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
- if ($remote ne "refs/heads/master") {
- print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
- print STDOUT "error $remote only master allowed\n";
+ if ($remote ne 'refs/heads/master') {
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
@@ -1117,30 +1158,32 @@ sub mw_push {
}
# Notify Git that the push is done
- print STDOUT "\n";
+ print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
- print STDERR "Just pushed some revisions to MediaWiki.\n";
- print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
- print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
- print STDERR "\n";
- print STDERR " git pull --rebase\n";
- print STDERR "\n";
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
}
+ return;
}
sub mw_push_revision {
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
- print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
- my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+ chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
- my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
@@ -1159,22 +1202,22 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
- print STDERR "Computing path from local to remote ...\n";
- my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ print {*STDERR} "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
foreach my $line (@local_ancestry) {
- if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
- foreach my $parent (split(' ', $parents)) {
+ if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(/ /, $parents)) {
$local_ancestry{$parent} = $child;
}
- } elsif (!$line =~ m/^([a-f0-9]+)/) {
- die "Unexpected output from git rev-list: $line";
+ } elsif (!$line =~ /^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: ${line}\n";
}
}
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
- printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
@@ -1183,12 +1226,12 @@ sub mw_push_revision {
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
- print STDERR "Warning: no common ancestor, pushing complete history\n";
- my $history = run_git("rev-list --first-parent --children $local");
- my @history = split('\n', $history);
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children ${local}");
+ my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
- my @commit_info_split = split(/ |\n/, $line);
+ my @commit_info_split = split(/[ \n]/, $line);
push(@commit_pairs, \@commit_info_split);
}
}
@@ -1196,12 +1239,12 @@ sub mw_push_revision {
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
- my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as mediawiki comment for the revision
- my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
+ my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -1213,7 +1256,7 @@ sub mw_push_revision {
my $info = shift(@diff_info_list);
my $file = shift(@diff_info_list);
($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
- if ($status eq "non-fast-forward") {
+ if ($status eq 'non-fast-forward') {
# we may already have sent part of the
# commit to MediaWiki, but it's too
# late to cancel it. Stop the push in
@@ -1221,17 +1264,17 @@ sub mw_push_revision {
# accurate error message.
return error_non_fast_forward($remote);
}
- if ($status ne "ok") {
- die("Unknown error from mw_push_file()");
+ if ($status ne 'ok') {
+ die("Unknown error from mw_push_file()\n");
}
}
- unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
- run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ if (!$dumb_push) {
+ run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
+ run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
}
}
- print STDOUT "ok $remote\n";
+ print {*STDOUT} "ok ${remote}\n";
return 1;
}
@@ -1244,8 +1287,8 @@ sub get_allowed_file_extensions {
siprop => 'fileextensions'
};
my $result = $mediawiki->api($query);
- my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
- my %hashFile = map {$_ => 1}@file_extensions;
+ my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+ my %hashFile = map { $_ => 1 } @file_extensions;
return %hashFile;
}
@@ -1267,8 +1310,8 @@ sub get_mw_namespace_id {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
- my @temp = split(/[\n]/, run_git("config --get-all remote."
- . $remotename .".namespaceCache"));
+ my @temp = split(/\n/,
+ run_git("config --get-all remote.${remotename}.namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
@@ -1282,7 +1325,7 @@ sub get_mw_namespace_id {
}
if (!exists $namespace_id{$name}) {
- print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
@@ -1306,8 +1349,8 @@ sub get_mw_namespace_id {
my $ns = $namespace_id{$name};
my $id;
- unless (defined $ns) {
- print STDERR "No such namespace $name on MediaWiki.\n";
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}
@@ -1321,15 +1364,15 @@ sub get_mw_namespace_id {
# Store explicitely requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
- run_git("config --add remote.". $remotename
- .".namespaceCache \"". $name .":". $store_id ."\"");
+ run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
$cached_mw_namespace_id{$name} = 1;
}
return $id;
}
sub get_mw_namespace_id_for_page {
- if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ my $namespace = shift;
+ if ($namespace =~ /^([^:]*):/) {
return get_mw_namespace_id($namespace);
} else {
return;
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
index 3b2cfacf51..bb76cee379 100755
--- a/contrib/mw-to-git/t/test-gitmw-lib.sh
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -336,20 +336,21 @@ wiki_install () {
fi
# Fetch MediaWiki's archive if not already present in the TMP directory
+ MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
cd "$TMP"
- if [ ! -f "$MW_VERSION.tar.gz" ] ; then
- echo "Downloading $MW_VERSION sources ..."
- wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
+ if [ ! -f $MW_FILENAME ] ; then
+ echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
+ wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
error "Unable to download "\
- "http://download.wikimedia.org/mediawiki/1.19/"\
- "mediawiki-1.19.0.tar.gz. "\
+ "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
+ "$MW_FILENAME. "\
"Please fix your connection and launch the script again."
- echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+ echo "$MW_FILENAME downloaded in `pwd`. "\
"You can delete it later if you want."
else
- echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+ echo "Reusing existing $MW_FILENAME downloaded in `pwd`."
fi
- archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ archive_abs_path=$(pwd)/$MW_FILENAME
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
tar xzf "$archive_abs_path" --strip-components=1 ||
@@ -431,5 +432,5 @@ wiki_delete () {
# Delete the wiki's SQLite database
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
rm -f "$FILES_FOLDER/$DB_FILE"
- rm -rf "$TMP/$MW_VERSION"
+ rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
}
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
index 958b37b4a7..4cfebe9c69 100644
--- a/contrib/mw-to-git/t/test.config
+++ b/contrib/mw-to-git/t/test.config
@@ -30,6 +30,8 @@ WEB_WWW=$WEB/www
# The variables below are used by the script to install a wiki.
# You should not modify these unless you are modifying the script itself.
-MW_VERSION=mediawiki-1.19.0
+# tested versions: 1.19.X -> 1.21.1
+MW_VERSION_MAJOR=1.21
+MW_VERSION_MINOR=1
FILES_FOLDER=install-wiki
DB_INSTALL_SCRIPT=db_install.php
diff --git a/diff.c b/diff.c
index f0b3e7cfe3..208094f6b7 100644
--- a/diff.c
+++ b/diff.c
@@ -3593,6 +3593,8 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac)
DIFF_XDL_SET(options, IGNORE_WHITESPACE_CHANGE);
else if (!strcmp(arg, "--ignore-space-at-eol"))
DIFF_XDL_SET(options, IGNORE_WHITESPACE_AT_EOL);
+ else if (!strcmp(arg, "--ignore-blank-lines"))
+ DIFF_XDL_SET(options, IGNORE_BLANK_LINES);
else if (!strcmp(arg, "--patience"))
options->xdl_opts = DIFF_WITH_ALG(options, PATIENCE_DIFF);
else if (!strcmp(arg, "--histogram"))
diff --git a/git-add--interactive.perl b/git-add--interactive.perl
index d2c4ce6e1e..75a991f7ec 100755
--- a/git-add--interactive.perl
+++ b/git-add--interactive.perl
@@ -44,6 +44,8 @@ my ($diff_new_color) =
my $normal_color = $repo->get_color("", "reset");
+my $diff_algorithm = $repo->config('diff.algorithm');
+
my $use_readkey = 0;
my $use_termcap = 0;
my %term_escapes;
@@ -731,6 +733,9 @@ sub run_git_apply {
sub parse_diff {
my ($path) = @_;
my @diff_cmd = split(" ", $patch_mode_flavour{DIFF});
+ if (defined $diff_algorithm) {
+ splice @diff_cmd, 1, 0, "--diff-algorithm=${diff_algorithm}";
+ }
if (defined $patch_mode_revision) {
push @diff_cmd, $patch_mode_revision;
}
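
With the two hunks above, git add --interactive (and therefore git add -p) starts honoring the diff.algorithm configuration: when the variable is set, its value is spliced into the underlying diff invocation as --diff-algorithm=<value>, so the hunks offered for staging are split the same way as in ordinary git diff output (for example with histogram or patience).
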
diff --git a/git-am.sh b/git-am.sh
index 1cf3d1dacf..9f4450916c 100755
--- a/git-am.sh
+++ b/git-am.sh
@@ -506,6 +506,23 @@ then
esac
rm -f "$dotest/dirtyindex"
else
+	# Possible stray $dotest directory in the independent-run
+	# case; in the --rebasing case, it is up to the caller
+	# (git-rebase--am) to take care of stray directories.
+ if test -d "$dotest" && test -z "$rebasing"
+ then
+ case "$skip,$resolved,$abort" in
+ ,,t)
+ rm -fr "$dotest"
+ exit 0
+ ;;
+ *)
+ die "$(eval_gettext "Stray \$dotest directory found.
+Use \"git am --abort\" to remove it.")"
+ ;;
+ esac
+ fi
+
# Make sure we are not given --skip, --resolved, nor --abort
test "$skip$resolved$abort" = "" ||
die "$(gettext "Resolve operation not in progress, we are not resuming.")"
diff --git a/git-mergetool--lib.sh b/git-mergetool--lib.sh
index e338be5e57..6a721064c2 100644
--- a/git-mergetool--lib.sh
+++ b/git-mergetool--lib.sh
@@ -114,6 +114,33 @@ valid_tool () {
test -n "$cmd"
}
+setup_user_tool () {
+ merge_tool_cmd=$(get_merge_tool_cmd "$tool")
+ test -n "$merge_tool_cmd" || return 1
+
+ diff_cmd () {
+ ( eval $merge_tool_cmd )
+ status=$?
+ return $status
+ }
+
+ merge_cmd () {
+ trust_exit_code=$(git config --bool \
+ "mergetool.$1.trustExitCode" || echo false)
+ if test "$trust_exit_code" = "false"
+ then
+ touch "$BACKUP"
+ ( eval $merge_tool_cmd )
+ status=$?
+ check_unchanged
+ else
+ ( eval $merge_tool_cmd )
+ status=$?
+ fi
+ return $status
+ }
+}
+
setup_tool () {
tool="$1"
@@ -142,15 +169,15 @@ setup_tool () {
if ! test -f "$MERGE_TOOLS_DIR/$tool"
then
- # Use a special return code for this case since we want to
- # source "defaults" even when an explicit tool path is
- # configured since the user can use that to override the
- # default path in the scriptlet.
- return 2
+ setup_user_tool
+ return $?
fi
# Load the redefined functions
. "$MERGE_TOOLS_DIR/$tool"
+	# Now let the user override the default command for the tool.  If
+	# they have not done so, this will return 1, which we ignore.
+ setup_user_tool
if merge_mode && ! can_merge
then
@@ -187,20 +214,7 @@ run_merge_tool () {
status=0
# Bring tool-specific functions into scope
- setup_tool "$1"
- exitcode=$?
- case $exitcode in
- 0)
- :
- ;;
- 2)
- # The configured tool is not a built-in tool.
- test -n "$merge_tool_path" || return 1
- ;;
- *)
- return $exitcode
- ;;
- esac
+ setup_tool "$1" || return 1
if merge_mode
then
@@ -213,38 +227,12 @@ run_merge_tool () {
# Run either a configured or built-in diff tool
run_diff_cmd () {
- merge_tool_cmd=$(get_merge_tool_cmd "$1")
- if test -n "$merge_tool_cmd"
- then
- ( eval $merge_tool_cmd )
- status=$?
- return $status
- else
- diff_cmd "$1"
- fi
+ diff_cmd "$1"
}
# Run either a configured or built-in merge tool
run_merge_cmd () {
- merge_tool_cmd=$(get_merge_tool_cmd "$1")
- if test -n "$merge_tool_cmd"
- then
- trust_exit_code=$(git config --bool \
- "mergetool.$1.trustExitCode" || echo false)
- if test "$trust_exit_code" = "false"
- then
- touch "$BACKUP"
- ( eval $merge_tool_cmd )
- status=$?
- check_unchanged
- else
- ( eval $merge_tool_cmd )
- status=$?
- fi
- return $status
- else
- merge_cmd "$1"
- fi
+ merge_cmd "$1"
}
list_merge_tool_candidates () {
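
The net effect in git-mergetool--lib.sh is that a user-configured tool (one with mergetool.<tool>.cmd or difftool.<tool>.cmd set) now flows through the same diff_cmd/merge_cmd hooks as the built-in scriptlets: setup_user_tool derives those hooks from the configured command, and mergetool.<tool>.trustExitCode still decides whether the $BACKUP/check_unchanged comparison is needed to tell whether the merge succeeded.
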
diff --git a/git-p4.py b/git-p4.py
index 911bbce6c5..88fcf232e5 100755
--- a/git-p4.py
+++ b/git-p4.py
@@ -3168,7 +3168,7 @@ class P4Rebase(Command):
if os.system("git update-index --refresh") != 0:
die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
if len(read_pipe("git diff-index HEAD --")) > 0:
- die("You have uncommited changes. Please commit them before rebasing or stash them away with git stash.");
+ die("You have uncommitted changes. Please commit them before rebasing or stash them away with git stash.");
[upstream, settings] = findUpstreamBranchPoint()
if len(upstream) == 0:
diff --git a/git-pull.sh b/git-pull.sh
index 638aabb7b3..6828e2c715 100755
--- a/git-pull.sh
+++ b/git-pull.sh
@@ -266,10 +266,17 @@ case "$merge_head" in
;;
esac
+# Pulling into unborn branch: a shorthand for branching off
+# FETCH_HEAD, for lazy typers.
if test -z "$orig_head"
then
- git update-ref -m "initial pull" HEAD $merge_head "$curr_head" &&
- git read-tree -m -u HEAD || exit 1
+ # Two-way merge: we claim the index is based on an empty tree,
+ # and try to fast-forward to HEAD. This ensures we will not
+ # lose index/worktree changes that the user already made on
+ # the unborn branch.
+ empty_tree=4b825dc642cb6eb9a060e54bf8d69288fbee4904
+ git read-tree -m -u $empty_tree $merge_head &&
+ git update-ref -m "initial pull" HEAD $merge_head "$curr_head"
exit
fi
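
For context, 4b825dc642cb6eb9a060e54bf8d69288fbee4904 is the object name of the empty tree, so the read-tree call above performs a two-way merge from "nothing" to FETCH_HEAD; read-tree refuses to overwrite index or working-tree entries it would clobber, which is what protects work the user has already done on the unborn branch.
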
diff --git a/git-rebase.sh b/git-rebase.sh
index d0c11a910a..81b0346a5d 100755
--- a/git-rebase.sh
+++ b/git-rebase.sh
@@ -84,6 +84,8 @@ keep_empty=
test "$(git config --bool rebase.autosquash)" = "true" && autosquash=t
read_basic_state () {
+ test -f "$state_dir/head-name" &&
+ test -f "$state_dir/onto" &&
head_name=$(cat "$state_dir"/head-name) &&
onto=$(cat "$state_dir"/onto) &&
# We always write to orig-head, but interactive rebase used to write to
@@ -153,11 +155,8 @@ finish_rebase () {
then
echo "$(gettext 'Applied autostash.')"
else
- ref_stash=refs/stash &&
- >>"$GIT_DIR/logs/$ref_stash" &&
- git update-ref -m "autostash" $ref_stash $stash_sha1 ||
- die "$(eval_gettext 'Cannot store $stash_sha1')"
-
+ git stash store -m "autostash" -q $stash_sha1 ||
+ die "$(eval_gettext "Cannot store \$stash_sha1")"
gettext 'Applying autostash resulted in conflicts.
Your changes are safe in the stash.
You can run "git stash pop" or "git stash drop" it at any time.
@@ -434,7 +433,7 @@ then
shift
;;
esac
- upstream=`git rev-parse --verify "${upstream_name}^0"` ||
+ upstream=$(peel_committish "${upstream_name}") ||
die "$(eval_gettext "invalid upstream \$upstream_name")"
upstream_arg="$upstream_name"
else
@@ -470,7 +469,7 @@ case "$onto_name" in
fi
;;
*)
- onto=$(git rev-parse --verify "${onto_name}^0") ||
+ onto=$(peel_committish "$onto_name") ||
die "$(eval_gettext "Does not point to a valid commit: \$onto_name")"
;;
esac
@@ -545,6 +544,7 @@ then
# Lazily switch to the target branch if needed...
test -z "$switch_to" || git checkout "$switch_to" --
say "$(eval_gettext "Current branch \$branch_name is up to date.")"
+ finish_rebase
exit 0
else
say "$(eval_gettext "Current branch \$branch_name is up to date, rebase forced.")"
@@ -577,6 +577,7 @@ if test "$mb" = "$orig_head"
then
say "$(eval_gettext "Fast-forwarded \$branch_name to \$onto_name.")"
move_to_original_branch
+ finish_rebase
exit 0
fi
diff --git a/git-send-email.perl b/git-send-email.perl
index 671762b930..ecbf56f693 100755
--- a/git-send-email.perl
+++ b/git-send-email.perl
@@ -1259,6 +1259,7 @@ foreach my $t (@files) {
open my $fh, "<", $t or die "can't open file $t";
my $author = undef;
+ my $sauthor = undef;
my $author_encoding;
my $has_content_type;
my $body_encoding;
@@ -1297,7 +1298,7 @@ foreach my $t (@files) {
}
elsif (/^From:\s+(.*)$/i) {
($author, $author_encoding) = unquote_rfc2047($1);
- my $sauthor = sanitize_address($author);
+ $sauthor = sanitize_address($author);
next if $suppress_cc{'author'};
next if $suppress_cc{'self'} and $sauthor eq $sender;
printf("(mbox) Adding cc: %s from line '%s'\n",
@@ -1393,7 +1394,7 @@ foreach my $t (@files) {
$subject = quote_subject($subject, $auto_8bit_encoding);
}
- if (defined $author and $author ne $sender) {
+ if (defined $sauthor and $sauthor ne $sender) {
$message = "From: $author\n\n$message";
if (defined $author_encoding) {
if ($has_content_type) {
diff --git a/git-sh-setup.sh b/git-sh-setup.sh
index 2f7835941e..7a964ad2ff 100644
--- a/git-sh-setup.sh
+++ b/git-sh-setup.sh
@@ -313,3 +313,15 @@ then
}
: ${GIT_OBJECT_DIRECTORY="$GIT_DIR/objects"}
fi
+
+peel_committish () {
+ case "$1" in
+ :/*)
+ peeltmp=$(git rev-parse --verify "$1") &&
+ git rev-parse --verify "${peeltmp}^0"
+ ;;
+ *)
+ git rev-parse --verify "${1}^0"
+ ;;
+ esac
+}
diff --git a/git-stash.sh b/git-stash.sh
index bbefdf6424..1e541a2125 100755
--- a/git-stash.sh
+++ b/git-stash.sh
@@ -156,6 +156,41 @@ create_stash () {
die "$(gettext "Cannot record working tree state")"
}
+store_stash () {
+ while test $# != 0
+ do
+ case "$1" in
+ -m|--message)
+ shift
+ stash_msg="$1"
+ ;;
+ -q|--quiet)
+ quiet=t
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+ done
+ test $# = 1 ||
+ die "$(eval_gettext "\"$dashless store\" requires one <commit> argument")"
+
+ w_commit="$1"
+ if test -z "$stash_msg"
+ then
+ stash_msg="Created via \"git stash store\"."
+ fi
+
+ # Make sure the reflog for stash is kept.
+ : >>"$GIT_DIR/logs/$ref_stash"
+ git update-ref -m "$stash_msg" $ref_stash $w_commit
+ ret=$?
+	test $ret != 0 && test -z "$quiet" &&
+ die "$(eval_gettext "Cannot update \$ref_stash with \$w_commit")"
+ return $ret
+}
+
save_stash () {
keep_index=
patch_mode=
@@ -227,12 +262,8 @@ save_stash () {
clear_stash || die "$(gettext "Cannot initialize stash")"
create_stash "$stash_msg" $untracked
-
- # Make sure the reflog for stash is kept.
- : >>"$GIT_DIR/logs/$ref_stash"
-
- git update-ref -m "$stash_msg" $ref_stash $w_commit ||
- die "$(gettext "Cannot save the current status")"
+ store_stash -m "$stash_msg" -q $w_commit ||
+ die "$(gettext "Cannot save the current status")"
say Saved working directory and index state "$stash_msg"
if test -z "$patch_mode"
@@ -546,12 +577,13 @@ clear)
clear_stash "$@"
;;
create)
- if test $# -gt 0 && test "$1" = create
- then
- shift
- fi
+ shift
create_stash "$*" && echo "$w_commit"
;;
+store)
+ shift
+ store_stash "$@"
+ ;;
drop)
shift
drop_stash "$@"
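
The new store subcommand is exactly what the git-rebase.sh hunk earlier now relies on: "git stash store -m "autostash" -q $stash_sha1" records an already-created stash-shaped commit on refs/stash (creating the reflog if necessary) without going through save_stash.
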
diff --git a/git-submodule.sh b/git-submodule.sh
index 79bfaac9d4..945e296d30 100755
--- a/git-submodule.sh
+++ b/git-submodule.sh
@@ -14,10 +14,13 @@ USAGE="[--quiet] add [-b <branch>] [-f|--force] [--name <name>] [--reference <re
or: $dashless [--quiet] foreach [--recursive] <command>
or: $dashless [--quiet] sync [--recursive] [--] [<path>...]"
OPTIONS_SPEC=
+SUBDIRECTORY_OK=Yes
. git-sh-setup
. git-sh-i18n
. git-parse-remote
require_work_tree
+wt_prefix=$(git rev-parse --show-prefix)
+cd_to_toplevel
command=
branch=
@@ -106,14 +109,50 @@ resolve_relative_url ()
echo "${is_relative:+${up_path}}${remoteurl#./}"
}
+# Resolve a path to be relative to another path. This is intended for
+# converting submodule paths when git-submodule is run in a subdirectory
+# and only handles paths where the directory separator is '/'.
+#
+# The output is the first argument as a path relative to the second argument,
+# which defaults to $wt_prefix if it is omitted.
+relative_path ()
+{
+ local target curdir result
+ target=$1
+ curdir=${2-$wt_prefix}
+ curdir=${curdir%/}
+ result=
+
+ while test -n "$curdir"
+ do
+ case "$target" in
+ "$curdir/"*)
+ target=${target#"$curdir"/}
+ break
+ ;;
+ esac
+
+ result="${result}../"
+ if test "$curdir" = "${curdir%/*}"
+ then
+ curdir=
+ else
+ curdir="${curdir%/*}"
+ fi
+ done
+
+ echo "$result$target"
+}
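
As a worked example of relative_path (using made-up paths): with wt_prefix set to sub/, a submodule registered at sub/lib is reported as lib, while an explicit call of relative_path sub/lib doc would print ../sub/lib. This is what keeps the status, summary and update messages below meaningful when git submodule is invoked from a subdirectory.
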
+
#
# Get submodule info for registered submodules
# $@ = path to limit submodule list
#
module_list()
{
+ eval "set $(git rev-parse --sq --prefix "$wt_prefix" -- "$@")"
(
- git ls-files --error-unmatch --stage -- "$@" ||
+ git ls-files -z --error-unmatch --stage -- "$@" ||
echo "unmatched pathspec exists"
) |
perl -e '
@@ -121,6 +160,7 @@ module_list()
my ($null_sha1) = ("0" x 40);
my @out = ();
my $unmatched = 0;
+ $/ = "\0";
while (<STDIN>) {
if (/^unmatched pathspec/) {
$unmatched = 1;
@@ -282,6 +322,7 @@ isnumber()
cmd_add()
{
# parse $args after "submodule ... add".
+ reference_path=
while test $# -ne 0
do
case "$1" in
@@ -298,11 +339,11 @@ cmd_add()
;;
--reference)
case "$2" in '') usage ;; esac
- reference="--reference=$2"
+ reference_path=$2
shift
;;
--reference=*)
- reference="$1"
+ reference_path="${1#--reference=}"
;;
--name)
case "$2" in '') usage ;; esac
@@ -323,6 +364,14 @@ cmd_add()
shift
done
+ if test -n "$reference_path"
+ then
+ is_absolute_path "$reference_path" ||
+ reference_path="$wt_prefix$reference_path"
+
+ reference="--reference=$reference_path"
+ fi
+
repo=$1
sm_path=$2
@@ -335,9 +384,14 @@ cmd_add()
usage
fi
+ is_absolute_path "$sm_path" || sm_path="$wt_prefix$sm_path"
+
# assure repo is absolute or relative to parent
case "$repo" in
./*|../*)
+ test -z "$wt_prefix" ||
+ die "$(gettext "Relative path can only be used from the toplevel of the working tree")"
+
# dereference source url relative to parent's url
realrepo=$(resolve_relative_url "$repo") || exit
;;
@@ -471,21 +525,23 @@ cmd_foreach()
die_if_unmatched "$mode"
if test -e "$sm_path"/.git
then
- say "$(eval_gettext "Entering '\$prefix\$sm_path'")"
+ displaypath=$(relative_path "$sm_path")
+ say "$(eval_gettext "Entering '\$prefix\$displaypath'")"
name=$(module_name "$sm_path")
(
prefix="$prefix$sm_path/"
clear_local_git_env
- # we make $path available to scripts ...
- path=$sm_path
cd "$sm_path" &&
+ sm_path=$(relative_path "$sm_path") &&
+ # we make $path available to scripts ...
+ path=$sm_path &&
eval "$@" &&
if test -n "$recursive"
then
cmd_foreach "--recursive" "$@"
fi
) <&3 3<&- ||
- die "$(eval_gettext "Stopping at '\$sm_path'; script returned non-zero status.")"
+ die "$(eval_gettext "Stopping at '\$prefix\$displaypath'; script returned non-zero status.")"
fi
done
}
@@ -524,12 +580,14 @@ cmd_init()
die_if_unmatched "$mode"
name=$(module_name "$sm_path") || exit
+ displaypath=$(relative_path "$sm_path")
+
# Copy url setting when it is not set yet
if test -z "$(git config "submodule.$name.url")"
then
url=$(git config -f .gitmodules submodule."$name".url)
test -z "$url" &&
- die "$(eval_gettext "No url found for submodule path '\$sm_path' in .gitmodules")"
+ die "$(eval_gettext "No url found for submodule path '\$displaypath' in .gitmodules")"
# Possibly a url relative to parent
case "$url" in
@@ -538,9 +596,9 @@ cmd_init()
;;
esac
git config submodule."$name".url "$url" ||
- die "$(eval_gettext "Failed to register url for submodule path '\$sm_path'")"
+ die "$(eval_gettext "Failed to register url for submodule path '\$displaypath'")"
- say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$sm_path'")"
+ say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$displaypath'")"
fi
# Copy "update" setting when it is not set yet
@@ -548,7 +606,7 @@ cmd_init()
test -z "$upd" ||
test -n "$(git config submodule."$name".update)" ||
git config submodule."$name".update "$upd" ||
- die "$(eval_gettext "Failed to register update mode for submodule path '\$sm_path'")"
+ die "$(eval_gettext "Failed to register update mode for submodule path '\$displaypath'")"
done
}
@@ -594,27 +652,29 @@ cmd_deinit()
die_if_unmatched "$mode"
name=$(module_name "$sm_path") || exit
+ displaypath=$(relative_path "$sm_path")
+
# Remove the submodule work tree (unless the user already did it)
if test -d "$sm_path"
then
# Protect submodules containing a .git directory
if test -d "$sm_path/.git"
then
- echo >&2 "$(eval_gettext "Submodule work tree '\$sm_path' contains a .git directory")"
+ echo >&2 "$(eval_gettext "Submodule work tree '\$displaypath' contains a .git directory")"
die "$(eval_gettext "(use 'rm -rf' if you really want to remove it including all of its history)")"
fi
if test -z "$force"
then
git rm -qn "$sm_path" ||
- die "$(eval_gettext "Submodule work tree '\$sm_path' contains local modifications; use '-f' to discard them")"
+ die "$(eval_gettext "Submodule work tree '\$displaypath' contains local modifications; use '-f' to discard them")"
fi
rm -rf "$sm_path" &&
- say "$(eval_gettext "Cleared directory '\$sm_path'")" ||
- say "$(eval_gettext "Could not remove submodule work tree '\$sm_path'")"
+ say "$(eval_gettext "Cleared directory '\$displaypath'")" ||
+ say "$(eval_gettext "Could not remove submodule work tree '\$displaypath'")"
fi
- mkdir "$sm_path" || say "$(eval_gettext "Could not create empty submodule directory '\$sm_path'")"
+ mkdir "$sm_path" || say "$(eval_gettext "Could not create empty submodule directory '\$displaypath'")"
# Remove the .git/config entries (unless the user already did it)
if test -n "$(git config --get-regexp submodule."$name\.")"
@@ -623,7 +683,7 @@ cmd_deinit()
# the user later decides to init this submodule again
url=$(git config submodule."$name".url)
git config --remove-section submodule."$name" 2>/dev/null &&
- say "$(eval_gettext "Submodule '\$name' (\$url) unregistered for path '\$sm_path'")"
+ say "$(eval_gettext "Submodule '\$name' (\$url) unregistered for path '\$displaypath'")"
fi
done
}
@@ -717,9 +777,11 @@ cmd_update()
update_module=$(git config submodule."$name".update)
fi
+ displaypath=$(relative_path "$prefix$sm_path")
+
if test "$update_module" = "none"
then
- echo "Skipping submodule '$prefix$sm_path'"
+ echo "Skipping submodule '$displaypath'"
continue
fi
@@ -728,7 +790,7 @@ cmd_update()
		# Only mention uninitialized submodules when their
		# paths have been specified
test "$#" != "0" &&
- say "$(eval_gettext "Submodule path '\$prefix\$sm_path' not initialized
+ say "$(eval_gettext "Submodule path '\$displaypath' not initialized
Maybe you want to use 'update --init'?")"
continue
fi
@@ -741,7 +803,7 @@ Maybe you want to use 'update --init'?")"
else
subsha1=$(clear_local_git_env; cd "$sm_path" &&
git rev-parse --verify HEAD) ||
- die "$(eval_gettext "Unable to find current revision in submodule path '\$prefix\$sm_path'")"
+ die "$(eval_gettext "Unable to find current revision in submodule path '\$displaypath'")"
fi
if test -n "$remote"
@@ -774,7 +836,7 @@ Maybe you want to use 'update --init'?")"
(clear_local_git_env; cd "$sm_path" &&
( (rev=$(git rev-list -n 1 $sha1 --not --all 2>/dev/null) &&
test -z "$rev") || git-fetch)) ||
- die "$(eval_gettext "Unable to fetch in submodule path '\$prefix\$sm_path'")"
+ die "$(eval_gettext "Unable to fetch in submodule path '\$displaypath'")"
fi
# Is this something we just cloned?
@@ -788,20 +850,20 @@ Maybe you want to use 'update --init'?")"
case "$update_module" in
rebase)
command="git rebase"
- die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$prefix\$sm_path'")"
- say_msg="$(eval_gettext "Submodule path '\$prefix\$sm_path': rebased into '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$displaypath'")"
+ say_msg="$(eval_gettext "Submodule path '\$displaypath': rebased into '\$sha1'")"
must_die_on_failure=yes
;;
merge)
command="git merge"
- die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$prefix\$sm_path'")"
- say_msg="$(eval_gettext "Submodule path '\$prefix\$sm_path': merged in '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$displaypath'")"
+ say_msg="$(eval_gettext "Submodule path '\$displaypath': merged in '\$sha1'")"
must_die_on_failure=yes
;;
*)
command="git checkout $subforce -q"
- die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$prefix\$sm_path'")"
- say_msg="$(eval_gettext "Submodule path '\$prefix\$sm_path': checked out '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$displaypath'")"
+ say_msg="$(eval_gettext "Submodule path '\$displaypath': checked out '\$sha1'")"
;;
esac
@@ -828,7 +890,7 @@ Maybe you want to use 'update --init'?")"
res=$?
if test $res -gt 0
then
- die_msg="$(eval_gettext "Failed to recurse into submodule path '\$prefix\$sm_path'")"
+ die_msg="$(eval_gettext "Failed to recurse into submodule path '\$displaypath'")"
if test $res -eq 1
then
err="${err};$die_msg"
@@ -942,6 +1004,7 @@ cmd_summary() {
fi
cd_to_toplevel
+ eval "set $(git rev-parse --sq --prefix "$wt_prefix" -- "$@")"
	# Get the modified modules the user cares about
modules=$(git $diff_cmd $cached --ignore-submodules=dirty --raw $head -- "$@" |
sane_egrep '^:([0-7]* )?160000' |
@@ -991,16 +1054,18 @@ cmd_summary() {
! GIT_DIR="$name/.git" git-rev-parse -q --verify $sha1_dst^0 >/dev/null &&
missing_dst=t
+ display_name=$(relative_path "$name")
+
total_commits=
case "$missing_src,$missing_dst" in
t,)
- errmsg="$(eval_gettext " Warn: \$name doesn't contain commit \$sha1_src")"
+ errmsg="$(eval_gettext " Warn: \$display_name doesn't contain commit \$sha1_src")"
;;
,t)
- errmsg="$(eval_gettext " Warn: \$name doesn't contain commit \$sha1_dst")"
+ errmsg="$(eval_gettext " Warn: \$display_name doesn't contain commit \$sha1_dst")"
;;
t,t)
- errmsg="$(eval_gettext " Warn: \$name doesn't contain commits \$sha1_src and \$sha1_dst")"
+ errmsg="$(eval_gettext " Warn: \$display_name doesn't contain commits \$sha1_src and \$sha1_dst")"
;;
*)
errmsg=
@@ -1029,12 +1094,12 @@ cmd_summary() {
submodule="$(gettext "submodule")"
if test $mod_dst = 160000
then
- echo "* $name $sha1_abbr_src($blob)->$sha1_abbr_dst($submodule)$total_commits:"
+ echo "* $display_name $sha1_abbr_src($blob)->$sha1_abbr_dst($submodule)$total_commits:"
else
- echo "* $name $sha1_abbr_src($submodule)->$sha1_abbr_dst($blob)$total_commits:"
+ echo "* $display_name $sha1_abbr_src($submodule)->$sha1_abbr_dst($blob)$total_commits:"
fi
else
- echo "* $name $sha1_abbr_src...$sha1_abbr_dst$total_commits:"
+ echo "* $display_name $sha1_abbr_src...$sha1_abbr_dst$total_commits:"
fi
if test -n "$errmsg"
then
@@ -1118,7 +1183,7 @@ cmd_status()
die_if_unmatched "$mode"
name=$(module_name "$sm_path") || exit
url=$(git config submodule."$name".url)
- displaypath="$prefix$sm_path"
+ displaypath=$(relative_path "$prefix$sm_path")
if test "$stage" = U
then
say "U$sha1 $displaypath"
@@ -1129,16 +1194,16 @@ cmd_status()
say "-$sha1 $displaypath"
continue;
fi
- set_name_rev "$sm_path" "$sha1"
if git diff-files --ignore-submodules=dirty --quiet -- "$sm_path"
then
+ set_name_rev "$sm_path" "$sha1"
say " $sha1 $displaypath$revname"
else
if test -z "$cached"
then
sha1=$(clear_local_git_env; cd "$sm_path" && git rev-parse --verify HEAD)
- set_name_rev "$sm_path" "$sha1"
fi
+ set_name_rev "$sm_path" "$sha1"
say "+$sha1 $displaypath$revname"
fi
@@ -1213,7 +1278,8 @@ cmd_sync()
if git config "submodule.$name.url" >/dev/null 2>/dev/null
then
- say "$(eval_gettext "Synchronizing submodule url for '\$prefix\$sm_path'")"
+ displaypath=$(relative_path "$prefix$sm_path")
+ say "$(eval_gettext "Synchronizing submodule url for '\$displaypath'")"
git config submodule."$name".url "$super_config_url"
if test -e "$sm_path"/.git
diff --git a/git-svn.perl b/git-svn.perl
index 36083c1072..ff1ce3d351 100755
--- a/git-svn.perl
+++ b/git-svn.perl
@@ -1246,7 +1246,7 @@ sub cmd_rebase {
return;
}
if (command(qw/diff-index HEAD --/)) {
- print STDERR "Cannot rebase with uncommited changes:\n";
+ print STDERR "Cannot rebase with uncommitted changes:\n";
command_noisy('status');
exit 1;
}
diff --git a/git-web--browse.sh b/git-web--browse.sh
index 9f446798d4..1d72ec760e 100755
--- a/git-web--browse.sh
+++ b/git-web--browse.sh
@@ -32,8 +32,9 @@ valid_custom_tool()
valid_tool() {
case "$1" in
firefox | iceweasel | seamonkey | iceape | \
- chrome | google-chrome | chromium | chromium-browser |\
- konqueror | opera | w3m | elinks | links | lynx | dillo | open | start)
+ chrome | google-chrome | chromium | chromium-browser | \
+ konqueror | opera | w3m | elinks | links | lynx | dillo | open | \
+ start | cygstart)
;; # happy
*)
valid_custom_tool "$1" || return 1
@@ -127,6 +128,10 @@ if test -z "$browser" ; then
if test -x /bin/start; then
browser_candidates="start $browser_candidates"
fi
+ # /usr/bin/cygstart indicates Cygwin
+ if test -x /usr/bin/cygstart; then
+ browser_candidates="cygstart $browser_candidates"
+ fi
for i in $browser_candidates; do
init_browser_path $i
@@ -174,7 +179,7 @@ konqueror)
;;
esac
;;
-w3m|elinks|links|lynx|open)
+w3m|elinks|links|lynx|open|cygstart)
"$browser_path" "$@"
;;
start)
diff --git a/http.c b/http.c
index 92aba59082..2d086aedfa 100644
--- a/http.c
+++ b/http.c
@@ -228,9 +228,15 @@ static void init_curl_http_auth(CURL *result)
#else
{
static struct strbuf up = STRBUF_INIT;
- strbuf_reset(&up);
- strbuf_addf(&up, "%s:%s",
- http_auth.username, http_auth.password);
+ /*
+ * Note that we assume we only ever have a single set of
+ * credentials in a given program run, so we do not have
+ * to worry about updating this buffer, only setting its
+ * initial value.
+ */
+ if (!up.len)
+ strbuf_addf(&up, "%s:%s",
+ http_auth.username, http_auth.password);
curl_easy_setopt(result, CURLOPT_USERPWD, up.buf);
}
#endif
diff --git a/notes-merge.c b/notes-merge.c
index 0f67bd3f96..ab18857074 100644
--- a/notes-merge.c
+++ b/notes-merge.c
@@ -9,6 +9,7 @@
#include "notes.h"
#include "notes-merge.h"
#include "strbuf.h"
+#include "notes-utils.h"
struct notes_merge_pair {
unsigned char obj[20], base[20], local[20], remote[20];
@@ -530,32 +531,6 @@ static int merge_from_diffs(struct notes_merge_options *o,
return conflicts ? -1 : 1;
}
-void create_notes_commit(struct notes_tree *t, struct commit_list *parents,
- const struct strbuf *msg, unsigned char *result_sha1)
-{
- unsigned char tree_sha1[20];
-
- assert(t->initialized);
-
- if (write_notes_tree(t, tree_sha1))
- die("Failed to write notes tree to database");
-
- if (!parents) {
- /* Deduce parent commit from t->ref */
- unsigned char parent_sha1[20];
- if (!read_ref(t->ref, parent_sha1)) {
- struct commit *parent = lookup_commit(parent_sha1);
- if (!parent || parse_commit(parent))
- die("Failed to find/parse commit %s", t->ref);
- commit_list_insert(parent, &parents);
- }
- /* else: t->ref points to nothing, assume root/orphan commit */
- }
-
- if (commit_tree(msg, tree_sha1, parents, result_sha1, NULL, NULL))
- die("Failed to commit notes tree to database");
-}
-
int notes_merge(struct notes_merge_options *o,
struct notes_tree *local_tree,
unsigned char *result_sha1)
diff --git a/notes-merge.h b/notes-merge.h
index 0c11b173a1..1d01f6aacf 100644
--- a/notes-merge.h
+++ b/notes-merge.h
@@ -26,20 +26,6 @@ struct notes_merge_options {
void init_notes_merge_options(struct notes_merge_options *o);
/*
- * Create new notes commit from the given notes tree
- *
- * Properties of the created commit:
- * - tree: the result of converting t to a tree object with write_notes_tree().
- * - parents: the given parents OR (if NULL) the commit referenced by t->ref.
- * - author/committer: the default determined by commmit_tree().
- * - commit message: msg
- *
- * The resulting commit SHA1 is stored in result_sha1.
- */
-void create_notes_commit(struct notes_tree *t, struct commit_list *parents,
- const struct strbuf *msg, unsigned char *result_sha1);
-
-/*
* Merge notes from o->remote_ref into o->local_ref
*
* The given notes_tree 'local_tree' must be the notes_tree referenced by the
diff --git a/notes-utils.c b/notes-utils.c
new file mode 100644
index 0000000000..9107c379d9
--- /dev/null
+++ b/notes-utils.c
@@ -0,0 +1,157 @@
+#include "cache.h"
+#include "commit.h"
+#include "refs.h"
+#include "notes-utils.h"
+
+void create_notes_commit(struct notes_tree *t, struct commit_list *parents,
+ const struct strbuf *msg, unsigned char *result_sha1)
+{
+ unsigned char tree_sha1[20];
+
+ assert(t->initialized);
+
+ if (write_notes_tree(t, tree_sha1))
+ die("Failed to write notes tree to database");
+
+ if (!parents) {
+ /* Deduce parent commit from t->ref */
+ unsigned char parent_sha1[20];
+ if (!read_ref(t->ref, parent_sha1)) {
+ struct commit *parent = lookup_commit(parent_sha1);
+ if (!parent || parse_commit(parent))
+ die("Failed to find/parse commit %s", t->ref);
+ commit_list_insert(parent, &parents);
+ }
+ /* else: t->ref points to nothing, assume root/orphan commit */
+ }
+
+ if (commit_tree(msg, tree_sha1, parents, result_sha1, NULL, NULL))
+ die("Failed to commit notes tree to database");
+}
+
+void commit_notes(struct notes_tree *t, const char *msg)
+{
+ struct strbuf buf = STRBUF_INIT;
+ unsigned char commit_sha1[20];
+
+ if (!t)
+ t = &default_notes_tree;
+ if (!t->initialized || !t->ref || !*t->ref)
+ die(_("Cannot commit uninitialized/unreferenced notes tree"));
+ if (!t->dirty)
+ return; /* don't have to commit an unchanged tree */
+
+ /* Prepare commit message and reflog message */
+ strbuf_addstr(&buf, msg);
+ if (buf.buf[buf.len - 1] != '\n')
+ strbuf_addch(&buf, '\n'); /* Make sure msg ends with newline */
+
+ create_notes_commit(t, NULL, &buf, commit_sha1);
+ strbuf_insert(&buf, 0, "notes: ", 7); /* commit message starts at index 7 */
+ update_ref(buf.buf, t->ref, commit_sha1, NULL, 0, DIE_ON_ERR);
+
+ strbuf_release(&buf);
+}
+
+static combine_notes_fn parse_combine_notes_fn(const char *v)
+{
+ if (!strcasecmp(v, "overwrite"))
+ return combine_notes_overwrite;
+ else if (!strcasecmp(v, "ignore"))
+ return combine_notes_ignore;
+ else if (!strcasecmp(v, "concatenate"))
+ return combine_notes_concatenate;
+ else if (!strcasecmp(v, "cat_sort_uniq"))
+ return combine_notes_cat_sort_uniq;
+ else
+ return NULL;
+}
+
+static int notes_rewrite_config(const char *k, const char *v, void *cb)
+{
+ struct notes_rewrite_cfg *c = cb;
+ if (!prefixcmp(k, "notes.rewrite.") && !strcmp(k+14, c->cmd)) {
+ c->enabled = git_config_bool(k, v);
+ return 0;
+ } else if (!c->mode_from_env && !strcmp(k, "notes.rewritemode")) {
+		if (!v)
+			return config_error_nonbool(k);
+ c->combine = parse_combine_notes_fn(v);
+ if (!c->combine) {
+ error(_("Bad notes.rewriteMode value: '%s'"), v);
+ return 1;
+ }
+ return 0;
+ } else if (!c->refs_from_env && !strcmp(k, "notes.rewriteref")) {
+ /* note that a refs/ prefix is implied in the
+ * underlying for_each_glob_ref */
+ if (!prefixcmp(v, "refs/notes/"))
+ string_list_add_refs_by_glob(c->refs, v);
+ else
+ warning(_("Refusing to rewrite notes in %s"
+ " (outside of refs/notes/)"), v);
+ return 0;
+ }
+
+ return 0;
+}
+
+
+struct notes_rewrite_cfg *init_copy_notes_for_rewrite(const char *cmd)
+{
+ struct notes_rewrite_cfg *c = xmalloc(sizeof(struct notes_rewrite_cfg));
+ const char *rewrite_mode_env = getenv(GIT_NOTES_REWRITE_MODE_ENVIRONMENT);
+ const char *rewrite_refs_env = getenv(GIT_NOTES_REWRITE_REF_ENVIRONMENT);
+ c->cmd = cmd;
+ c->enabled = 1;
+ c->combine = combine_notes_concatenate;
+ c->refs = xcalloc(1, sizeof(struct string_list));
+ c->refs->strdup_strings = 1;
+ c->refs_from_env = 0;
+ c->mode_from_env = 0;
+ if (rewrite_mode_env) {
+ c->mode_from_env = 1;
+ c->combine = parse_combine_notes_fn(rewrite_mode_env);
+ if (!c->combine)
+ /* TRANSLATORS: The first %s is the name of the
+ environment variable, the second %s is its value */
+ error(_("Bad %s value: '%s'"), GIT_NOTES_REWRITE_MODE_ENVIRONMENT,
+ rewrite_mode_env);
+ }
+ if (rewrite_refs_env) {
+ c->refs_from_env = 1;
+ string_list_add_refs_from_colon_sep(c->refs, rewrite_refs_env);
+ }
+ git_config(notes_rewrite_config, c);
+ if (!c->enabled || !c->refs->nr) {
+ string_list_clear(c->refs, 0);
+ free(c->refs);
+ free(c);
+ return NULL;
+ }
+ c->trees = load_notes_trees(c->refs);
+ string_list_clear(c->refs, 0);
+ free(c->refs);
+ return c;
+}
+
+int copy_note_for_rewrite(struct notes_rewrite_cfg *c,
+ const unsigned char *from_obj, const unsigned char *to_obj)
+{
+ int ret = 0;
+ int i;
+ for (i = 0; c->trees[i]; i++)
+ ret = copy_note(c->trees[i], from_obj, to_obj, 1, c->combine) || ret;
+ return ret;
+}
+
+void finish_copy_notes_for_rewrite(struct notes_rewrite_cfg *c, const char *msg)
+{
+ int i;
+ for (i = 0; c->trees[i]; i++) {
+ commit_notes(c->trees[i], msg);
+ free_notes(c->trees[i]);
+ }
+ free(c->trees);
+ free(c);
+}
diff --git a/notes-utils.h b/notes-utils.h
new file mode 100644
index 0000000000..b4cb1bfb43
--- /dev/null
+++ b/notes-utils.h
@@ -0,0 +1,37 @@
+#ifndef NOTES_UTILS_H
+#define NOTES_UTILS_H
+
+#include "notes.h"
+
+/*
+ * Create new notes commit from the given notes tree
+ *
+ * Properties of the created commit:
+ * - tree: the result of converting t to a tree object with write_notes_tree().
+ * - parents: the given parents OR (if NULL) the commit referenced by t->ref.
+ * - author/committer: the default determined by commit_tree().
+ * - commit message: msg
+ *
+ * The resulting commit SHA1 is stored in result_sha1.
+ */
+void create_notes_commit(struct notes_tree *t, struct commit_list *parents,
+ const struct strbuf *msg, unsigned char *result_sha1);
+
+void commit_notes(struct notes_tree *t, const char *msg);
+
+struct notes_rewrite_cfg {
+ struct notes_tree **trees;
+ const char *cmd;
+ int enabled;
+ combine_notes_fn combine;
+ struct string_list *refs;
+ int refs_from_env;
+ int mode_from_env;
+};
+
+struct notes_rewrite_cfg *init_copy_notes_for_rewrite(const char *cmd);
+int copy_note_for_rewrite(struct notes_rewrite_cfg *c,
+ const unsigned char *from_obj, const unsigned char *to_obj);
+void finish_copy_notes_for_rewrite(struct notes_rewrite_cfg *c, const char *msg);
+
+#endif
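
To see how these pieces fit together, here is a minimal, hypothetical sketch (the helper and surrounding command names are invented, not taken from this patch) of a command that rewrites objects and wants notes to follow them. Which notes refs are copied, and how colliding notes are combined, is governed by the notes.rewrite.<command>, notes.rewriteMode and notes.rewriteRef settings (or their GIT_NOTES_REWRITE_* environment overrides) parsed by notes_rewrite_config() above.

#include "cache.h"
#include "notes-utils.h"

/*
 * Copy notes from each rewritten object to its replacement, honoring
 * the user's notes.rewrite.* configuration.  Sketch only.
 */
static void copy_notes_after_rewrite(const unsigned char (*from)[20],
				     const unsigned char (*to)[20], int nr)
{
	struct notes_rewrite_cfg *cfg = init_copy_notes_for_rewrite("amend");
	int i;

	if (!cfg)
		return; /* note rewriting disabled or nothing configured */
	for (i = 0; i < nr; i++)
		copy_note_for_rewrite(cfg, from[i], to[i]);
	/* Commits one notes tree per configured ref and frees cfg. */
	finish_copy_notes_for_rewrite(cfg, "Notes added by an example rewrite");
}
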
diff --git a/prio-queue.c b/prio-queue.c
new file mode 100644
index 0000000000..c9f8c6d253
--- /dev/null
+++ b/prio-queue.c
@@ -0,0 +1,84 @@
+#include "cache.h"
+#include "commit.h"
+#include "prio-queue.h"
+
+void prio_queue_reverse(struct prio_queue *queue)
+{
+ int i, j;
+
+ if (queue->compare != NULL)
+ die("BUG: prio_queue_reverse() on non-LIFO queue");
+ for (i = 0; i <= (j = (queue->nr - 1) - i); i++) {
+ struct commit *swap = queue->array[i];
+ queue->array[i] = queue->array[j];
+ queue->array[j] = swap;
+ }
+}
+
+void clear_prio_queue(struct prio_queue *queue)
+{
+ free(queue->array);
+ queue->nr = 0;
+ queue->alloc = 0;
+ queue->array = NULL;
+}
+
+void prio_queue_put(struct prio_queue *queue, void *thing)
+{
+ prio_queue_compare_fn compare = queue->compare;
+ int ix, parent;
+
+ /* Append at the end */
+ ALLOC_GROW(queue->array, queue->nr + 1, queue->alloc);
+ queue->array[queue->nr++] = thing;
+ if (!compare)
+ return; /* LIFO */
+
+ /* Bubble up the new one */
+ for (ix = queue->nr - 1; ix; ix = parent) {
+ parent = (ix - 1) / 2;
+ if (compare(queue->array[parent], queue->array[ix],
+ queue->cb_data) <= 0)
+ break;
+
+ thing = queue->array[parent];
+ queue->array[parent] = queue->array[ix];
+ queue->array[ix] = thing;
+ }
+}
+
+void *prio_queue_get(struct prio_queue *queue)
+{
+ void *result, *swap;
+ int ix, child;
+ prio_queue_compare_fn compare = queue->compare;
+
+ if (!queue->nr)
+ return NULL;
+ if (!compare)
+ return queue->array[--queue->nr]; /* LIFO */
+
+ result = queue->array[0];
+ if (!--queue->nr)
+ return result;
+
+ queue->array[0] = queue->array[queue->nr];
+
+ /* Push down the one at the root */
+ for (ix = 0; ix * 2 + 1 < queue->nr; ix = child) {
+ child = ix * 2 + 1; /* left */
+ if ((child + 1 < queue->nr) &&
+ (compare(queue->array[child], queue->array[child + 1],
+ queue->cb_data) >= 0))
+ child++; /* use right child */
+
+ if (compare(queue->array[ix], queue->array[child],
+ queue->cb_data) <= 0)
+ break;
+
+ swap = queue->array[child];
+ queue->array[child] = queue->array[ix];
+ queue->array[ix] = swap;
+ }
+ return result;
+}
diff --git a/prio-queue.h b/prio-queue.h
new file mode 100644
index 0000000000..9c3cd1f875
--- /dev/null
+++ b/prio-queue.h
@@ -0,0 +1,48 @@
+#ifndef PRIO_QUEUE_H
+#define PRIO_QUEUE_H
+
+/*
+ * A priority queue implementation, primarily for keeping track of
+ * commits in the 'date-order' so that we process them from new to old
+ * as they are discovered, but can be used to hold any pointer to
+ * struct. The caller is responsible for supplying a function to
+ * compare two "things".
+ *
+ * Alternatively, this data structure can also be used as a LIFO stack
+ * by specifying NULL as the comparison function.
+ */
+
+/*
+ * Compare two "things", one and two; the third parameter is cb_data
+ * in the prio_queue structure. The result is returned as a sign of
+ * the return value, being the same as the sign of the result of
+ * subtracting "two" from "one" (i.e. negative if "one" sorts earlier
+ * than "two").
+ */
+typedef int (*prio_queue_compare_fn)(const void *one, const void *two, void *cb_data);
+
+struct prio_queue {
+ prio_queue_compare_fn compare;
+ void *cb_data;
+ int alloc, nr;
+ void **array;
+};
+
+/*
+ * Add the "thing" to the queue.
+ */
+extern void prio_queue_put(struct prio_queue *, void *thing);
+
+/*
+ * Extract the "thing" that compares the smallest out of the queue,
+ * or NULL. If the compare function is NULL, the queue acts as a LIFO
+ * stack.
+ */
+extern void *prio_queue_get(struct prio_queue *);
+
+extern void clear_prio_queue(struct prio_queue *);
+
+/* Reverse the LIFO elements */
+extern void prio_queue_reverse(struct prio_queue *);
+
+#endif /* PRIO_QUEUE_H */
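
A minimal usage sketch of the API declared above (not taken from this patch; the compare callback and function names are invented, and it assumes struct commit's date field): commits are fed in and drained newest-first.

#include "cache.h"
#include "commit.h"
#include "prio-queue.h"

static int compare_commits_by_date(const void *a, const void *b, void *unused)
{
	const struct commit *one = a, *two = b;

	/* negative means "one" comes out first; here, newer dates first */
	if (one->date > two->date)
		return -1;
	if (one->date < two->date)
		return 1;
	return 0;
}

static void visit_newest_first(struct commit **commits, int nr)
{
	struct prio_queue queue = { compare_commits_by_date };
	struct commit *commit;
	int i;

	for (i = 0; i < nr; i++)
		prio_queue_put(&queue, commits[i]);
	while ((commit = prio_queue_get(&queue)) != NULL)
		; /* handle "commit" here, newest first */
	clear_prio_queue(&queue);
}

Passing a NULL compare function instead turns the same structure into a LIFO stack, which prio_queue_reverse() can then flip into FIFO order.
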
diff --git a/read-cache.c b/read-cache.c
index b297addb57..d5201f9b06 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -67,6 +67,61 @@ void rename_index_entry_at(struct index_state *istate, int nr, const char *new_n
add_index_entry(istate, new, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
}
+void fill_stat_data(struct stat_data *sd, struct stat *st)
+{
+ sd->sd_ctime.sec = (unsigned int)st->st_ctime;
+ sd->sd_mtime.sec = (unsigned int)st->st_mtime;
+ sd->sd_ctime.nsec = ST_CTIME_NSEC(*st);
+ sd->sd_mtime.nsec = ST_MTIME_NSEC(*st);
+ sd->sd_dev = st->st_dev;
+ sd->sd_ino = st->st_ino;
+ sd->sd_uid = st->st_uid;
+ sd->sd_gid = st->st_gid;
+ sd->sd_size = st->st_size;
+}
+
+int match_stat_data(const struct stat_data *sd, struct stat *st)
+{
+ int changed = 0;
+
+ if (sd->sd_mtime.sec != (unsigned int)st->st_mtime)
+ changed |= MTIME_CHANGED;
+ if (trust_ctime && check_stat &&
+ sd->sd_ctime.sec != (unsigned int)st->st_ctime)
+ changed |= CTIME_CHANGED;
+
+#ifdef USE_NSEC
+ if (check_stat && sd->sd_mtime.nsec != ST_MTIME_NSEC(*st))
+ changed |= MTIME_CHANGED;
+ if (trust_ctime && check_stat &&
+ sd->sd_ctime.nsec != ST_CTIME_NSEC(*st))
+ changed |= CTIME_CHANGED;
+#endif
+
+ if (check_stat) {
+ if (sd->sd_uid != (unsigned int) st->st_uid ||
+ sd->sd_gid != (unsigned int) st->st_gid)
+ changed |= OWNER_CHANGED;
+ if (sd->sd_ino != (unsigned int) st->st_ino)
+ changed |= INODE_CHANGED;
+ }
+
+#ifdef USE_STDEV
+ /*
+ * st_dev breaks on network filesystems where different
+ * clients will have different views of what "device"
+ * the filesystem is on
+ */
+ if (check_stat && sd->sd_dev != (unsigned int) st->st_dev)
+ changed |= INODE_CHANGED;
+#endif
+
+ if (sd->sd_size != (unsigned int) st->st_size)
+ changed |= DATA_CHANGED;
+
+ return changed;
+}
+
/*
* This only updates the "non-critical" parts of the directory
* cache, ie the parts that aren't tracked by GIT, and only used
@@ -74,15 +129,7 @@ void rename_index_entry_at(struct index_state *istate, int nr, const char *new_n
*/
void fill_stat_cache_info(struct cache_entry *ce, struct stat *st)
{
- ce->ce_ctime.sec = (unsigned int)st->st_ctime;
- ce->ce_mtime.sec = (unsigned int)st->st_mtime;
- ce->ce_ctime.nsec = ST_CTIME_NSEC(*st);
- ce->ce_mtime.nsec = ST_MTIME_NSEC(*st);
- ce->ce_dev = st->st_dev;
- ce->ce_ino = st->st_ino;
- ce->ce_uid = st->st_uid;
- ce->ce_gid = st->st_gid;
- ce->ce_size = st->st_size;
+ fill_stat_data(&ce->ce_stat_data, st);
if (assume_unchanged)
ce->ce_flags |= CE_VALID;
@@ -195,43 +242,11 @@ static int ce_match_stat_basic(const struct cache_entry *ce, struct stat *st)
default:
die("internal error: ce_mode is %o", ce->ce_mode);
}
- if (ce->ce_mtime.sec != (unsigned int)st->st_mtime)
- changed |= MTIME_CHANGED;
- if (trust_ctime && check_stat &&
- ce->ce_ctime.sec != (unsigned int)st->st_ctime)
- changed |= CTIME_CHANGED;
-
-#ifdef USE_NSEC
- if (check_stat && ce->ce_mtime.nsec != ST_MTIME_NSEC(*st))
- changed |= MTIME_CHANGED;
- if (trust_ctime && check_stat &&
- ce->ce_ctime.nsec != ST_CTIME_NSEC(*st))
- changed |= CTIME_CHANGED;
-#endif
-
- if (check_stat) {
- if (ce->ce_uid != (unsigned int) st->st_uid ||
- ce->ce_gid != (unsigned int) st->st_gid)
- changed |= OWNER_CHANGED;
- if (ce->ce_ino != (unsigned int) st->st_ino)
- changed |= INODE_CHANGED;
- }
-#ifdef USE_STDEV
- /*
- * st_dev breaks on network filesystems where different
- * clients will have different views of what "device"
- * the filesystem is on
- */
- if (check_stat && ce->ce_dev != (unsigned int) st->st_dev)
- changed |= INODE_CHANGED;
-#endif
-
- if (ce->ce_size != (unsigned int) st->st_size)
- changed |= DATA_CHANGED;
+ changed |= match_stat_data(&ce->ce_stat_data, st);
/* Racily smudged entry? */
- if (!ce->ce_size) {
+ if (!ce->ce_stat_data.sd_size) {
if (!is_empty_blob_sha1(ce->sha1))
changed |= DATA_CHANGED;
}
@@ -246,11 +261,11 @@ static int is_racy_timestamp(const struct index_state *istate,
istate->timestamp.sec &&
#ifdef USE_NSEC
/* nanosecond timestamped files can also be racy! */
- (istate->timestamp.sec < ce->ce_mtime.sec ||
- (istate->timestamp.sec == ce->ce_mtime.sec &&
- istate->timestamp.nsec <= ce->ce_mtime.nsec))
+ (istate->timestamp.sec < ce->ce_stat_data.sd_mtime.sec ||
+ (istate->timestamp.sec == ce->ce_stat_data.sd_mtime.sec &&
+ istate->timestamp.nsec <= ce->ce_stat_data.sd_mtime.nsec))
#else
- istate->timestamp.sec <= ce->ce_mtime.sec
+ istate->timestamp.sec <= ce->ce_stat_data.sd_mtime.sec
#endif
);
}
@@ -342,7 +357,7 @@ int ie_modified(const struct index_state *istate,
* then we know it is.
*/
if ((changed & DATA_CHANGED) &&
- (S_ISGITLINK(ce->ce_mode) || ce->ce_size != 0))
+ (S_ISGITLINK(ce->ce_mode) || ce->ce_stat_data.sd_size != 0))
return changed;
changed_fs = ce_modified_check_fs(ce, st);
@@ -1324,16 +1339,16 @@ static struct cache_entry *cache_entry_from_ondisk(struct ondisk_cache_entry *on
{
struct cache_entry *ce = xmalloc(cache_entry_size(len));
- ce->ce_ctime.sec = ntoh_l(ondisk->ctime.sec);
- ce->ce_mtime.sec = ntoh_l(ondisk->mtime.sec);
- ce->ce_ctime.nsec = ntoh_l(ondisk->ctime.nsec);
- ce->ce_mtime.nsec = ntoh_l(ondisk->mtime.nsec);
- ce->ce_dev = ntoh_l(ondisk->dev);
- ce->ce_ino = ntoh_l(ondisk->ino);
+ ce->ce_stat_data.sd_ctime.sec = ntoh_l(ondisk->ctime.sec);
+ ce->ce_stat_data.sd_mtime.sec = ntoh_l(ondisk->mtime.sec);
+ ce->ce_stat_data.sd_ctime.nsec = ntoh_l(ondisk->ctime.nsec);
+ ce->ce_stat_data.sd_mtime.nsec = ntoh_l(ondisk->mtime.nsec);
+ ce->ce_stat_data.sd_dev = ntoh_l(ondisk->dev);
+ ce->ce_stat_data.sd_ino = ntoh_l(ondisk->ino);
ce->ce_mode = ntoh_l(ondisk->mode);
- ce->ce_uid = ntoh_l(ondisk->uid);
- ce->ce_gid = ntoh_l(ondisk->gid);
- ce->ce_size = ntoh_l(ondisk->size);
+ ce->ce_stat_data.sd_uid = ntoh_l(ondisk->uid);
+ ce->ce_stat_data.sd_gid = ntoh_l(ondisk->gid);
+ ce->ce_stat_data.sd_size = ntoh_l(ondisk->size);
ce->ce_flags = flags & ~CE_NAMEMASK;
ce->ce_namelen = len;
hashcpy(ce->sha1, ondisk->sha1);
@@ -1611,7 +1626,7 @@ static void ce_smudge_racily_clean_entry(struct cache_entry *ce)
* The only thing we care about in this function is to smudge the
* falsely clean entry due to touch-update-touch race, so we leave
* everything else as they are. We are called for entries whose
- * ce_mtime match the index file mtime.
+ * ce_stat_data.sd_mtime match the index file mtime.
*
* Note that this actually does not do much for gitlinks, for
* which ce_match_stat_basic() always goes to the actual
@@ -1650,7 +1665,7 @@ static void ce_smudge_racily_clean_entry(struct cache_entry *ce)
* file, and never calls us, so the cached size information
* for "frotz" stays 6 which does not match the filesystem.
*/
- ce->ce_size = 0;
+ ce->ce_stat_data.sd_size = 0;
}
}
@@ -1660,16 +1675,16 @@ static char *copy_cache_entry_to_ondisk(struct ondisk_cache_entry *ondisk,
{
short flags;
- ondisk->ctime.sec = htonl(ce->ce_ctime.sec);
- ondisk->mtime.sec = htonl(ce->ce_mtime.sec);
- ondisk->ctime.nsec = htonl(ce->ce_ctime.nsec);
- ondisk->mtime.nsec = htonl(ce->ce_mtime.nsec);
- ondisk->dev = htonl(ce->ce_dev);
- ondisk->ino = htonl(ce->ce_ino);
+ ondisk->ctime.sec = htonl(ce->ce_stat_data.sd_ctime.sec);
+ ondisk->mtime.sec = htonl(ce->ce_stat_data.sd_mtime.sec);
+ ondisk->ctime.nsec = htonl(ce->ce_stat_data.sd_ctime.nsec);
+ ondisk->mtime.nsec = htonl(ce->ce_stat_data.sd_mtime.nsec);
+ ondisk->dev = htonl(ce->ce_stat_data.sd_dev);
+ ondisk->ino = htonl(ce->ce_stat_data.sd_ino);
ondisk->mode = htonl(ce->ce_mode);
- ondisk->uid = htonl(ce->ce_uid);
- ondisk->gid = htonl(ce->ce_gid);
- ondisk->size = htonl(ce->ce_size);
+ ondisk->uid = htonl(ce->ce_stat_data.sd_uid);
+ ondisk->gid = htonl(ce->ce_stat_data.sd_gid);
+ ondisk->size = htonl(ce->ce_stat_data.sd_size);
hashcpy(ondisk->sha1, ce->sha1);
flags = ce->ce_flags;
@@ -1936,3 +1951,33 @@ void *read_blob_data_from_index(struct index_state *istate, const char *path, un
*size = sz;
return data;
}
+
+void stat_validity_clear(struct stat_validity *sv)
+{
+ free(sv->sd);
+ sv->sd = NULL;
+}
+
+int stat_validity_check(struct stat_validity *sv, const char *path)
+{
+ struct stat st;
+
+ if (stat(path, &st) < 0)
+ return sv->sd == NULL;
+ if (!sv->sd)
+ return 0;
+ return S_ISREG(st.st_mode) && !match_stat_data(sv->sd, &st);
+}
+
+void stat_validity_update(struct stat_validity *sv, int fd)
+{
+ struct stat st;
+
+ if (fstat(fd, &st) < 0 || !S_ISREG(st.st_mode))
+ stat_validity_clear(sv);
+ else {
+ if (!sv->sd)
+ sv->sd = xcalloc(1, sizeof(struct stat_data));
+ fill_stat_data(sv->sd, &st);
+ }
+}
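
The new stat_validity helpers are meant for the pattern sketched below (a hypothetical example, not code from this patch): remember the stat data of a file when it is read, and later use a single stat() to decide whether the in-memory copy can still be trusted. The refs.c changes that follow use exactly this to notice when another process rewrites packed-refs.

#include "cache.h"

static struct stat_validity cfg_validity;

/* Re-read "path" only when its stat data no longer matches. */
static void maybe_reload(const char *path)
{
	FILE *f;

	if (stat_validity_check(&cfg_validity, path))
		return; /* unchanged since the last read (or still absent) */

	f = fopen(path, "r");
	if (!f) {
		stat_validity_clear(&cfg_validity);
		/* ... also drop the stale in-memory copy here ... */
		return;
	}
	stat_validity_update(&cfg_validity, fileno(f));
	/* ... re-read the file into the in-memory cache here ... */
	fclose(f);
}
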
diff --git a/refs.c b/refs.c
index 42a7e17f6b..4302206649 100644
--- a/refs.c
+++ b/refs.c
@@ -750,6 +750,21 @@ static int do_for_each_entry_in_dirs(struct ref_dir *dir1,
}
/*
+ * Load all of the refs from the dir into our in-memory cache. The hard work
+ * of loading loose refs is done by get_ref_dir(), so we just need to recurse
+ * through all of the sub-directories. We do not even need to care about
+ * sorting, as traversal order does not matter to us.
+ */
+static void prime_ref_dir(struct ref_dir *dir)
+{
+ int i;
+ for (i = 0; i < dir->nr; i++) {
+ struct ref_entry *entry = dir->entries[i];
+ if (entry->flag & REF_DIR)
+ prime_ref_dir(get_ref_dir(entry));
+ }
+}
+/*
* Return true iff refname1 and refname2 conflict with each other.
* Two reference names conflict if one of them exactly matches the
* leading components of the other; e.g., "foo/bar" conflicts with
@@ -806,6 +821,30 @@ static int is_refname_available(const char *refname, const char *oldrefname,
return 1;
}
+struct packed_ref_cache {
+ struct ref_entry *root;
+
+ /*
+ * Count of references to the data structure in this instance,
+ * including the pointer from ref_cache::packed if any. The
+ * data will not be freed as long as the reference count is
+ * nonzero.
+ */
+ unsigned int referrers;
+
+ /*
+ * Iff the packed-refs file associated with this instance is
+ * currently locked for writing, this points at the associated
+ * lock (which is owned by somebody else). The referrer count
+ * is also incremented when the file is locked and decremented
+ * when it is unlocked.
+ */
+ struct lock_file *lock;
+
+ /* The metadata from when this packed-refs cache was read */
+ struct stat_validity validity;
+};
+
/*
* Future: need to be in "struct repository"
* when doing a full libification.
@@ -813,7 +852,7 @@ static int is_refname_available(const char *refname, const char *oldrefname,
static struct ref_cache {
struct ref_cache *next;
struct ref_entry *loose;
- struct ref_entry *packed;
+ struct packed_ref_cache *packed;
/*
* The submodule name, or "" for the main repo. We allocate
* length 1 rather than FLEX_ARRAY so that the main ref_cache
@@ -822,11 +861,42 @@ static struct ref_cache {
char name[1];
} ref_cache, *submodule_ref_caches;
+/* Lock used for the main packed-refs file: */
+static struct lock_file packlock;
+
+/*
+ * Increment the reference count of *packed_refs.
+ */
+static void acquire_packed_ref_cache(struct packed_ref_cache *packed_refs)
+{
+ packed_refs->referrers++;
+}
+
+/*
+ * Decrease the reference count of *packed_refs. If it goes to zero,
+ * free *packed_refs and return true; otherwise return false.
+ */
+static int release_packed_ref_cache(struct packed_ref_cache *packed_refs)
+{
+ if (!--packed_refs->referrers) {
+ free_ref_entry(packed_refs->root);
+ stat_validity_clear(&packed_refs->validity);
+ free(packed_refs);
+ return 1;
+ } else {
+ return 0;
+ }
+}
+
static void clear_packed_ref_cache(struct ref_cache *refs)
{
if (refs->packed) {
- free_ref_entry(refs->packed);
+ struct packed_ref_cache *packed_refs = refs->packed;
+
+ if (packed_refs->lock)
+ die("internal error: packed-ref cache cleared while locked");
refs->packed = NULL;
+ release_packed_ref_cache(packed_refs);
}
}
@@ -996,29 +1066,57 @@ static void read_packed_refs(FILE *f, struct ref_dir *dir)
}
}
-static struct ref_dir *get_packed_refs(struct ref_cache *refs)
+/*
+ * Get the packed_ref_cache for the specified ref_cache, creating it
+ * if necessary.
+ */
+static struct packed_ref_cache *get_packed_ref_cache(struct ref_cache *refs)
{
+ const char *packed_refs_file;
+
+ if (*refs->name)
+ packed_refs_file = git_path_submodule(refs->name, "packed-refs");
+ else
+ packed_refs_file = git_path("packed-refs");
+
+ if (refs->packed &&
+ !stat_validity_check(&refs->packed->validity, packed_refs_file))
+ clear_packed_ref_cache(refs);
+
if (!refs->packed) {
- const char *packed_refs_file;
FILE *f;
- refs->packed = create_dir_entry(refs, "", 0, 0);
- if (*refs->name)
- packed_refs_file = git_path_submodule(refs->name, "packed-refs");
- else
- packed_refs_file = git_path("packed-refs");
+ refs->packed = xcalloc(1, sizeof(*refs->packed));
+ acquire_packed_ref_cache(refs->packed);
+ refs->packed->root = create_dir_entry(refs, "", 0, 0);
f = fopen(packed_refs_file, "r");
if (f) {
- read_packed_refs(f, get_ref_dir(refs->packed));
+ stat_validity_update(&refs->packed->validity, fileno(f));
+ read_packed_refs(f, get_ref_dir(refs->packed->root));
fclose(f);
}
}
- return get_ref_dir(refs->packed);
+ return refs->packed;
+}
+
+static struct ref_dir *get_packed_ref_dir(struct packed_ref_cache *packed_ref_cache)
+{
+ return get_ref_dir(packed_ref_cache->root);
+}
+
+static struct ref_dir *get_packed_refs(struct ref_cache *refs)
+{
+ return get_packed_ref_dir(get_packed_ref_cache(refs));
}
void add_packed_ref(const char *refname, const unsigned char *sha1)
{
- add_ref(get_packed_refs(&ref_cache),
+ struct packed_ref_cache *packed_ref_cache =
+ get_packed_ref_cache(&ref_cache);
+
+ if (!packed_ref_cache->lock)
+ die("internal error: packed refs not locked");
+ add_ref(get_packed_ref_dir(packed_ref_cache),
create_ref_entry(refname, sha1, REF_ISPACKED, 1));
}
@@ -1197,6 +1295,37 @@ static struct ref_entry *get_packed_ref(const char *refname)
return find_ref(get_packed_refs(&ref_cache), refname);
}
+/*
+ * A loose ref file doesn't exist; check for a packed ref. The
+ * options are forwarded from resolve_safe_unsafe().
+ */
+static const char *handle_missing_loose_ref(const char *refname,
+ unsigned char *sha1,
+ int reading,
+ int *flag)
+{
+ struct ref_entry *entry;
+
+ /*
+ * The loose reference file does not exist; check for a packed
+ * reference.
+ */
+ entry = get_packed_ref(refname);
+ if (entry) {
+ hashcpy(sha1, entry->u.value.sha1);
+ if (flag)
+ *flag |= REF_ISPACKED;
+ return refname;
+ }
+ /* The reference is not a packed reference, either. */
+ if (reading) {
+ return NULL;
+ } else {
+ hashclr(sha1);
+ return refname;
+ }
+}
+
const char *resolve_ref_unsafe(const char *refname, unsigned char *sha1, int reading, int *flag)
{
int depth = MAXDEPTH;
@@ -1221,36 +1350,34 @@ const char *resolve_ref_unsafe(const char *refname, unsigned char *sha1, int rea
git_snpath(path, sizeof(path), "%s", refname);
+ /*
+ * We might have to loop back here to avoid a race
+ * condition: first we lstat() the file, then we try
+ * to read it as a link or as a file. But if somebody
+ * changes the type of the file (file <-> directory
+ * <-> symlink) between the lstat() and reading, then
+ * we don't want to report that as an error but rather
+ * try again starting with the lstat().
+ */
+ stat_ref:
if (lstat(path, &st) < 0) {
- struct ref_entry *entry;
-
- if (errno != ENOENT)
+ if (errno == ENOENT)
+ return handle_missing_loose_ref(refname, sha1,
+ reading, flag);
+ else
return NULL;
- /*
- * The loose reference file does not exist;
- * check for a packed reference.
- */
- entry = get_packed_ref(refname);
- if (entry) {
- hashcpy(sha1, entry->u.value.sha1);
- if (flag)
- *flag |= REF_ISPACKED;
- return refname;
- }
- /* The reference is not a packed reference, either. */
- if (reading) {
- return NULL;
- } else {
- hashclr(sha1);
- return refname;
- }
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
- if (len < 0)
- return NULL;
+ if (len < 0) {
+ if (errno == ENOENT || errno == EINVAL)
+ /* inconsistent with lstat; retry */
+ goto stat_ref;
+ else
+ return NULL;
+ }
buffer[len] = 0;
if (!prefixcmp(buffer, "refs/") &&
!check_refname_format(buffer, 0)) {
@@ -1273,8 +1400,13 @@ const char *resolve_ref_unsafe(const char *refname, unsigned char *sha1, int rea
* a ref
*/
fd = open(path, O_RDONLY);
- if (fd < 0)
- return NULL;
+ if (fd < 0) {
+ if (errno == ENOENT)
+ /* inconsistent with lstat; retry */
+ goto stat_ref;
+ else
+ return NULL;
+ }
len = read_in_full(fd, buffer, sizeof(buffer)-1);
close(fd);
if (len < 0)
@@ -1286,8 +1418,19 @@ const char *resolve_ref_unsafe(const char *refname, unsigned char *sha1, int rea
/*
* Is it a symbolic ref?
*/
- if (prefixcmp(buffer, "ref:"))
- break;
+ if (prefixcmp(buffer, "ref:")) {
+ /*
+ * Please note that FETCH_HEAD has a second
+ * line containing other data.
+ */
+ if (get_sha1_hex(buffer, sha1) ||
+ (buffer[40] != '\0' && !isspace(buffer[40]))) {
+ if (flag)
+ *flag |= REF_ISBROKEN;
+ return NULL;
+ }
+ return refname;
+ }
if (flag)
*flag |= REF_ISSYMREF;
buf = buffer + 4;
@@ -1300,13 +1443,6 @@ const char *resolve_ref_unsafe(const char *refname, unsigned char *sha1, int rea
}
refname = strcpy(refname_buffer, buf);
}
- /* Please note that FETCH_HEAD has a second line containing other data. */
- if (get_sha1_hex(buffer, sha1) || (buffer[40] != '\0' && !isspace(buffer[40]))) {
- if (flag)
- *flag |= REF_ISBROKEN;
- return NULL;
- }
- return refname;
}
char *resolve_refdup(const char *ref, unsigned char *sha1, int reading, int *flag)
@@ -1520,14 +1656,32 @@ void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname)
static int do_for_each_entry(struct ref_cache *refs, const char *base,
each_ref_entry_fn fn, void *cb_data)
{
- struct ref_dir *packed_dir = get_packed_refs(refs);
- struct ref_dir *loose_dir = get_loose_refs(refs);
+ struct packed_ref_cache *packed_ref_cache;
+ struct ref_dir *loose_dir;
+ struct ref_dir *packed_dir;
int retval = 0;
+ /*
+ * We must make sure that all loose refs are read before accessing the
+ * packed-refs file; this avoids a race condition in which loose refs
+ * are migrated to the packed-refs file by a simultaneous process, but
+ * our in-memory view is from before the migration. get_packed_ref_cache()
+ * takes care of making sure our view is up to date with what is on
+ * disk.
+ */
+ loose_dir = get_loose_refs(refs);
if (base && *base) {
- packed_dir = find_containing_dir(packed_dir, base, 0);
loose_dir = find_containing_dir(loose_dir, base, 0);
}
+ if (loose_dir)
+ prime_ref_dir(loose_dir);
+
+ packed_ref_cache = get_packed_ref_cache(refs);
+ acquire_packed_ref_cache(packed_ref_cache);
+ packed_dir = get_packed_ref_dir(packed_ref_cache);
+ if (base && *base) {
+ packed_dir = find_containing_dir(packed_dir, base, 0);
+ }
if (packed_dir && loose_dir) {
sort_ref_dir(packed_dir);
@@ -1544,6 +1698,7 @@ static int do_for_each_entry(struct ref_cache *refs, const char *base,
loose_dir, 0, fn, cb_data);
}
+ release_packed_ref_cache(packed_ref_cache);
return retval;
}
@@ -1998,6 +2153,73 @@ static void write_packed_entry(int fd, char *refname, unsigned char *sha1,
}
}
+/*
+ * An each_ref_entry_fn that writes the entry to a packed-refs file.
+ */
+static int write_packed_entry_fn(struct ref_entry *entry, void *cb_data)
+{
+ int *fd = cb_data;
+ enum peel_status peel_status = peel_entry(entry, 0);
+
+ if (peel_status != PEEL_PEELED && peel_status != PEEL_NON_TAG)
+ error("internal error: %s is not a valid packed reference!",
+ entry->name);
+ write_packed_entry(*fd, entry->name, entry->u.value.sha1,
+ peel_status == PEEL_PEELED ?
+ entry->u.value.peeled : NULL);
+ return 0;
+}
+
+int lock_packed_refs(int flags)
+{
+ struct packed_ref_cache *packed_ref_cache;
+
+ /* Discard the old cache because it might be invalid: */
+ clear_packed_ref_cache(&ref_cache);
+ if (hold_lock_file_for_update(&packlock, git_path("packed-refs"), flags) < 0)
+ return -1;
+ /* Read the current packed-refs while holding the lock: */
+ packed_ref_cache = get_packed_ref_cache(&ref_cache);
+ packed_ref_cache->lock = &packlock;
+ /* Increment the reference count to prevent it from being freed: */
+ acquire_packed_ref_cache(packed_ref_cache);
+ return 0;
+}
+
+int commit_packed_refs(void)
+{
+ struct packed_ref_cache *packed_ref_cache =
+ get_packed_ref_cache(&ref_cache);
+ int error = 0;
+
+ if (!packed_ref_cache->lock)
+ die("internal error: packed-refs not locked");
+ write_or_die(packed_ref_cache->lock->fd,
+ PACKED_REFS_HEADER, strlen(PACKED_REFS_HEADER));
+
+ do_for_each_entry_in_dir(get_packed_ref_dir(packed_ref_cache),
+ 0, write_packed_entry_fn,
+ &packed_ref_cache->lock->fd);
+ if (commit_lock_file(packed_ref_cache->lock))
+ error = -1;
+ packed_ref_cache->lock = NULL;
+ release_packed_ref_cache(packed_ref_cache);
+ return error;
+}
+
+void rollback_packed_refs(void)
+{
+ struct packed_ref_cache *packed_ref_cache =
+ get_packed_ref_cache(&ref_cache);
+
+ if (!packed_ref_cache->lock)
+ die("internal error: packed-refs not locked");
+ rollback_lock_file(packed_ref_cache->lock);
+ packed_ref_cache->lock = NULL;
+ release_packed_ref_cache(packed_ref_cache);
+ clear_packed_ref_cache(&ref_cache);
+}
+
struct ref_to_prune {
struct ref_to_prune *next;
unsigned char sha1[20];
@@ -2006,35 +2228,50 @@ struct ref_to_prune {
struct pack_refs_cb_data {
unsigned int flags;
+ struct ref_dir *packed_refs;
struct ref_to_prune *ref_to_prune;
- int fd;
};
-static int pack_one_ref(struct ref_entry *entry, void *cb_data)
+/*
+ * An each_ref_entry_fn that is run over loose references only. If
+ * the loose reference can be packed, add an entry in the packed ref
+ * cache. If the reference should be pruned, also add it to
+ * ref_to_prune in the pack_refs_cb_data.
+ */
+static int pack_if_possible_fn(struct ref_entry *entry, void *cb_data)
{
struct pack_refs_cb_data *cb = cb_data;
enum peel_status peel_status;
+ struct ref_entry *packed_entry;
int is_tag_ref = !prefixcmp(entry->name, "refs/tags/");
- /* ALWAYS pack refs that were already packed or are tags */
- if (!(cb->flags & PACK_REFS_ALL) && !is_tag_ref &&
- !(entry->flag & REF_ISPACKED))
+ /* ALWAYS pack tags */
+ if (!(cb->flags & PACK_REFS_ALL) && !is_tag_ref)
return 0;
/* Do not pack symbolic or broken refs: */
if ((entry->flag & REF_ISSYMREF) || !ref_resolves_to_object(entry))
return 0;
+ /* Add a packed ref cache entry equivalent to the loose entry. */
peel_status = peel_entry(entry, 1);
if (peel_status != PEEL_PEELED && peel_status != PEEL_NON_TAG)
die("internal error peeling reference %s (%s)",
entry->name, sha1_to_hex(entry->u.value.sha1));
- write_packed_entry(cb->fd, entry->name, entry->u.value.sha1,
- peel_status == PEEL_PEELED ?
- entry->u.value.peeled : NULL);
+ packed_entry = find_ref(cb->packed_refs, entry->name);
+ if (packed_entry) {
+ /* Overwrite existing packed entry with info from loose entry */
+ packed_entry->flag = REF_ISPACKED | REF_KNOWS_PEELED;
+ hashcpy(packed_entry->u.value.sha1, entry->u.value.sha1);
+ } else {
+ packed_entry = create_ref_entry(entry->name, entry->u.value.sha1,
+ REF_ISPACKED | REF_KNOWS_PEELED, 0);
+ add_ref(cb->packed_refs, packed_entry);
+ }
+ hashcpy(packed_entry->u.value.peeled, entry->u.value.peeled);
- /* If the ref was already packed, there is no need to prune it. */
- if ((cb->flags & PACK_REFS_PRUNE) && !(entry->flag & REF_ISPACKED)) {
+ /* Schedule the loose reference for pruning if requested. */
+ if ((cb->flags & PACK_REFS_PRUNE)) {
int namelen = strlen(entry->name) + 1;
struct ref_to_prune *n = xcalloc(1, sizeof(*n) + namelen);
hashcpy(n->sha1, entry->u.value.sha1);
@@ -2096,8 +2333,6 @@ static void prune_refs(struct ref_to_prune *r)
}
}
-static struct lock_file packlock;
-
int pack_refs(unsigned int flags)
{
struct pack_refs_cb_data cbdata;
@@ -2105,26 +2340,38 @@ int pack_refs(unsigned int flags)
memset(&cbdata, 0, sizeof(cbdata));
cbdata.flags = flags;
- cbdata.fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"),
- LOCK_DIE_ON_ERROR);
+ lock_packed_refs(LOCK_DIE_ON_ERROR);
+ cbdata.packed_refs = get_packed_refs(&ref_cache);
- write_or_die(cbdata.fd, PACKED_REFS_HEADER, strlen(PACKED_REFS_HEADER));
+ do_for_each_entry_in_dir(get_loose_refs(&ref_cache), 0,
+ pack_if_possible_fn, &cbdata);
- do_for_each_entry(&ref_cache, "", pack_one_ref, &cbdata);
- if (commit_lock_file(&packlock) < 0)
+ if (commit_packed_refs())
die_errno("unable to overwrite old ref-pack file");
+
prune_refs(cbdata.ref_to_prune);
return 0;
}
-static int repack_ref_fn(struct ref_entry *entry, void *cb_data)
+/*
+ * If entry is no longer needed in packed-refs, add it to the string
+ * list pointed to by cb_data. Reasons for deleting entries:
+ *
+ * - Entry is broken.
+ * - Entry is overridden by a loose ref.
+ * - Entry does not point at a valid object.
+ *
+ * In the first and third cases, also emit an error message because these
+ * are indications of repository corruption.
+ */
+static int curate_packed_ref_fn(struct ref_entry *entry, void *cb_data)
{
- int *fd = cb_data;
- enum peel_status peel_status;
+ struct string_list *refs_to_delete = cb_data;
if (entry->flag & REF_ISBROKEN) {
/* This shouldn't happen to packed refs. */
error("%s is broken!", entry->name);
+ string_list_append(refs_to_delete, entry->name);
return 0;
}
if (!has_sha1_file(entry->u.value.sha1)) {
@@ -2134,7 +2381,7 @@ static int repack_ref_fn(struct ref_entry *entry, void *cb_data)
if (read_ref_full(entry->name, sha1, 0, &flags))
/* We should at least have found the packed ref. */
die("Internal error");
- if ((flags & REF_ISSYMREF) || !(flags & REF_ISPACKED))
+ if ((flags & REF_ISSYMREF) || !(flags & REF_ISPACKED)) {
/*
* This packed reference is overridden by a
* loose reference, so it is OK that its value
@@ -2143,9 +2390,11 @@ static int repack_ref_fn(struct ref_entry *entry, void *cb_data)
* collected. For this purpose we don't even
* care whether the loose reference itself is
* invalid, broken, symbolic, etc. Silently
- * omit the packed reference from the output.
+ * remove the packed reference.
*/
+ string_list_append(refs_to_delete, entry->name);
return 0;
+ }
/*
* There is no overriding loose reference, so the fact
* that this reference doesn't refer to a valid object
@@ -2154,44 +2403,47 @@ static int repack_ref_fn(struct ref_entry *entry, void *cb_data)
* the output.
*/
error("%s does not point to a valid object!", entry->name);
+ string_list_append(refs_to_delete, entry->name);
return 0;
}
- peel_status = peel_entry(entry, 0);
- write_packed_entry(*fd, entry->name, entry->u.value.sha1,
- peel_status == PEEL_PEELED ?
- entry->u.value.peeled : NULL);
-
return 0;
}
static int repack_without_ref(const char *refname)
{
- int fd;
struct ref_dir *packed;
+ struct string_list refs_to_delete = STRING_LIST_INIT_DUP;
+ struct string_list_item *ref_to_delete;
if (!get_packed_ref(refname))
return 0; /* refname does not exist in packed refs */
- fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
- if (fd < 0) {
+ if (lock_packed_refs(0)) {
unable_to_lock_error(git_path("packed-refs"), errno);
return error("cannot delete '%s' from packed refs", refname);
}
- clear_packed_ref_cache(&ref_cache);
packed = get_packed_refs(&ref_cache);
- /* Remove refname from the cache. */
+
+ /* Remove refname from the cache: */
if (remove_entry(packed, refname) == -1) {
/*
* The packed entry disappeared while we were
* acquiring the lock.
*/
- rollback_lock_file(&packlock);
+ rollback_packed_refs();
return 0;
}
- write_or_die(fd, PACKED_REFS_HEADER, strlen(PACKED_REFS_HEADER));
- do_for_each_entry_in_dir(packed, 0, repack_ref_fn, &fd);
- return commit_lock_file(&packlock);
+
+ /* Remove any other accumulated cruft: */
+ do_for_each_entry_in_dir(packed, 0, curate_packed_ref_fn, &refs_to_delete);
+ for_each_string_list_item(ref_to_delete, &refs_to_delete) {
+ if (remove_entry(packed, ref_to_delete->string) == -1)
+ die("internal error");
+ }
+
+ /* Write what remains: */
+ return commit_packed_refs();
}
int delete_ref(const char *refname, const unsigned char *sha1, int delopt)
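
The block comment at the start of this refs.c hunk series describes an lstat()-then-read retry: if another process changes the path's type between the lstat() and the readlink()/open(), resolve_ref_unsafe() jumps back to the stat_ref label and starts over instead of reporting an error. A minimal, self-contained sketch of that pattern (illustrative only; the function name and buffer handling are not taken from refs.c):

#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <unistd.h>

/*
 * Illustrative only -- not the refs.c code.  If the path changes type
 * (file <-> directory <-> symlink) between the lstat() and the read,
 * jump back to the lstat() instead of reporting an error.
 */
static int read_ref_file(const char *path, char *buf, size_t len)
{
	struct stat st;
	ssize_t n;
	int fd;

retry:
	if (lstat(path, &st) < 0)
		return -1;

	if (S_ISLNK(st.st_mode)) {
		n = readlink(path, buf, len - 1);
		if (n < 0) {
			if (errno == ENOENT || errno == EINVAL)
				goto retry;	/* no longer a symlink */
			return -1;
		}
		buf[n] = '\0';
		return 0;
	}

	fd = open(path, O_RDONLY);
	if (fd < 0) {
		if (errno == ENOENT)
			goto retry;	/* vanished after the lstat() */
		return -1;
	}
	n = read(fd, buf, len - 1);
	close(fd);
	if (n < 0)
		return -1;
	buf[n] = '\0';
	return 0;
}

ENOENT or EINVAL after a successful lstat() indicates that the entry was removed or changed type underneath us, so restarting from the lstat() is the safe response.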
diff --git a/refs.h b/refs.h
index 246bf6096d..9e5db3ae26 100644
--- a/refs.h
+++ b/refs.h
@@ -77,12 +77,34 @@ extern int for_each_rawref(each_ref_fn, void *);
extern void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname);
/*
- * Add a reference to the in-memory packed reference cache. To actually
- * write the reference to the packed-refs file, call pack_refs().
+ * Lock the packed-refs file for writing. Flags is passed to
+ * hold_lock_file_for_update(). Return 0 on success.
+ */
+extern int lock_packed_refs(int flags);
+
+/*
+ * Add a reference to the in-memory packed reference cache. This may
+ * only be called while the packed-refs file is locked (see
+ * lock_packed_refs()). To actually write the packed-refs file, call
+ * commit_packed_refs().
*/
extern void add_packed_ref(const char *refname, const unsigned char *sha1);
/*
+ * Write the current version of the packed refs cache from memory to
+ * disk. The packed-refs file must already be locked for writing (see
+ * lock_packed_refs()). Return zero on success.
+ */
+extern int commit_packed_refs(void);
+
+/*
+ * Roll back the lockfile for the packed-refs file, and discard the
+ * in-memory packed reference cache. (The packed-refs file will be
+ * read anew if it is needed again after this function is called.)
+ */
+extern void rollback_packed_refs(void);
+
+/*
* Flags for controlling behaviour of pack_refs()
* PACK_REFS_PRUNE: Prune loose refs after packing
* PACK_REFS_ALL: Pack _all_ refs, not just tags and already packed refs
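
Taken together, the declarations above plus add_packed_ref() form a small lock/commit/rollback cycle around the packed-refs file. A hedged sketch of the intended calling sequence, assuming it is compiled inside the git tree; pack_single_ref() and its skip condition are invented for illustration, and only the four API calls come from this header:

#include "cache.h"
#include "refs.h"

/*
 * Sketch of the lock/commit/rollback cycle; pack_single_ref() and the
 * "skip" condition are invented for illustration.
 */
static int pack_single_ref(const char *refname, const unsigned char *sha1,
			   int skip)
{
	if (lock_packed_refs(0) < 0)
		return -1;		/* could not take packed-refs.lock */

	if (skip) {
		/* Nothing to write: drop the lock and the cached view. */
		rollback_packed_refs();
		return 0;
	}

	/* Stage the change in the in-memory packed-ref cache... */
	add_packed_ref(refname, sha1);

	/* ...then write the whole cache back out through the lockfile. */
	return commit_packed_refs();
}

pack_refs() and repack_without_ref() in the refs.c hunks above follow this same shape.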
diff --git a/revision.c b/revision.c
index f1bb731fd7..2f0142f22e 100644
--- a/revision.c
+++ b/revision.c
@@ -1296,7 +1296,7 @@ void init_revisions(struct rev_info *revs, const char *prefix)
DIFF_OPT_SET(&revs->pruning, QUICK);
revs->pruning.add_remove = file_add_remove;
revs->pruning.change = file_change;
- revs->lifo = 1;
+ revs->sort_order = REV_SORT_IN_GRAPH_ORDER;
revs->dense = 1;
revs->prefix = prefix;
revs->max_age = -1;
@@ -1638,7 +1638,7 @@ static int handle_revision_opt(struct rev_info *revs, int argc, const char **arg
} else if (!strcmp(arg, "--merge")) {
revs->show_merge = 1;
} else if (!strcmp(arg, "--topo-order")) {
- revs->lifo = 1;
+ revs->sort_order = REV_SORT_IN_GRAPH_ORDER;
revs->topo_order = 1;
} else if (!strcmp(arg, "--simplify-merges")) {
revs->simplify_merges = 1;
@@ -1656,7 +1656,10 @@ static int handle_revision_opt(struct rev_info *revs, int argc, const char **arg
revs->prune = 1;
load_ref_decorations(DECORATE_SHORT_REFS);
} else if (!strcmp(arg, "--date-order")) {
- revs->lifo = 0;
+ revs->sort_order = REV_SORT_BY_COMMIT_DATE;
+ revs->topo_order = 1;
+ } else if (!strcmp(arg, "--author-date-order")) {
+ revs->sort_order = REV_SORT_BY_AUTHOR_DATE;
revs->topo_order = 1;
} else if (!prefixcmp(arg, "--early-output")) {
int count = 100;
@@ -2606,7 +2609,7 @@ int prepare_revision_walk(struct rev_info *revs)
if (limit_list(revs) < 0)
return -1;
if (revs->topo_order)
- sort_in_topological_order(&revs->commits, revs->lifo);
+ sort_in_topological_order(&revs->commits, revs->sort_order);
if (revs->line_level_traverse)
line_log_filter(revs);
if (revs->simplify_merges)
@@ -2924,7 +2927,7 @@ static void create_boundary_commit_list(struct rev_info *revs)
* If revs->topo_order is set, sort the boundary commits
* in topological order
*/
- sort_in_topological_order(&revs->commits, revs->lifo);
+ sort_in_topological_order(&revs->commits, revs->sort_order);
}
static struct commit *get_revision_internal(struct rev_info *revs)
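
These revision.c hunks replace the single-bit lifo flag with a three-valued sort order, which is what makes room for the new --author-date-order mode. The enum itself is declared in commit.h and is not part of this hunk; its presumed shape, shown here for orientation only:

/* Presumed declaration backing revs->sort_order (lives in commit.h). */
enum rev_sort_order {
	REV_SORT_IN_GRAPH_ORDER = 0,	/* default and --topo-order (old lifo=1) */
	REV_SORT_BY_COMMIT_DATE,	/* --date-order (old lifo=0) */
	REV_SORT_BY_AUTHOR_DATE		/* new: --author-date-order */
};

sort_in_topological_order() now takes this enum instead of the old lifo bit, as the two call sites above show.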
diff --git a/revision.h b/revision.h
index eeea6fba3c..92d6614af6 100644
--- a/revision.h
+++ b/revision.h
@@ -4,6 +4,7 @@
#include "parse-options.h"
#include "grep.h"
#include "notes.h"
+#include "commit.h"
#define SEEN (1u<<0)
#define UNINTERESTING (1u<<1)
@@ -62,6 +63,10 @@ struct rev_info {
const char *prefix;
const char *def;
struct pathspec prune_data;
+
+ /* topo-sort */
+ enum rev_sort_order sort_order;
+
unsigned int early_output:1,
ignore_missing:1;
@@ -72,7 +77,6 @@ struct rev_info {
show_all:1,
remove_empty_trees:1,
simplify_history:1,
- lifo:1,
topo_order:1,
simplify_merges:1,
simplify_by_decoration:1,
diff --git a/sequencer.c b/sequencer.c
index ab6f8a722d..425207ad5f 100644
--- a/sequencer.c
+++ b/sequencer.c
@@ -270,15 +270,20 @@ static int error_dirty_index(struct replay_opts *opts)
}
static int fast_forward_to(const unsigned char *to, const unsigned char *from,
- int unborn)
+ int unborn, struct replay_opts *opts)
{
struct ref_lock *ref_lock;
+ struct strbuf sb = STRBUF_INIT;
+ int ret;
read_cache();
if (checkout_fast_forward(from, to, 1))
exit(1); /* the callee should have complained already */
ref_lock = lock_any_ref_for_update("HEAD", unborn ? null_sha1 : from, 0);
- return write_ref_sha1(ref_lock, to, "cherry-pick");
+ strbuf_addf(&sb, "%s: fast-forward", action_name(opts));
+ ret = write_ref_sha1(ref_lock, to, sb.buf);
+ strbuf_release(&sb);
+ return ret;
}
static int do_recursive_merge(struct commit *base, struct commit *next,
@@ -474,7 +479,7 @@ static int do_pick_commit(struct commit *commit, struct replay_opts *opts)
struct commit_message msg = { NULL, NULL, NULL, NULL, NULL };
char *defmsg = NULL;
struct strbuf msgbuf = STRBUF_INIT;
- int res, unborn = 0;
+ int res, unborn = 0, allow;
if (opts->no_commit) {
/*
@@ -523,7 +528,7 @@ static int do_pick_commit(struct commit *commit, struct replay_opts *opts)
if (opts->allow_ff &&
((parent && !hashcmp(parent->object.sha1, head)) ||
(!parent && unborn)))
- return fast_forward_to(commit->object.sha1, head, unborn);
+ return fast_forward_to(commit->object.sha1, head, unborn, opts);
if (parent && parse_commit(parent) < 0)
/* TRANSLATORS: The first %s will be "revert" or
@@ -624,14 +629,18 @@ static int do_pick_commit(struct commit *commit, struct replay_opts *opts)
msg.subject);
print_advice(res == 1, opts);
rerere(opts->allow_rerere_auto);
- } else {
- int allow = allow_empty(opts, commit);
- if (allow < 0)
- return allow;
- if (!opts->no_commit)
- res = run_git_commit(defmsg, opts, allow);
+ goto leave;
+ }
+
+ allow = allow_empty(opts, commit);
+ if (allow < 0) {
+ res = allow;
+ goto leave;
}
+ if (!opts->no_commit)
+ res = run_git_commit(defmsg, opts, allow);
+leave:
free_message(&msg);
free(defmsg);
diff --git a/sha1_file.c b/sha1_file.c
index 16f08d475c..0af19c00f1 100644
--- a/sha1_file.c
+++ b/sha1_file.c
@@ -2145,8 +2145,17 @@ void *unpack_entry(struct packed_git *p, off_t obj_offset,
data = patch_delta(base, base_size,
delta_data, delta_size,
&size);
+
+ /*
+ * We could not apply the delta; warn the user, but keep going.
+ * Our failure will be noticed either in the next iteration of
+ * the loop, or if this is the final delta, in the caller when
+ * we return NULL. Those code paths will take care of making
+ * a more explicit warning and retrying with another copy of
+ * the object.
+ */
if (!data)
- die("failed to apply delta");
+ error("failed to apply delta");
free(delta_data);
}
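
Downgrading the die() to error() only helps because, as the new comment says, a later check notices the NULL result and retries with another copy of the object. Reduced to a self-contained sketch of that policy (the callback names are stand-ins, not functions from sha1_file.c):

#include <stdio.h>

/* Sketch of "warn and fall back" rather than "die on first failure". */
typedef void *(*object_source_fn)(const char *name);

static void *read_with_fallback(const char *name,
				object_source_fn try_primary,
				object_source_fn try_fallback)
{
	void *data = try_primary(name);	/* may return NULL on a bad delta */

	if (!data) {
		fprintf(stderr, "warning: bad copy of %s, trying another source\n",
			name);
		data = try_fallback(name);
	}
	return data;
}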
diff --git a/t/lib-httpd/apache.conf b/t/lib-httpd/apache.conf
index b5bce459b6..dd17e3a09d 100644
--- a/t/lib-httpd/apache.conf
+++ b/t/lib-httpd/apache.conf
@@ -1,5 +1,4 @@
ServerName dummy
-LockFile accept.lock
PidFile httpd.pid
DocumentRoot www
LogFormat "%h %l %u %t \"%r\" %>s %b" common
@@ -24,6 +23,10 @@ ErrorLog error.log
LoadModule version_module modules/mod_version.so
</IfModule>
+<IfVersion < 2.4>
+LockFile accept.lock
+</IfVersion>
+
<IfVersion < 2.1>
<IfModule !mod_auth.c>
LoadModule auth_module modules/mod_auth.so
@@ -45,6 +48,21 @@ ErrorLog error.log
</IfModule>
</IfVersion>
+<IfVersion >= 2.4>
+<IfModule !mod_authn_core.c>
+ LoadModule authn_core_module modules/mod_authn_core.so
+</IfModule>
+<IfModule !mod_authz_core.c>
+ LoadModule authz_core_module modules/mod_authz_core.so
+</IfModule>
+<IfModule !mod_access_compat.c>
+ LoadModule access_compat_module modules/mod_access_compat.so
+</IfModule>
+<IfModule !mod_mpm_prefork.c>
+ LoadModule mpm_prefork_module modules/mod_mpm_prefork.so
+</IfModule>
+</IfVersion>
+
PassEnv GIT_VALGRIND
PassEnv GIT_VALGRIND_OPTIONS
diff --git a/t/lib-rebase.sh b/t/lib-rebase.sh
index 6ccf797091..4b74ae460b 100644
--- a/t/lib-rebase.sh
+++ b/t/lib-rebase.sh
@@ -65,3 +65,36 @@ EOF
test_set_editor "$(pwd)/fake-editor.sh"
chmod a+x fake-editor.sh
}
+
+# checks that the revisions in "$2" represent a linear range with the
+# subjects in "$1"
+test_linear_range () {
+ revlist_merges=$(git rev-list --merges "$2") &&
+ test -z "$revlist_merges" &&
+ expected=$1
+ set -- $(git log --reverse --format=%s "$2")
+ test "$expected" = "$*"
+}
+
+reset_rebase () {
+ test_might_fail git rebase --abort &&
+ git reset --hard &&
+ git clean -f
+}
+
+cherry_pick () {
+ git cherry-pick -n "$2" &&
+ git commit -m "$1" &&
+ git tag "$1"
+}
+
+revert () {
+ git revert -n "$2" &&
+ git commit -m "$1" &&
+ git tag "$1"
+}
+
+make_empty () {
+ git commit --allow-empty -m "$1" &&
+ git tag "$1"
+}
diff --git a/t/lib-t6000.sh b/t/lib-t6000.sh
index ea25dd89e5..4ffd90127e 100644
--- a/t/lib-t6000.sh
+++ b/t/lib-t6000.sh
@@ -1,55 +1,50 @@
: included from 6002 and others
-[ -d .git/refs/tags ] || mkdir -p .git/refs/tags
+mkdir -p .git/refs/tags
-:> sed.script
+>sed.script
-# Answer the sha1 hash associated with the tag. The tag must exist in .git or .git/refs/tags
-tag()
-{
+# Answer the sha1 hash associated with the tag. The tag must exist in .git/refs/tags
+tag () {
_tag=$1
- [ -f .git/refs/tags/$_tag ] || error "tag: \"$_tag\" does not exist"
- cat .git/refs/tags/$_tag
+ test -f ".git/refs/tags/$_tag" || error "tag: \"$_tag\" does not exist"
+ cat ".git/refs/tags/$_tag"
}
# Generate a commit using the text specified to make it unique and the tree
# named by the tag specified.
-unique_commit()
-{
+unique_commit () {
_text=$1
- _tree=$2
+ _tree=$2
shift 2
- echo $_text | git commit-tree $(tag $_tree) "$@"
+ echo "$_text" | git commit-tree $(tag "$_tree") "$@"
}
# Save the output of a command into the tag specified. Prepend
# a substitution script for the tag onto the front of sed.script
-save_tag()
-{
+save_tag () {
_tag=$1
- [ -n "$_tag" ] || error "usage: save_tag tag commit-args ..."
+ test -n "$_tag" || error "usage: save_tag tag commit-args ..."
shift 1
- "$@" >.git/refs/tags/$_tag
+ "$@" >".git/refs/tags/$_tag"
- echo "s/$(tag $_tag)/$_tag/g" > sed.script.tmp
- cat sed.script >> sed.script.tmp
+ echo "s/$(tag $_tag)/$_tag/g" >sed.script.tmp
+ cat sed.script >>sed.script.tmp
rm sed.script
mv sed.script.tmp sed.script
}
# Replace unhelpful sha1 hashes with their symbolic equivalents
-entag()
-{
+entag () {
sed -f sed.script
}
# Execute a command after first saving, then setting the GIT_AUTHOR_EMAIL
# variable to a specified value. Restore the original value on return.
-as_author()
-{
+as_author () {
_author=$1
shift 1
- _save=$GIT_AUTHOR_EMAIL
+ _save=$GIT_AUTHOR_EMAIL
GIT_AUTHOR_EMAIL="$_author"
export GIT_AUTHOR_EMAIL
@@ -63,45 +58,58 @@ as_author()
fi
}
-commit_date()
-{
- _commit=$1
- git cat-file commit $_commit | sed -n "s/^committer .*> \([0-9]*\) .*/\1/p"
+commit_date () {
+ _commit=$1
+ git cat-file commit $_commit |
+ sed -n "s/^committer .*> \([0-9]*\) .*/\1/p"
}
-on_committer_date()
-{
- _date=$1
- shift 1
- GIT_COMMITTER_DATE="$_date"
- export GIT_COMMITTER_DATE
- "$@"
- unset GIT_COMMITTER_DATE
+# Assign the value of the fake date to a variable, but
+# allow the fairly common "1971-08-16 00:00" prefix to be omitted
+assign_fake_date () {
+ case "$2" in
+ ??:??:??) eval "$1='1971-08-16 $2'" ;;
+ ??:??) eval "$1='1971-08-16 00:$2'" ;;
+ ??) eval "$1='1971-08-16 00:00:$2'" ;;
+ *) eval "$1='$2'" ;;
+ esac
+}
+
+on_committer_date () {
+ assign_fake_date GIT_COMMITTER_DATE "$1"
+ export GIT_COMMITTER_DATE
+ shift 1
+ "$@"
+}
+
+on_dates () {
+ assign_fake_date GIT_COMMITTER_DATE "$1"
+ assign_fake_date GIT_AUTHOR_DATE "$2"
+ export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
+ shift 2
+ "$@"
}
# Execute a command and suppress any error output.
-hide_error()
-{
+hide_error () {
"$@" 2>/dev/null
}
-check_output()
-{
+check_output () {
_name=$1
shift 1
- if eval "$*" | entag > $_name.actual
+ if eval "$*" | entag >"$_name.actual"
then
- test_cmp $_name.expected $_name.actual
+ test_cmp "$_name.expected" "$_name.actual"
else
- return 1;
+ return 1
fi
}
# Turn a reasonable test description into a reasonable test name.
# All non-alphanumerics are translated into -'s, which are then compressed and stripped
# from front and back.
-name_from_description()
-{
+name_from_description () {
perl -pe '
s/[^A-Za-z0-9.]/-/g;
s/-+/-/g;
@@ -119,9 +127,11 @@ name_from_description()
test_output_expect_success()
{
_description=$1
- _test=$2
- [ $# -eq 2 ] || error "usage: test_output_expect_success description test <<EOF ... EOF"
- _name=$(echo $_description | name_from_description)
- cat > $_name.expected
+ _test=$2
+ test $# -eq 2 ||
+ error "usage: test_output_expect_success description test <<EOF ... EOF"
+
+ _name=$(echo $_description | name_from_description)
+ cat >"$_name.expected"
test_expect_success "$_description" "check_output $_name \"$_test\""
}
diff --git a/t/t0009-prio-queue.sh b/t/t0009-prio-queue.sh
new file mode 100755
index 0000000000..94045c3fad
--- /dev/null
+++ b/t/t0009-prio-queue.sh
@@ -0,0 +1,50 @@
+#!/bin/sh
+
+test_description='basic tests for priority queue implementation'
+. ./test-lib.sh
+
+cat >expect <<'EOF'
+1
+2
+3
+4
+5
+5
+6
+7
+8
+9
+10
+EOF
+test_expect_success 'basic ordering' '
+ test-prio-queue 2 6 3 10 9 5 7 4 5 8 1 dump >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+2
+3
+4
+1
+5
+6
+EOF
+test_expect_success 'mixed put and get' '
+ test-prio-queue 6 2 4 get 5 3 get get 1 dump >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+1
+2
+NULL
+1
+2
+NULL
+EOF
+test_expect_success 'notice empty queue' '
+ test-prio-queue 1 2 get get get 1 2 get get get >actual &&
+ test_cmp expect actual
+'
+
+test_done
diff --git a/t/t1513-rev-parse-prefix.sh b/t/t1513-rev-parse-prefix.sh
new file mode 100755
index 0000000000..87ec3ae714
--- /dev/null
+++ b/t/t1513-rev-parse-prefix.sh
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+test_description='Tests for rev-parse --prefix'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ mkdir -p sub1/sub2 &&
+ echo top >top &&
+ echo file1 >sub1/file1 &&
+ echo file2 >sub1/sub2/file2 &&
+ git add top sub1/file1 sub1/sub2/file2 &&
+ git commit -m commit
+'
+
+test_expect_success 'empty prefix -- file' '
+ git rev-parse --prefix "" -- top sub1/file1 >actual &&
+ cat <<-\EOF >expected &&
+ --
+ top
+ sub1/file1
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'valid prefix -- file' '
+ git rev-parse --prefix sub1/ -- file1 sub2/file2 >actual &&
+ cat <<-\EOF >expected &&
+ --
+ sub1/file1
+ sub1/sub2/file2
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'valid prefix -- ../file' '
+ git rev-parse --prefix sub1/ -- ../top sub2/file2 >actual &&
+ cat <<-\EOF >expected &&
+ --
+ sub1/../top
+ sub1/sub2/file2
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'empty prefix HEAD:./path' '
+ git rev-parse --prefix "" HEAD:./top >actual &&
+ git rev-parse HEAD:top >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'valid prefix HEAD:./path' '
+ git rev-parse --prefix sub1/ HEAD:./file1 >actual &&
+ git rev-parse HEAD:sub1/file1 >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'valid prefix HEAD:../path' '
+ git rev-parse --prefix sub1/ HEAD:../top >actual &&
+ git rev-parse HEAD:top >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'prefix ignored with HEAD:top' '
+ git rev-parse --prefix sub1/ HEAD:top >actual &&
+ git rev-parse HEAD:top >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'disambiguate path with valid prefix' '
+ git rev-parse --prefix sub1/ file1 >actual &&
+ cat <<-\EOF >expected &&
+ sub1/file1
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'file and refs with prefix' '
+ git rev-parse --prefix sub1/ master file1 >actual &&
+ cat <<-EOF >expected &&
+ $(git rev-parse master)
+ sub1/file1
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'two-levels deep' '
+ git rev-parse --prefix sub1/sub2/ -- file2 >actual &&
+ cat <<-\EOF >expected &&
+ --
+ sub1/sub2/file2
+ EOF
+ test_cmp expected actual
+'
+
+test_done
diff --git a/t/t3400-rebase.sh b/t/t3400-rebase.sh
index 6a5ec32a26..ebf93b0695 100755
--- a/t/t3400-rebase.sh
+++ b/t/t3400-rebase.sh
@@ -40,13 +40,6 @@ test_expect_success 'prepare repository with topic branches' '
echo Side >>C &&
git add C &&
git commit -m "Add C" &&
- git checkout -b nonlinear my-topic-branch &&
- echo Edit >>B &&
- git add B &&
- git commit -m "Modify B" &&
- git merge side &&
- git checkout -b upstream-merged-nonlinear &&
- git merge master &&
git checkout -f my-topic-branch &&
git tag topic
'
@@ -66,26 +59,15 @@ test_expect_success 'rebase against master' '
git rebase master
'
-test_expect_success 'rebase against master twice' '
- git rebase master >out &&
- test_i18ngrep "Current branch my-topic-branch is up to date" out
-'
-
-test_expect_success 'rebase against master twice with --force' '
- git rebase --force-rebase master >out &&
- test_i18ngrep "Current branch my-topic-branch is up to date, rebase forced" out
-'
-
-test_expect_success 'rebase against master twice from another branch' '
- git checkout my-topic-branch^ &&
- git rebase master my-topic-branch >out &&
- test_i18ngrep "Current branch my-topic-branch is up to date" out
-'
-
-test_expect_success 'rebase fast-forward to master' '
- git checkout my-topic-branch^ &&
- git rebase my-topic-branch >out &&
- test_i18ngrep "Fast-forwarded HEAD to my-topic-branch" out
+test_expect_success 'rebase, with <onto> and <upstream> specified as :/quuxery' '
+ test_when_finished "git branch -D torebase" &&
+ git checkout -b torebase my-topic-branch^ &&
+ upstream=$(git rev-parse ":/Add B") &&
+ onto=$(git rev-parse ":/Add A") &&
+ git rebase --onto $onto $upstream &&
+ git reset --hard my-topic-branch^ &&
+ git rebase --onto ":/Add A" ":/Add B" &&
+ git checkout my-topic-branch
'
test_expect_success 'the rebase operation should not have destroyed author information' '
@@ -106,31 +88,9 @@ test_expect_success 'rebase from ambiguous branch name' '
git rebase master
'
-test_expect_success 'rebase after merge master' '
- git checkout --detach refs/tags/topic &&
- git branch -D topic &&
- git reset --hard topic &&
- git merge master &&
- git rebase master &&
- ! (git show | grep "^Merge:")
-'
-
-test_expect_success 'rebase of history with merges is linearized' '
- git checkout nonlinear &&
- test 4 = $(git rev-list master.. | wc -l) &&
- git rebase master &&
- test 3 = $(git rev-list master.. | wc -l)
-'
-
-test_expect_success 'rebase of history with merges after upstream merge is linearized' '
- git checkout upstream-merged-nonlinear &&
- test 5 = $(git rev-list master.. | wc -l) &&
- git rebase master &&
- test 3 = $(git rev-list master.. | wc -l)
-'
-
test_expect_success 'rebase a single mode change' '
git checkout master &&
+ git branch -D topic &&
echo 1 >X &&
git add X &&
test_tick &&
diff --git a/t/t3401-rebase-partial.sh b/t/t3401-rebase-partial.sh
deleted file mode 100755
index 58f4823783..0000000000
--- a/t/t3401-rebase-partial.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2006 Yann Dirson, based on t3400 by Amos Waterland
-#
-
-test_description='git rebase should detect patches integrated upstream
-
-This test cherry-picks one local change of two into master branch, and
-checks that git rebase succeeds with only the second patch in the
-local branch.
-'
-. ./test-lib.sh
-
-test_expect_success 'prepare repository with topic branch' '
- test_commit A &&
- git checkout -b my-topic-branch &&
- test_commit B &&
- test_commit C &&
- git checkout -f master &&
- test_commit A2 A.t
-'
-
-test_expect_success 'pick top patch from topic branch into master' '
- git cherry-pick C &&
- git checkout -f my-topic-branch
-'
-
-test_debug '
- git cherry master &&
- git format-patch -k --stdout --full-index master >/dev/null &&
- gitk --all & sleep 1
-'
-
-test_expect_success 'rebase topic branch against new master and check git am did not get halted' '
- git rebase master &&
- test_path_is_missing .git/rebase-apply
-'
-
-test_expect_success 'rebase --merge topic branch that was partially merged upstream' '
- git reset --hard C &&
- git rebase --merge master &&
- test_path_is_missing .git/rebase-merge
-'
-
-test_expect_success 'rebase ignores empty commit' '
- git reset --hard A &&
- git commit --allow-empty -m empty &&
- test_commit D &&
- git rebase C &&
- test "$(git log --format=%s C..)" = "D"
-'
-
-test_expect_success 'rebase --keep-empty' '
- git reset --hard D &&
- git rebase --keep-empty C &&
- test "$(git log --format=%s C..)" = "D
-empty"
-'
-
-test_expect_success 'rebase --keep-empty keeps empty even if already in upstream' '
- git reset --hard A &&
- git commit --allow-empty -m also-empty &&
- git rebase --keep-empty D &&
- test "$(git log --format=%s A..)" = "also-empty
-D
-empty"
-'
-
-test_done
diff --git a/t/t3404-rebase-interactive.sh b/t/t3404-rebase-interactive.sh
index 79e8d3c596..d6b4143773 100755
--- a/t/t3404-rebase-interactive.sh
+++ b/t/t3404-rebase-interactive.sh
@@ -477,19 +477,11 @@ test_expect_success 'interrupted squash works as expected (case 2)' '
test $one = $(git rev-parse HEAD~2)
'
-test_expect_success 'ignore patch if in upstream' '
- HEAD=$(git rev-parse HEAD) &&
- git checkout -b has-cherry-picked HEAD^ &&
+test_expect_success '--continue tries to commit, even for "edit"' '
echo unrelated > file7 &&
git add file7 &&
test_tick &&
git commit -m "unrelated change" &&
- git cherry-pick $HEAD &&
- EXPECT_COUNT=1 git rebase -i $HEAD &&
- test $HEAD = $(git rev-parse HEAD^)
-'
-
-test_expect_success '--continue tries to commit, even for "edit"' '
parent=$(git rev-parse HEAD^) &&
test_tick &&
FAKE_LINES="edit 1" git rebase -i HEAD^ &&
@@ -947,4 +939,15 @@ test_expect_success 'rebase -i respects core.commentchar' '
test B = $(git cat-file commit HEAD^ | sed -ne \$p)
'
+test_expect_success 'rebase -i, with <onto> and <upstream> specified as :/quuxery' '
+ test_when_finished "git branch -D torebase" &&
+ git checkout -b torebase branch1 &&
+ upstream=$(git rev-parse ":/J") &&
+ onto=$(git rev-parse ":/A") &&
+ git rebase --onto $onto $upstream &&
+ git reset --hard branch1 &&
+ git rebase --onto ":/A" ":/J" &&
+ git checkout branch1
+'
+
test_done
diff --git a/t/t3406-rebase-message.sh b/t/t3406-rebase-message.sh
index e6a9a0d436..0392e36d23 100755
--- a/t/t3406-rebase-message.sh
+++ b/t/t3406-rebase-message.sh
@@ -4,27 +4,17 @@ test_description='messages from rebase operation'
. ./test-lib.sh
-quick_one () {
- echo "$1" >"file$1" &&
- git add "file$1" &&
- test_tick &&
- git commit -m "$1"
-}
+test_expect_success 'setup' '
+ test_commit O fileO &&
+ test_commit X fileX &&
+ test_commit A fileA &&
+ test_commit B fileB &&
+ test_commit Y fileY &&
-test_expect_success setup '
- quick_one O &&
- git branch topic &&
- quick_one X &&
- quick_one A &&
- quick_one B &&
- quick_one Y &&
-
- git checkout topic &&
- quick_one A &&
- quick_one B &&
- quick_one Z &&
+ git checkout -b topic O &&
+ git cherry-pick A B &&
+ test_commit Z fileZ &&
git tag start
-
'
cat >expect <<\EOF
@@ -34,12 +24,32 @@ Committed: 0003 Z
EOF
test_expect_success 'rebase -m' '
-
git rebase -m master >report &&
sed -n -e "/^Already applied: /p" \
-e "/^Committed: /p" report >actual &&
test_cmp expect actual
+'
+
+test_expect_success 'rebase against master twice' '
+ git rebase master >out &&
+ test_i18ngrep "Current branch topic is up to date" out
+'
+
+test_expect_success 'rebase against master twice with --force' '
+ git rebase --force-rebase master >out &&
+ test_i18ngrep "Current branch topic is up to date, rebase forced" out
+'
+
+test_expect_success 'rebase against master twice from another branch' '
+ git checkout topic^ &&
+ git rebase master topic >out &&
+ test_i18ngrep "Current branch topic is up to date" out
+'
+test_expect_success 'rebase fast-forward to master' '
+ git checkout topic^ &&
+ git rebase topic >out &&
+ test_i18ngrep "Fast-forwarded HEAD to topic" out
'
test_expect_success 'rebase --stat' '
diff --git a/t/t3409-rebase-preserve-merges.sh b/t/t3409-rebase-preserve-merges.sh
index 6de4e2263f..2e0c36415f 100755
--- a/t/t3409-rebase-preserve-merges.sh
+++ b/t/t3409-rebase-preserve-merges.sh
@@ -11,14 +11,6 @@ Run "git rebase -p" and check that merges are properly carried along
GIT_AUTHOR_EMAIL=bogus_email_address
export GIT_AUTHOR_EMAIL
-# Clone 1 (trivial merge):
-#
-# A1--A2 <-- origin/master
-# \ \
-# B1--M <-- topic
-# \
-# B2 <-- origin/topic
-#
# Clone 2 (conflicting merge):
#
# A1--A2--B3 <-- origin/master
@@ -36,16 +28,6 @@ export GIT_AUTHOR_EMAIL
# \--A3 <-- topic2
# \
# B2 <-- origin/topic
-#
-# Clone 4 (merge using second parent as base):
-#
-# A1--A2--B3 <-- origin/master
-# \
-# B1--A3--M <-- topic
-# \ /
-# \--A4 <-- topic2
-# \
-# B2 <-- origin/topic
test_expect_success 'setup for merge-preserving rebase' \
'echo First > A &&
@@ -58,20 +40,6 @@ test_expect_success 'setup for merge-preserving rebase' \
git checkout -f master &&
echo Third >> A &&
git commit -a -m "Modify A2" &&
-
- git clone ./. clone1 &&
- (cd clone1 &&
- git checkout -b topic origin/topic &&
- git merge origin/master
- ) &&
-
- git clone ./. clone4 &&
- (
- cd clone4 &&
- git checkout -b topic origin/topic &&
- git merge origin/master
- ) &&
-
echo Fifth > B &&
git add B &&
git commit -m "Add different B" &&
@@ -101,16 +69,6 @@ test_expect_success 'setup for merge-preserving rebase' \
git commit -a -m "Modify B2"
'
-test_expect_success 'rebase -p fakes interactive rebase' '
- (
- cd clone1 &&
- git fetch &&
- git rebase -p origin/topic &&
- test 1 = $(git rev-list --all --pretty=oneline | grep "Modify A" | wc -l) &&
- test 1 = $(git rev-list --all --pretty=oneline | grep "Merge remote-tracking branch " | wc -l)
- )
-'
-
test_expect_success '--continue works after a conflict' '
(
cd clone2 &&
@@ -138,15 +96,4 @@ test_expect_success 'rebase -p preserves no-ff merges' '
)
'
-test_expect_success 'rebase -p works when base inside second parent' '
- (
- cd clone4 &&
- git fetch &&
- git rebase -p HEAD^2 &&
- test 1 = $(git rev-list --all --pretty=oneline | grep "Modify A" | wc -l) &&
- test 1 = $(git rev-list --all --pretty=oneline | grep "Modify B" | wc -l) &&
- test 1 = $(git rev-list --all --pretty=oneline | grep "Merge remote-tracking branch " | wc -l)
- )
-'
-
test_done
diff --git a/t/t3420-rebase-autostash.sh b/t/t3420-rebase-autostash.sh
index 479cbb215f..90eb26493c 100755
--- a/t/t3420-rebase-autostash.sh
+++ b/t/t3420-rebase-autostash.sh
@@ -141,6 +141,28 @@ testrebase() {
'
}
+test_expect_success "rebase: fast-forward rebase" '
+ test_config rebase.autostash true &&
+ git reset --hard &&
+ git checkout -b behind-feature-branch feature-branch~1 &&
+ test_when_finished git branch -D behind-feature-branch &&
+ echo dirty >>file1 &&
+ git rebase feature-branch &&
+ grep dirty file1 &&
+ git checkout feature-branch
+'
+
+test_expect_success "rebase: noop rebase" '
+ test_config rebase.autostash true &&
+ git reset --hard &&
+ git checkout -b same-feature-branch feature-branch &&
+ test_when_finished git branch -D same-feature-branch &&
+ echo dirty >>file1 &&
+ git rebase feature-branch &&
+ grep dirty file1 &&
+ git checkout feature-branch
+'
+
testrebase "" .git/rebase-apply
testrebase " --merge" .git/rebase-merge
testrebase " --interactive" .git/rebase-merge
diff --git a/t/t3421-rebase-topology-linear.sh b/t/t3421-rebase-topology-linear.sh
new file mode 100755
index 0000000000..9c55cba198
--- /dev/null
+++ b/t/t3421-rebase-topology-linear.sh
@@ -0,0 +1,350 @@
+#!/bin/sh
+
+test_description='basic rebase topology tests'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-rebase.sh
+
+# a---b---c
+# \
+# d---e
+test_expect_success 'setup' '
+ test_commit a &&
+ test_commit b &&
+ test_commit c &&
+ git checkout b &&
+ test_commit d &&
+ test_commit e
+'
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "simple rebase $*" "
+ reset_rebase &&
+ git rebase $* c e &&
+ test_cmp_rev c HEAD~2 &&
+ test_linear_range 'd e' c..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* is no-op if upstream is an ancestor" "
+ reset_rebase &&
+ git rebase $* b e &&
+ test_cmp_rev e HEAD
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* -f rewrites even if upstream is an ancestor" "
+ reset_rebase &&
+ git rebase $* -f b e &&
+ ! test_cmp_rev e HEAD &&
+ test_cmp_rev b HEAD~2 &&
+ test_linear_range 'd e' b..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* fast-forwards from ancestor of upstream" "
+ reset_rebase &&
+ git rebase $* e b &&
+ test_cmp_rev e HEAD
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+# f
+# /
+# a---b---c---g---h
+# \
+# d---gp--i
+#
+# gp = cherry-picked g
+# h = reverted g
+#
+# Reverted patches are there for tests to be able to check if a commit
+# that introduced the same change as another commit is
+# dropped. Without reverted commits, we could get false positives
+# because applying the patch succeeds, but simply results in no
+# changes.
+test_expect_success 'setup of linear history for range selection tests' '
+ git checkout c &&
+ test_commit g &&
+ revert h g &&
+ git checkout d &&
+ cherry_pick gp g &&
+ test_commit i &&
+ git checkout b &&
+ test_commit f
+'
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* drops patches in upstream" "
+ reset_rebase &&
+ git rebase $* h i &&
+ test_cmp_rev h HEAD~2 &&
+ test_linear_range 'd i' h..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* can drop last patch if in upstream" "
+ reset_rebase &&
+ git rebase $* h gp &&
+ test_cmp_rev h HEAD^ &&
+ test_linear_range 'd' h..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --onto drops patches in upstream" "
+ reset_rebase &&
+ git rebase $* --onto f h i &&
+ test_cmp_rev f HEAD~2 &&
+ test_linear_range 'd i' f..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --onto does not drop patches in onto" "
+ reset_rebase &&
+ git rebase $* --onto h f i &&
+ test_cmp_rev h HEAD~3 &&
+ test_linear_range 'd gp i' h..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+# a---b---c---j!
+# \
+# d---k!--l
+#
+# ! = empty
+test_expect_success 'setup of linear history for empty commit tests' '
+ git checkout c &&
+ make_empty j &&
+ git checkout d &&
+ make_empty k &&
+ test_commit l
+'
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* drops empty commit" "
+ reset_rebase &&
+ git rebase $* c l &&
+ test_cmp_rev c HEAD~2 &&
+ test_linear_range 'd l' c..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --keep-empty" "
+ reset_rebase &&
+ git rebase $* --keep-empty c l &&
+ test_cmp_rev c HEAD~3 &&
+ test_linear_range 'd k l' c..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --keep-empty keeps empty even if already in upstream" "
+ reset_rebase &&
+ git rebase $* --keep-empty j l &&
+ test_cmp_rev j HEAD~3 &&
+ test_linear_range 'd k l' j..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase failure -i
+test_run_rebase failure -p
+
+# m
+# /
+# a---b---c---g
+#
+# x---y---bp
+#
+# bp = cherry-picked b
+# m = reverted b
+#
+# Reverted patches are there for tests to be able to check if a commit
+# that introduced the same change as another commit is
+# dropped. Without reverted commits, we could get false positives
+# because applying the patch succeeds, but simply results in no
+# changes.
+test_expect_success 'setup of linear history for test involving root' '
+ git checkout b &&
+ revert m b &&
+ git checkout --orphan disjoint &&
+ git rm -rf . &&
+ test_commit x &&
+ test_commit y &&
+ cherry_pick bp b
+'
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --onto --root" "
+ reset_rebase &&
+ git rebase $* --onto c --root y &&
+ test_cmp_rev c HEAD~2 &&
+ test_linear_range 'x y' c..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* without --onto --root with disjoint history" "
+ reset_rebase &&
+ git rebase $* c y &&
+ test_cmp_rev c HEAD~2 &&
+ test_linear_range 'x y' c..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --onto --root drops patch in onto" "
+ reset_rebase &&
+ git rebase $* --onto m --root bp &&
+ test_cmp_rev m HEAD~2 &&
+ test_linear_range 'x y' m..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --onto --root with merge-base does not go to root" "
+ reset_rebase &&
+ git rebase $* --onto m --root g &&
+ test_cmp_rev m HEAD~2 &&
+ test_linear_range 'c g' m..
+ "
+}
+
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* without --onto --root with disjoint history drops patch in onto" "
+ reset_rebase &&
+ git rebase $* m bp &&
+ test_cmp_rev m HEAD~2 &&
+ test_linear_range 'x y' m..
+ "
+}
+test_run_rebase success ''
+test_run_rebase failure -m
+test_run_rebase success -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* --root on linear history is a no-op" "
+ reset_rebase &&
+ git rebase $* --root c &&
+ test_cmp_rev c HEAD
+ "
+}
+test_run_rebase failure ''
+test_run_rebase failure -m
+test_run_rebase failure -i
+test_run_rebase failure -p
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* -f --root on linear history causes re-write" "
+ reset_rebase &&
+ git rebase $* -f --root c &&
+ ! test_cmp_rev a HEAD~2 &&
+ test_linear_range 'a b c' HEAD
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+test_run_rebase success -p
+
+test_done
diff --git a/t/t3425-rebase-topology-merges.sh b/t/t3425-rebase-topology-merges.sh
new file mode 100755
index 0000000000..1d195fbd64
--- /dev/null
+++ b/t/t3425-rebase-topology-merges.sh
@@ -0,0 +1,258 @@
+#!/bin/sh
+
+test_description='rebase topology tests with merges'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-rebase.sh
+
+test_revision_subjects () {
+ expected="$1"
+ shift
+ set -- $(git log --format=%s --no-walk=unsorted "$@")
+ test "$expected" = "$*"
+}
+
+# a---b-----------c
+# \ \
+# d-------e \
+# \ \ \
+# n---o---w---v
+# \
+# z
+test_expect_success 'setup of non-linear-history' '
+ test_commit a &&
+ test_commit b &&
+ test_commit c &&
+ git checkout b &&
+ test_commit d &&
+	test_commit e &&
+
+ git checkout c &&
+ test_commit g &&
+ revert h g &&
+ git checkout d &&
+ cherry_pick gp g &&
+ test_commit i &&
+ git checkout b &&
+	test_commit f &&
+
+ git checkout d &&
+ test_commit n &&
+ test_commit o &&
+ test_merge w e &&
+ test_merge v c &&
+ git checkout o &&
+ test_commit z
+'
+
+test_run_rebase () {
+ result=$1
+ shift
+ test_expect_$result "rebase $* after merge from upstream" "
+ reset_rebase &&
+ git rebase $* e w &&
+ test_cmp_rev e HEAD~2 &&
+ test_linear_range 'n o' e..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success -m
+test_run_rebase success -i
+
+test_run_rebase () {
+ result=$1
+ shift
+ expected=$1
+ shift
+ test_expect_$result "rebase $* of non-linear history is linearized in place" "
+ reset_rebase &&
+ git rebase $* d w &&
+ test_cmp_rev d HEAD~3 &&
+ test_linear_range "\'"$expected"\'" d..
+ "
+}
+#TODO: make order consistent across all flavors of rebase
+test_run_rebase success 'e n o' ''
+test_run_rebase success 'e n o' -m
+test_run_rebase success 'n o e' -i
+
+test_run_rebase () {
+ result=$1
+ shift
+ expected=$1
+ shift
+ test_expect_$result "rebase $* of non-linear history is linearized upstream" "
+ reset_rebase &&
+ git rebase $* c w &&
+ test_cmp_rev c HEAD~4 &&
+ test_linear_range "\'"$expected"\'" c..
+ "
+}
+#TODO: make order consistent across all flavors of rebase
+test_run_rebase success 'd e n o' ''
+test_run_rebase success 'd e n o' -m
+test_run_rebase success 'd n o e' -i
+
+test_run_rebase () {
+ result=$1
+ shift
+ expected=$1
+ shift
+ test_expect_$result "rebase $* of non-linear history with merges after upstream merge is linearized" "
+ reset_rebase &&
+ git rebase $* c v &&
+ test_cmp_rev c HEAD~4 &&
+ test_linear_range "\'"$expected"\'" c..
+ "
+}
+#TODO: make order consistent across all flavors of rebase
+test_run_rebase success 'd e n o' ''
+test_run_rebase success 'd e n o' -m
+test_run_rebase success 'd n o e' -i
+
+test_expect_success "rebase -p is no-op in non-linear history" "
+ reset_rebase &&
+ git rebase -p d w &&
+ test_cmp_rev w HEAD
+"
+
+test_expect_success "rebase -p is no-op when base inside second parent" "
+ reset_rebase &&
+ git rebase -p e w &&
+ test_cmp_rev w HEAD
+"
+
+test_expect_failure "rebase -p --root on non-linear history is a no-op" "
+ reset_rebase &&
+ git rebase -p --root w &&
+ test_cmp_rev w HEAD
+"
+
+test_expect_success "rebase -p re-creates merge from side branch" "
+ reset_rebase &&
+ git rebase -p z w &&
+ test_cmp_rev z HEAD^ &&
+ test_cmp_rev w^2 HEAD^2
+"
+
+test_expect_success "rebase -p re-creates internal merge" "
+ reset_rebase &&
+ git rebase -p c w &&
+ test_cmp_rev c HEAD~4 &&
+ test_cmp_rev HEAD^2^ HEAD~3 &&
+ test_revision_subjects 'd n e o w' HEAD~3 HEAD~2 HEAD^2 HEAD^ HEAD
+"
+
+test_expect_success "rebase -p can re-create two branches on onto" "
+ reset_rebase &&
+ git rebase -p --onto c d w &&
+ test_cmp_rev c HEAD~3 &&
+ test_cmp_rev c HEAD^2^ &&
+ test_revision_subjects 'n e o w' HEAD~2 HEAD^2 HEAD^ HEAD
+"
+
+# f
+# /
+# a---b---c---g---h
+# \
+# d---gp--i
+# \ \
+# e-------u
+#
+# gp = cherry-picked g
+# h = reverted g
+test_expect_success 'setup of non-linear-history for patch-equivalence tests' '
+ git checkout e &&
+ test_merge u i
+'
+
+test_expect_success "rebase -p re-creates history around dropped commit matching upstream" "
+ reset_rebase &&
+ git rebase -p h u &&
+ test_cmp_rev h HEAD~3 &&
+ test_cmp_rev HEAD^2^ HEAD~2 &&
+ test_revision_subjects 'd i e u' HEAD~2 HEAD^2 HEAD^ HEAD
+"
+
+test_expect_success "rebase -p --onto in merged history drops patches in upstream" "
+ reset_rebase &&
+ git rebase -p --onto f h u &&
+ test_cmp_rev f HEAD~3 &&
+ test_cmp_rev HEAD^2^ HEAD~2 &&
+ test_revision_subjects 'd i e u' HEAD~2 HEAD^2 HEAD^ HEAD
+"
+
+test_expect_success "rebase -p --onto in merged history does not drop patches in onto" "
+ reset_rebase &&
+ git rebase -p --onto h f u &&
+ test_cmp_rev h HEAD~3 &&
+ test_cmp_rev HEAD^2~2 HEAD~2 &&
+ test_revision_subjects 'd gp i e u' HEAD~2 HEAD^2^ HEAD^2 HEAD^ HEAD
+"
+
+# a---b---c---g---h
+# \
+# d---gp--s
+# \ \ /
+# \ X
+# \ / \
+# e---t
+#
+# gp = cherry-picked g
+# h = reverted g
+test_expect_success 'setup of non-linear-history for dropping whole side' '
+ git checkout gp &&
+ test_merge s e &&
+ git checkout e &&
+ test_merge t gp
+'
+
+test_expect_failure "rebase -p drops merge commit when entire first-parent side is dropped" "
+ reset_rebase &&
+ git rebase -p h s &&
+ test_cmp_rev h HEAD~2 &&
+ test_linear_range 'd e' h..
+"
+
+test_expect_success "rebase -p drops merge commit when entire second-parent side is dropped" "
+ reset_rebase &&
+ git rebase -p h t &&
+ test_cmp_rev h HEAD~2 &&
+ test_linear_range 'd e' h..
+"
+
+# a---b---c
+# \
+# d---e
+# \ \
+# n---r
+# \
+# o
+#
+# r = tree-same with n
+test_expect_success 'setup of non-linear-history for empty commits' '
+ git checkout n &&
+ git merge --no-commit e &&
+ git reset n . &&
+ git commit -m r &&
+ git reset --hard &&
+ git clean -f &&
+ git tag r
+'
+
+test_expect_success "rebase -p re-creates empty internal merge commit" "
+ reset_rebase &&
+ git rebase -p c r &&
+ test_cmp_rev c HEAD~3 &&
+ test_cmp_rev HEAD^2^ HEAD~2 &&
+ test_revision_subjects 'd e n r' HEAD~2 HEAD^2 HEAD^ HEAD
+"
+
+test_expect_success "rebase -p re-creates empty merge commit" "
+ reset_rebase &&
+ git rebase -p o r &&
+ test_cmp_rev e HEAD^2 &&
+ test_cmp_rev o HEAD^ &&
+ test_revision_subjects 'r' HEAD
+"
+
+test_done
diff --git a/t/t3600-rm.sh b/t/t3600-rm.sh
index 0c44e9f5d0..5c87b55645 100755
--- a/t/t3600-rm.sh
+++ b/t/t3600-rm.sh
@@ -687,4 +687,100 @@ test_expect_failure SYMLINKS 'rm across a symlinked leading path (w/ index)' '
test_path_is_file e/f
'
+test_expect_success 'setup for testing rm messages' '
+ >bar.txt &&
+ >foo.txt &&
+ git add bar.txt foo.txt
+'
+
+test_expect_success 'rm files with different staged content' '
+ cat >expect <<-\EOF &&
+ error: the following files have staged content different from both the
+ file and the HEAD:
+ bar.txt
+ foo.txt
+ (use -f to force removal)
+ EOF
+ echo content1 >foo.txt &&
+ echo content1 >bar.txt &&
+ test_must_fail git rm foo.txt bar.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm files with different staged content without hints' '
+ cat >expect <<-\EOF &&
+ error: the following files have staged content different from both the
+ file and the HEAD:
+ bar.txt
+ foo.txt
+ EOF
+ echo content2 >foo.txt &&
+ echo content2 >bar.txt &&
+ test_must_fail git -c advice.rmhints=false rm foo.txt bar.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm file with local modification' '
+ cat >expect <<-\EOF &&
+ error: the following file has local modifications:
+ foo.txt
+ (use --cached to keep the file, or -f to force removal)
+ EOF
+ git commit -m "testing rm 3" &&
+ echo content3 >foo.txt &&
+ test_must_fail git rm foo.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm file with local modification without hints' '
+ cat >expect <<-\EOF &&
+ error: the following file has local modifications:
+ bar.txt
+ EOF
+ echo content4 >bar.txt &&
+ test_must_fail git -c advice.rmhints=false rm bar.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm file with changes in the index' '
+ cat >expect <<-\EOF &&
+ error: the following file has changes staged in the index:
+ foo.txt
+ (use --cached to keep the file, or -f to force removal)
+ EOF
+ git reset --hard &&
+ echo content5 >foo.txt &&
+ git add foo.txt &&
+ test_must_fail git rm foo.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm file with changes in the index without hints' '
+ cat >expect <<-\EOF &&
+ error: the following file has changes staged in the index:
+ foo.txt
+ EOF
+ test_must_fail git -c advice.rmhints=false rm foo.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'rm files with two different errors' '
+ cat >expect <<-\EOF &&
+ error: the following file has staged content different from both the
+ file and the HEAD:
+ foo1.txt
+ (use -f to force removal)
+ error: the following file has changes staged in the index:
+ bar1.txt
+ (use --cached to keep the file, or -f to force removal)
+ EOF
+ echo content >foo1.txt &&
+ git add foo1.txt &&
+ echo content6 >foo1.txt &&
+ echo content6 >bar1.txt &&
+ git add bar1.txt &&
+ test_must_fail git rm bar1.txt foo1.txt 2>actual &&
+ test_i18ncmp expect actual
+'
+
test_done
diff --git a/t/t3903-stash.sh b/t/t3903-stash.sh
index 634b2b74f4..debda7a678 100755
--- a/t/t3903-stash.sh
+++ b/t/t3903-stash.sh
@@ -654,4 +654,23 @@ test_expect_success 'stash where working directory contains "HEAD" file' '
test_cmp output expect
'
+test_expect_success 'store called with invalid commit' '
+ test_must_fail git stash store foo
+'
+
+test_expect_success 'store updates stash ref and reflog' '
+ git stash clear &&
+ git reset --hard &&
+ echo quux >bazzy &&
+ git add bazzy &&
+ STASH_ID=$(git stash create) &&
+ git reset --hard &&
+ ! grep quux bazzy &&
+ git stash store -m quuxery $STASH_ID &&
+ test $(cat .git/refs/stash) = $STASH_ID &&
+ grep $STASH_ID .git/logs/refs/stash &&
+ git stash pop &&
+ grep quux bazzy
+'
+
test_done
diff --git a/t/t4015-diff-whitespace.sh b/t/t4015-diff-whitespace.sh
index cc3db1304e..3fb4b976a2 100755
--- a/t/t4015-diff-whitespace.sh
+++ b/t/t4015-diff-whitespace.sh
@@ -142,6 +142,351 @@ EOF
git diff --ignore-space-at-eol > out
test_expect_success 'another test, with --ignore-space-at-eol' 'test_cmp expect out'
+test_expect_success 'ignore-blank-lines: only new lines' '
+ test_seq 5 >x &&
+ git update-index x &&
+ test_seq 5 | sed "/3/i \\
+" >x &&
+ git diff --ignore-blank-lines >out &&
+ >expect &&
+ test_cmp out expect
+'
+
+test_expect_success 'ignore-blank-lines: only new lines with space' '
+ test_seq 5 >x &&
+ git update-index x &&
+ test_seq 5 | sed "/3/i \ " >x &&
+ git diff -w --ignore-blank-lines >out &&
+ >expect &&
+ test_cmp out expect
+'
+
+test_expect_success 'ignore-blank-lines: after change' '
+ cat <<-\EOF >x &&
+ 1
+ 2
+
+ 3
+ 4
+ 5
+
+ 6
+ 7
+ EOF
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+
+ 7
+ EOF
+ git diff --inter-hunk-context=100 --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,6 +1,7 @@
+ +change
+ +
+ 1
+ 2
+ -
+ 3
+ 4
+ 5
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: before change' '
+ cat <<-\EOF >x &&
+ 1
+ 2
+
+ 3
+ 4
+ 5
+ 6
+ 7
+ EOF
+ git update-index x &&
+ cat <<-\EOF >x &&
+
+ 1
+ 2
+ 3
+ 4
+ 5
+
+ 6
+ 7
+ change
+ EOF
+ git diff --inter-hunk-context=100 --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -4,5 +4,7 @@
+ 3
+ 4
+ 5
+ +
+ 6
+ 7
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: between changes' '
+ cat <<-\EOF >x &&
+ 1
+ 2
+ 3
+ 4
+ 5
+
+
+ 6
+ 7
+ 8
+ 9
+ 10
+ EOF
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+ 1
+ 2
+
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+
+ 9
+ 10
+ change
+ EOF
+ git diff --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,5 +1,7 @@
+ +change
+ 1
+ 2
+ +
+ 3
+ 4
+ 5
+ @@ -8,5 +8,7 @@
+ 6
+ 7
+ 8
+ +
+ 9
+ 10
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: between changes (with interhunkctx)' '
+ test_seq 10 >x &&
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+ 1
+ 2
+
+ 3
+ 4
+ 5
+
+ 6
+ 7
+ 8
+ 9
+
+ 10
+ change
+ EOF
+ git diff --inter-hunk-context=2 --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,10 +1,15 @@
+ +change
+ 1
+ 2
+ +
+ 3
+ 4
+ 5
+ +
+ 6
+ 7
+ 8
+ 9
+ +
+ 10
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: scattered spaces' '
+ test_seq 10 >x &&
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+ 1
+ 2
+ 3
+
+ 4
+
+ 5
+
+ 6
+
+
+ 7
+
+ 8
+ 9
+ 10
+ change
+ EOF
+ git diff --inter-hunk-context=4 --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,3 +1,4 @@
+ +change
+ 1
+ 2
+ 3
+ @@ -8,3 +15,4 @@
+ 8
+ 9
+ 10
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: spaces coalesce' '
+ test_seq 6 >x &&
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+ 1
+ 2
+ 3
+
+ 4
+
+ 5
+
+ 6
+ change
+ EOF
+ git diff --inter-hunk-context=4 --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,6 +1,11 @@
+ +change
+ 1
+ 2
+ 3
+ +
+ 4
+ +
+ 5
+ +
+ 6
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
+test_expect_success 'ignore-blank-lines: mix changes and blank lines' '
+ test_seq 16 >x &&
+ git update-index x &&
+ cat <<-\EOF >x &&
+ change
+ 1
+ 2
+
+ 3
+ 4
+ 5
+ change
+ 6
+ 7
+ 8
+
+ 9
+ 10
+ 11
+ change
+ 12
+ 13
+ 14
+
+ 15
+ 16
+ change
+ EOF
+ git diff --ignore-blank-lines >out.tmp &&
+ cat <<-\EOF >expected &&
+ diff --git a/x b/x
+ --- a/x
+ +++ b/x
+ @@ -1,8 +1,11 @@
+ +change
+ 1
+ 2
+ +
+ 3
+ 4
+ 5
+ +change
+ 6
+ 7
+ 8
+ @@ -9,8 +13,11 @@
+ 9
+ 10
+ 11
+ +change
+ 12
+ 13
+ 14
+ +
+ 15
+ 16
+ +change
+ EOF
+ compare_diff_patch expected out.tmp
+'
+
test_expect_success 'check mixed spaces and tabs in indent' '
# This is indented with SP HT SP.
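The t4015 additions exercise the new --ignore-blank-lines option, on its own and together with --inter-hunk-context. My reading of the expected outputs: hunks consisting only of added or removed blank lines are suppressed when they lie far from other changes, and are folded into a neighbouring hunk when they fall inside the context window. A quick way to reproduce the behaviours the tests encode (the working-tree state is illustrative):

    git diff --ignore-blank-lines                           # hunks that only add or remove blank lines disappear
    git diff --inter-hunk-context=2 --ignore-blank-lines    # nearby blank-line changes are pulled into the surrounding hunk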
diff --git a/t/t4111-apply-subdir.sh b/t/t4111-apply-subdir.sh
index 7c398432ba..1618a6dbc7 100755
--- a/t/t4111-apply-subdir.sh
+++ b/t/t4111-apply-subdir.sh
@@ -86,6 +86,20 @@ test_expect_success 'apply --index from subdir of toplevel' '
test_cmp expected sub/dir/file
'
+test_expect_success 'apply half-broken patch from subdir of toplevel' '
+ (
+ cd sub/dir &&
+ test_must_fail git apply <<-EOF
+ --- sub/dir/file
+ +++ sub/dir/file
+ @@ -1,0 +1,0 @@
+ --- file_in_root
+ +++ file_in_root
+ @@ -1,0 +1,0 @@
+ EOF
+ )
+'
+
test_expect_success 'apply from .git dir' '
cp postimage expected &&
cp preimage .git/file &&
diff --git a/t/t4150-am.sh b/t/t4150-am.sh
index 12f6b027ac..5edb79a058 100755
--- a/t/t4150-am.sh
+++ b/t/t4150-am.sh
@@ -147,7 +147,7 @@ test_expect_success 'am applies patch correctly' '
git checkout first &&
test_tick &&
git am <patch1 &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code second &&
test "$(git rev-parse second)" = "$(git rev-parse HEAD)" &&
test "$(git rev-parse second^)" = "$(git rev-parse HEAD^)"
@@ -158,7 +158,7 @@ test_expect_success 'am applies patch e-mail not in a mbox' '
git reset --hard &&
git checkout first &&
git am patch1.eml &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code second &&
test "$(git rev-parse second)" = "$(git rev-parse HEAD)" &&
test "$(git rev-parse second^)" = "$(git rev-parse HEAD^)"
@@ -169,7 +169,7 @@ test_expect_success 'am applies patch e-mail not in a mbox with CRLF' '
git reset --hard &&
git checkout first &&
git am patch1-crlf.eml &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code second &&
test "$(git rev-parse second)" = "$(git rev-parse HEAD)" &&
test "$(git rev-parse second^)" = "$(git rev-parse HEAD^)"
@@ -180,7 +180,7 @@ test_expect_success 'am applies patch e-mail with preceding whitespace' '
git reset --hard &&
git checkout first &&
git am patch1-ws.eml &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code second &&
test "$(git rev-parse second)" = "$(git rev-parse HEAD)" &&
test "$(git rev-parse second^)" = "$(git rev-parse HEAD^)"
@@ -206,7 +206,7 @@ test_expect_success 'am changes committer and keeps author' '
git reset --hard &&
git checkout first &&
git am patch2 &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
test "$(git rev-parse master^^)" = "$(git rev-parse HEAD^^)" &&
git diff --exit-code master..HEAD &&
git diff --exit-code master^..HEAD^ &&
@@ -258,7 +258,7 @@ test_expect_success 'am --keep really keeps the subject' '
git reset --hard &&
git checkout HEAD^ &&
git am --keep patch4 &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git cat-file commit HEAD >actual &&
grep "Re: Re: Re: \[PATCH 1/5 v2\] \[foo\] third" actual
'
@@ -268,7 +268,7 @@ test_expect_success 'am --keep-non-patch really keeps the non-patch part' '
git reset --hard &&
git checkout HEAD^ &&
git am --keep-non-patch patch4 &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git cat-file commit HEAD >actual &&
grep "^\[foo\] third" actual
'
@@ -283,7 +283,7 @@ test_expect_success 'am -3 falls back to 3-way merge' '
test_tick &&
git commit -m "copied stuff" &&
git am -3 lorem-move.patch &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code lorem
'
@@ -297,7 +297,7 @@ test_expect_success 'am -3 -p0 can read --no-prefix patch' '
test_tick &&
git commit -m "copied stuff" &&
git am -3 -p0 lorem-zero.patch &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code lorem
'
@@ -307,7 +307,7 @@ test_expect_success 'am can rename a file' '
git reset --hard &&
git checkout lorem^0 &&
git am rename.patch &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git update-index --refresh &&
git diff --exit-code rename
'
@@ -318,7 +318,7 @@ test_expect_success 'am -3 can rename a file' '
git reset --hard &&
git checkout lorem^0 &&
git am -3 rename.patch &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git update-index --refresh &&
git diff --exit-code rename
'
@@ -329,7 +329,7 @@ test_expect_success 'am -3 can rename a file after falling back to 3-way merge'
git reset --hard &&
git checkout lorem^0 &&
git am -3 rename-add.patch &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git update-index --refresh &&
git diff --exit-code rename
'
@@ -358,11 +358,17 @@ test_expect_success 'am pauses on conflict' '
test_expect_success 'am --skip works' '
echo goodbye >expected &&
git am --skip &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code lorem2^^ -- file &&
test_cmp expected another
'
+test_expect_success 'am --abort removes a stray directory' '
+ mkdir .git/rebase-apply &&
+ git am --abort &&
+ test_path_is_missing .git/rebase-apply
+'
+
test_expect_success 'am --resolved works' '
echo goodbye >expected &&
rm -fr .git/rebase-apply &&
@@ -373,7 +379,7 @@ test_expect_success 'am --resolved works' '
echo resolved >>file &&
git add file &&
git am --resolved &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
test_cmp expected another
'
@@ -382,7 +388,7 @@ test_expect_success 'am takes patches from a Pine mailbox' '
git reset --hard &&
git checkout first &&
cat pine patch1 | git am &&
- ! test -d .git/rebase-apply &&
+ test_path_is_missing .git/rebase-apply &&
git diff --exit-code master^..HEAD
'
@@ -391,7 +397,7 @@ test_expect_success 'am fails on mail without patch' '
git reset --hard &&
test_must_fail git am <failmail &&
git am --abort &&
- ! test -d .git/rebase-apply
+ test_path_is_missing .git/rebase-apply
'
test_expect_success 'am fails on empty patch' '
@@ -400,7 +406,7 @@ test_expect_success 'am fails on empty patch' '
echo "---" >>failmail &&
test_must_fail git am <failmail &&
git am --skip &&
- ! test -d .git/rebase-apply
+ test_path_is_missing .git/rebase-apply
'
test_expect_success 'am works from stdin in subdirectory' '
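The t4150 hunks replace "! test -d .git/rebase-apply" with test_path_is_missing, which reports what it found instead of failing silently. A simplified sketch of the helper's behaviour, written from memory of test-lib-functions.sh rather than quoted from it:

    test_path_is_missing () {
    	if test -e "$1"
    	then
    		echo "Path exists: $1"
    		ls -ld "$1"
    		false
    	fi
    }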
diff --git a/t/t5150-request-pull.sh b/t/t5150-request-pull.sh
index 432f98c357..1afa0d5c44 100755
--- a/t/t5150-request-pull.sh
+++ b/t/t5150-request-pull.sh
@@ -80,12 +80,12 @@ test_expect_success 'setup: two scripts for reading pull requests' '
cat <<-EOT >fuzz.sed
#!/bin/sed -nf
+ s/$downstream_url_for_sed/URL/g
s/$_x40/OBJECT_NAME/g
s/A U Thor/AUTHOR/g
s/[-0-9]\{10\} [:0-9]\{8\} [-+][0-9]\{4\}/DATE/g
s/ [^ ].*/ SUBJECT/g
s/ [^ ].* (DATE)/ SUBJECT (DATE)/g
- s/$downstream_url_for_sed/URL/g
s/for-upstream/BRANCH/g
s/mnemonic.txt/FILENAME/g
s/^version [0-9]/VERSION/
diff --git a/t/t5303-pack-corruption-resilience.sh b/t/t5303-pack-corruption-resilience.sh
index 5b1250f0d2..35926debe3 100755
--- a/t/t5303-pack-corruption-resilience.sh
+++ b/t/t5303-pack-corruption-resilience.sh
@@ -51,7 +51,7 @@ do_corrupt_object() {
ofs=`git show-index < ${pack}.idx | grep $1 | cut -f1 -d" "` &&
ofs=$(($ofs + $2)) &&
chmod +w ${pack}.pack &&
- dd of=${pack}.pack count=1 bs=1 conv=notrunc seek=$ofs &&
+ dd of=${pack}.pack bs=1 conv=notrunc seek=$ofs &&
test_must_fail git verify-pack ${pack}.pack
}
@@ -276,6 +276,33 @@ test_expect_success \
git cat-file blob $blob_3 > /dev/null'
test_expect_success \
+ 'corruption of delta base reference pointing to wrong object' \
+ 'create_new_pack --delta-base-offset &&
+ git prune-packed &&
+ printf "\220\033" | do_corrupt_object $blob_3 2 &&
+ git cat-file blob $blob_1 >/dev/null &&
+ git cat-file blob $blob_2 >/dev/null &&
+ test_must_fail git cat-file blob $blob_3 >/dev/null'
+
+test_expect_success \
+ '... but having a loose copy allows for full recovery' \
+ 'mv ${pack}.idx tmp &&
+ git hash-object -t blob -w file_3 &&
+ mv tmp ${pack}.idx &&
+ git cat-file blob $blob_1 > /dev/null &&
+ git cat-file blob $blob_2 > /dev/null &&
+ git cat-file blob $blob_3 > /dev/null'
+
+test_expect_success \
+ '... and then a repack "clears" the corruption' \
+ 'do_repack --delta-base-offset --no-reuse-delta &&
+ git prune-packed &&
+ git verify-pack ${pack}.pack &&
+ git cat-file blob $blob_1 > /dev/null &&
+ git cat-file blob $blob_2 > /dev/null &&
+ git cat-file blob $blob_3 > /dev/null'
+
+test_expect_success \
'corrupting header to have too small output buffer fails unpack' \
'create_new_pack &&
git prune-packed &&
diff --git a/t/t5520-pull.sh b/t/t5520-pull.sh
index 6af6c63350..ed4d9c8318 100755
--- a/t/t5520-pull.sh
+++ b/t/t5520-pull.sh
@@ -57,6 +57,35 @@ test_expect_success 'pulling into void does not overwrite untracked files' '
)
'
+test_expect_success 'pulling into void does not overwrite staged files' '
+ git init cloned-staged-colliding &&
+ (
+ cd cloned-staged-colliding &&
+ echo "alternate content" >file &&
+ git add file &&
+ test_must_fail git pull .. master &&
+ echo "alternate content" >expect &&
+ test_cmp expect file &&
+ git cat-file blob :file >file.index &&
+ test_cmp expect file.index
+ )
+'
+
+
+test_expect_success 'pulling into void does not remove new staged files' '
+ git init cloned-staged-new &&
+ (
+ cd cloned-staged-new &&
+ echo "new tracked file" >newfile &&
+ git add newfile &&
+ git pull .. master &&
+ echo "new tracked file" >expect &&
+ test_cmp expect newfile &&
+ git cat-file blob :newfile >newfile.index &&
+ test_cmp expect newfile.index
+ )
+'
+
test_expect_success 'test . as a remote' '
git branch copy master &&
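The two new "pulling into void" tests read the staged copy of a file with git cat-file blob :file; the :path notation names the stage-0 (index) version of a path, so the tests verify that the index entry as well as the worktree file survived the pull. For reference (the file name is illustrative):

    git cat-file blob :newfile >newfile.index    # contents as recorded in the index
    git show :newfile                            # same blob, shorthand form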
diff --git a/t/t6002-rev-list-bisect.sh b/t/t6002-rev-list-bisect.sh
index fb07536a0f..43ad772484 100755
--- a/t/t6002-rev-list-bisect.sh
+++ b/t/t6002-rev-list-bisect.sh
@@ -39,25 +39,25 @@ test_bisection_diff()
date >path0
git update-index --add path0
save_tag tree git write-tree
-on_committer_date "1971-08-16 00:00:00" hide_error save_tag root unique_commit root tree
-on_committer_date "1971-08-16 00:00:01" save_tag l0 unique_commit l0 tree -p root
-on_committer_date "1971-08-16 00:00:02" save_tag l1 unique_commit l1 tree -p l0
-on_committer_date "1971-08-16 00:00:03" save_tag l2 unique_commit l2 tree -p l1
-on_committer_date "1971-08-16 00:00:04" save_tag a0 unique_commit a0 tree -p l2
-on_committer_date "1971-08-16 00:00:05" save_tag a1 unique_commit a1 tree -p a0
-on_committer_date "1971-08-16 00:00:06" save_tag b1 unique_commit b1 tree -p a0
-on_committer_date "1971-08-16 00:00:07" save_tag c1 unique_commit c1 tree -p b1
-on_committer_date "1971-08-16 00:00:08" save_tag b2 unique_commit b2 tree -p b1
-on_committer_date "1971-08-16 00:00:09" save_tag b3 unique_commit b2 tree -p b2
-on_committer_date "1971-08-16 00:00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
-on_committer_date "1971-08-16 00:00:11" save_tag c3 unique_commit c3 tree -p c2
-on_committer_date "1971-08-16 00:00:12" save_tag a2 unique_commit a2 tree -p a1
-on_committer_date "1971-08-16 00:00:13" save_tag a3 unique_commit a3 tree -p a2
-on_committer_date "1971-08-16 00:00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
-on_committer_date "1971-08-16 00:00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
-on_committer_date "1971-08-16 00:00:16" save_tag l3 unique_commit l3 tree -p a4
-on_committer_date "1971-08-16 00:00:17" save_tag l4 unique_commit l4 tree -p l3
-on_committer_date "1971-08-16 00:00:18" save_tag l5 unique_commit l5 tree -p l4
+on_committer_date "00:00" hide_error save_tag root unique_commit root tree
+on_committer_date "00:01" save_tag l0 unique_commit l0 tree -p root
+on_committer_date "00:02" save_tag l1 unique_commit l1 tree -p l0
+on_committer_date "00:03" save_tag l2 unique_commit l2 tree -p l1
+on_committer_date "00:04" save_tag a0 unique_commit a0 tree -p l2
+on_committer_date "00:05" save_tag a1 unique_commit a1 tree -p a0
+on_committer_date "00:06" save_tag b1 unique_commit b1 tree -p a0
+on_committer_date "00:07" save_tag c1 unique_commit c1 tree -p b1
+on_committer_date "00:08" save_tag b2 unique_commit b2 tree -p b1
+on_committer_date "00:09" save_tag b3 unique_commit b2 tree -p b2
+on_committer_date "00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
+on_committer_date "00:11" save_tag c3 unique_commit c3 tree -p c2
+on_committer_date "00:12" save_tag a2 unique_commit a2 tree -p a1
+on_committer_date "00:13" save_tag a3 unique_commit a3 tree -p a2
+on_committer_date "00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
+on_committer_date "00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
+on_committer_date "00:16" save_tag l3 unique_commit l3 tree -p a4
+on_committer_date "00:17" save_tag l4 unique_commit l4 tree -p l3
+on_committer_date "00:18" save_tag l5 unique_commit l5 tree -p l4
git update-ref HEAD $(tag l5)
@@ -90,29 +90,29 @@ git update-ref HEAD $(tag l5)
# F
-on_committer_date "1971-08-16 00:00:00" hide_error save_tag F unique_commit F tree
-on_committer_date "1971-08-16 00:00:01" save_tag e8 unique_commit e8 tree -p F
-on_committer_date "1971-08-16 00:00:02" save_tag e7 unique_commit e7 tree -p e8
-on_committer_date "1971-08-16 00:00:03" save_tag e6 unique_commit e6 tree -p e7
-on_committer_date "1971-08-16 00:00:04" save_tag e5 unique_commit e5 tree -p e6
-on_committer_date "1971-08-16 00:00:05" save_tag f4 unique_commit f4 tree -p F
-on_committer_date "1971-08-16 00:00:06" save_tag f3 unique_commit f3 tree -p f4
-on_committer_date "1971-08-16 00:00:07" save_tag f2 unique_commit f2 tree -p f3
-on_committer_date "1971-08-16 00:00:08" save_tag f1 unique_commit f1 tree -p f2
-on_committer_date "1971-08-16 00:00:09" save_tag e4 unique_commit e4 tree -p e5
-on_committer_date "1971-08-16 00:00:10" save_tag e3 unique_commit e3 tree -p e4
-on_committer_date "1971-08-16 00:00:11" save_tag e2 unique_commit e2 tree -p e3
-on_committer_date "1971-08-16 00:00:12" save_tag e1 unique_commit e1 tree -p e2
-on_committer_date "1971-08-16 00:00:13" save_tag E unique_commit E tree -p e1 -p f1
-
-on_committer_date "1971-08-16 00:00:00" hide_error save_tag U unique_commit U tree
-on_committer_date "1971-08-16 00:00:01" save_tag u0 unique_commit u0 tree -p U
-on_committer_date "1971-08-16 00:00:01" save_tag u1 unique_commit u1 tree -p u0
-on_committer_date "1971-08-16 00:00:02" save_tag u2 unique_commit u2 tree -p u0
-on_committer_date "1971-08-16 00:00:03" save_tag u3 unique_commit u3 tree -p u0
-on_committer_date "1971-08-16 00:00:04" save_tag u4 unique_commit u4 tree -p u0
-on_committer_date "1971-08-16 00:00:05" save_tag u5 unique_commit u5 tree -p u0
-on_committer_date "1971-08-16 00:00:06" save_tag V unique_commit V tree -p u1 -p u2 -p u3 -p u4 -p u5
+on_committer_date "00:00" hide_error save_tag F unique_commit F tree
+on_committer_date "00:01" save_tag e8 unique_commit e8 tree -p F
+on_committer_date "00:02" save_tag e7 unique_commit e7 tree -p e8
+on_committer_date "00:03" save_tag e6 unique_commit e6 tree -p e7
+on_committer_date "00:04" save_tag e5 unique_commit e5 tree -p e6
+on_committer_date "00:05" save_tag f4 unique_commit f4 tree -p F
+on_committer_date "00:06" save_tag f3 unique_commit f3 tree -p f4
+on_committer_date "00:07" save_tag f2 unique_commit f2 tree -p f3
+on_committer_date "00:08" save_tag f1 unique_commit f1 tree -p f2
+on_committer_date "00:09" save_tag e4 unique_commit e4 tree -p e5
+on_committer_date "00:10" save_tag e3 unique_commit e3 tree -p e4
+on_committer_date "00:11" save_tag e2 unique_commit e2 tree -p e3
+on_committer_date "00:12" save_tag e1 unique_commit e1 tree -p e2
+on_committer_date "00:13" save_tag E unique_commit E tree -p e1 -p f1
+
+on_committer_date "00:00" hide_error save_tag U unique_commit U tree
+on_committer_date "00:01" save_tag u0 unique_commit u0 tree -p U
+on_committer_date "00:01" save_tag u1 unique_commit u1 tree -p u0
+on_committer_date "00:02" save_tag u2 unique_commit u2 tree -p u0
+on_committer_date "00:03" save_tag u3 unique_commit u3 tree -p u0
+on_committer_date "00:04" save_tag u4 unique_commit u4 tree -p u0
+on_committer_date "00:05" save_tag u5 unique_commit u5 tree -p u0
+on_committer_date "00:06" save_tag V unique_commit V tree -p u1 -p u2 -p u3 -p u4 -p u5
test_sequence()
{
diff --git a/t/t6003-rev-list-topo-order.sh b/t/t6003-rev-list-topo-order.sh
index e4c52b0214..24d1836f41 100755
--- a/t/t6003-rev-list-topo-order.sh
+++ b/t/t6003-rev-list-topo-order.sh
@@ -16,39 +16,34 @@ list_duplicates()
date >path0
git update-index --add path0
save_tag tree git write-tree
-on_committer_date "1971-08-16 00:00:00" hide_error save_tag root unique_commit root tree
-on_committer_date "1971-08-16 00:00:01" save_tag l0 unique_commit l0 tree -p root
-on_committer_date "1971-08-16 00:00:02" save_tag l1 unique_commit l1 tree -p l0
-on_committer_date "1971-08-16 00:00:03" save_tag l2 unique_commit l2 tree -p l1
-on_committer_date "1971-08-16 00:00:04" save_tag a0 unique_commit a0 tree -p l2
-on_committer_date "1971-08-16 00:00:05" save_tag a1 unique_commit a1 tree -p a0
-on_committer_date "1971-08-16 00:00:06" save_tag b1 unique_commit b1 tree -p a0
-on_committer_date "1971-08-16 00:00:07" save_tag c1 unique_commit c1 tree -p b1
-on_committer_date "1971-08-16 00:00:08" as_author foobar@example.com save_tag b2 unique_commit b2 tree -p b1
-on_committer_date "1971-08-16 00:00:09" save_tag b3 unique_commit b3 tree -p b2
-on_committer_date "1971-08-16 00:00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
-on_committer_date "1971-08-16 00:00:11" save_tag c3 unique_commit c3 tree -p c2
-on_committer_date "1971-08-16 00:00:12" save_tag a2 unique_commit a2 tree -p a1
-on_committer_date "1971-08-16 00:00:13" save_tag a3 unique_commit a3 tree -p a2
-on_committer_date "1971-08-16 00:00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
-on_committer_date "1971-08-16 00:00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
-on_committer_date "1971-08-16 00:00:16" save_tag l3 unique_commit l3 tree -p a4
-on_committer_date "1971-08-16 00:00:17" save_tag l4 unique_commit l4 tree -p l3
-on_committer_date "1971-08-16 00:00:18" save_tag l5 unique_commit l5 tree -p l4
-on_committer_date "1971-08-16 00:00:19" save_tag m1 unique_commit m1 tree -p a4 -p c3
-on_committer_date "1971-08-16 00:00:20" save_tag m2 unique_commit m2 tree -p c3 -p a4
-on_committer_date "1971-08-16 00:00:21" hide_error save_tag alt_root unique_commit alt_root tree
-on_committer_date "1971-08-16 00:00:22" save_tag r0 unique_commit r0 tree -p alt_root
-on_committer_date "1971-08-16 00:00:23" save_tag r1 unique_commit r1 tree -p r0
-on_committer_date "1971-08-16 00:00:24" save_tag l5r1 unique_commit l5r1 tree -p l5 -p r1
-on_committer_date "1971-08-16 00:00:25" save_tag r1l5 unique_commit r1l5 tree -p r1 -p l5
+on_dates "00:00" "00:00" hide_error save_tag root unique_commit root tree
+on_dates "00:01" "00:01" save_tag l0 unique_commit l0 tree -p root
+on_dates "00:02" "00:02" save_tag l1 unique_commit l1 tree -p l0
+on_dates "00:03" "00:03" save_tag l2 unique_commit l2 tree -p l1
+on_dates "00:04" "00:04" save_tag a0 unique_commit a0 tree -p l2
+on_dates "00:05" "00:05" save_tag a1 unique_commit a1 tree -p a0
+on_dates "00:06" "00:06" save_tag b1 unique_commit b1 tree -p a0
+on_dates "00:07" "00:07" save_tag c1 unique_commit c1 tree -p b1
+on_dates "00:08" "00:08" as_author foobar@example.com save_tag b2 unique_commit b2 tree -p b1
+on_dates "00:09" "00:09" save_tag b3 unique_commit b3 tree -p b2
+on_dates "00:10" "00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
+on_dates "00:11" "00:11" save_tag c3 unique_commit c3 tree -p c2
+on_dates "00:12" "00:00" save_tag a2 unique_commit a2 tree -p a1
+on_dates "00:13" "00:01" save_tag a3 unique_commit a3 tree -p a2
+on_dates "00:14" "00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
+on_dates "00:15" "00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
+on_dates "00:16" "00:16" save_tag l3 unique_commit l3 tree -p a4
+on_dates "00:17" "00:17" save_tag l4 unique_commit l4 tree -p l3
+on_dates "00:18" "00:18" save_tag l5 unique_commit l5 tree -p l4
+on_dates "00:19" "00:19" save_tag m1 unique_commit m1 tree -p a4 -p c3
+on_dates "00:20" "00:20" save_tag m2 unique_commit m2 tree -p c3 -p a4
+on_dates "00:21" "00:21" hide_error save_tag alt_root unique_commit alt_root tree
+on_dates "00:22" "00:22" save_tag r0 unique_commit r0 tree -p alt_root
+on_dates "00:23" "00:23" save_tag r1 unique_commit r1 tree -p r0
+on_dates "00:24" "00:24" save_tag l5r1 unique_commit l5r1 tree -p l5 -p r1
+on_dates "00:25" "00:25" save_tag r1l5 unique_commit r1l5 tree -p r1 -p l5
-#
-# note: as of 20/6, it isn't possible to create duplicate parents, so this
-# can't be tested.
-#
-#on_committer_date "1971-08-16 00:00:20" save_tag m3 unique_commit m3 tree -p c3 -p a4 -p c3
hide_error save_tag e1 as_author e@example.com unique_commit e1 tree
save_tag e2 as_author e@example.com unique_commit e2 tree -p e1
save_tag f1 as_author f@example.com unique_commit f1 tree -p e1
@@ -105,6 +100,50 @@ l0
root
EOF
+test_output_expect_success 'simple date order' 'git rev-list --date-order HEAD' <<EOF
+l5
+l4
+l3
+a4
+b4
+a3
+a2
+c3
+c2
+b3
+b2
+c1
+b1
+a1
+a0
+l2
+l1
+l0
+root
+EOF
+
+test_output_expect_success 'simple author-date order' 'git rev-list --author-date-order HEAD' <<EOF
+l5
+l4
+l3
+a4
+b4
+c3
+c2
+b3
+b2
+c1
+b1
+a3
+a2
+a1
+a0
+l2
+l1
+l0
+root
+EOF
+
test_output_expect_success 'two diamonds topo order (g6)' 'git rev-list --topo-order g4' <<EOF
g4
h2
diff --git a/t/t7011-skip-worktree-reading.sh b/t/t7011-skip-worktree-reading.sh
index 8f3b54d826..88d60c1ce2 100755
--- a/t/t7011-skip-worktree-reading.sh
+++ b/t/t7011-skip-worktree-reading.sh
@@ -91,12 +91,12 @@ test_expect_success 'update-index --remove' '
test_cmp expected 1
'
-test_expect_success 'ls-files --delete' '
+test_expect_success 'ls-files --deleted' '
setup_absent &&
test -z "$(git ls-files -d)"
'
-test_expect_success 'ls-files --delete' '
+test_expect_success 'ls-files --deleted' '
setup_dirty &&
test -z "$(git ls-files -d)"
'
diff --git a/t/t7400-submodule-basic.sh b/t/t7400-submodule-basic.sh
index f47cc7b604..50e6ad7458 100755
--- a/t/t7400-submodule-basic.sh
+++ b/t/t7400-submodule-basic.sh
@@ -212,6 +212,32 @@ test_expect_success 'submodule add with ./, /.. and // in path' '
test_cmp empty untracked
'
+test_expect_success 'submodule add in subdirectory' '
+ echo "refs/heads/master" >expect &&
+ >empty &&
+
+ mkdir addtest/sub &&
+ (
+ cd addtest/sub &&
+ git submodule add "$submodurl" ../realsubmod3 &&
+ git submodule init
+ ) &&
+
+ rm -f heads head untracked &&
+ inspect addtest/realsubmod3 ../.. &&
+ test_cmp expect heads &&
+ test_cmp expect head &&
+ test_cmp empty untracked
+'
+
+test_expect_success 'submodule add in subdirectory with relative path should fail' '
+ (
+ cd addtest/sub &&
+ test_must_fail git submodule add ../../ submod3 2>../../output.err
+ ) &&
+ test_i18ngrep toplevel output.err
+'
+
test_expect_success 'setup - add an example entry to .gitmodules' '
GIT_CONFIG=.gitmodules \
git config submodule.example.url git://example.com/init.git
@@ -319,6 +345,26 @@ test_expect_success 'status should be "up-to-date" after update' '
grep "^ $rev1" list
'
+test_expect_success 'status "up-to-date" from subdirectory' '
+ mkdir -p sub &&
+ (
+ cd sub &&
+ git submodule status >../list
+ ) &&
+ grep "^ $rev1" list &&
+ grep "\\.\\./init" list
+'
+
+test_expect_success 'status "up-to-date" from subdirectory with path' '
+ mkdir -p sub &&
+ (
+ cd sub &&
+ git submodule status ../init >../list
+ ) &&
+ grep "^ $rev1" list &&
+ grep "\\.\\./init" list
+'
+
test_expect_success 'status should be "modified" after submodule commit' '
(
cd init &&
@@ -399,6 +445,25 @@ test_expect_success 'update --init' '
git rev-parse --resolve-git-dir init/.git
'
+test_expect_success 'update --init from subdirectory' '
+ mv init init2 &&
+ git config -f .gitmodules submodule.example.url "$(pwd)/init2" &&
+ git config --remove-section submodule.example &&
+ test_must_fail git config submodule.example.url &&
+
+ mkdir -p sub &&
+ (
+ cd sub &&
+ git submodule update ../init >update.out &&
+ cat update.out &&
+ test_i18ngrep "not initialized" update.out &&
+ test_must_fail git rev-parse --resolve-git-dir ../init/.git &&
+
+ git submodule update --init ../init
+ ) &&
+ git rev-parse --resolve-git-dir init/.git
+'
+
test_expect_success 'do not add files from a submodule' '
git reset --hard &&
@@ -772,6 +837,21 @@ test_expect_success 'submodule deinit should remove the whole submodule section
rmdir init
'
+test_expect_success 'submodule deinit from subdirectory' '
+ git submodule update --init &&
+ git config submodule.example.foo bar &&
+ mkdir -p sub &&
+ (
+ cd sub &&
+ git submodule deinit ../init >../output
+ ) &&
+ grep "\\.\\./init" output &&
+ test -z "$(git config --get-regexp "submodule\.example\.")" &&
+ test -n "$(git config --get-regexp "submodule\.example2\.")" &&
+ test -f example2/.git &&
+ rmdir init
+'
+
test_expect_success 'submodule deinit . deinits all initialized submodules' '
git submodule update --init &&
git config submodule.example.foo bar &&
@@ -868,4 +948,19 @@ test_expect_success 'submodule deinit fails when submodule has a .git directory
test -n "$(git config --get-regexp "submodule\.example\.")"
'
+test_expect_success 'submodule with UTF-8 name' '
+ svname=$(printf "\303\245 \303\244\303\266") &&
+ mkdir "$svname" &&
+ (
+ cd "$svname" &&
+ git init &&
+ >sub &&
+ git add sub &&
+ git commit -m "init sub"
+ ) &&
+ test_config core.precomposeunicode true &&
+ git submodule add ./"$svname" &&
+ git submodule >&2 &&
+ test -n "$(git submodule | grep "$svname")"
+'
test_done
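Several of the new t7400 tests run submodule subcommands from a subdirectory of the superproject. Command-line paths are then interpreted relative to the current directory, and paths in the output are printed relative to it as well, which is what the "\.\./init" greps check. In the same spirit (directory and submodule names are illustrative, not from the patch):

    cd super/sub &&
    git submodule status ../mysub           # reported as ../mysub from this directory
    git submodule update --init ../mysub    # path resolved against the cwd, not the toplevel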
diff --git a/t/t7401-submodule-summary.sh b/t/t7401-submodule-summary.sh
index 30b429e7dc..ac2434c0db 100755
--- a/t/t7401-submodule-summary.sh
+++ b/t/t7401-submodule-summary.sh
@@ -45,6 +45,42 @@ EOF
test_cmp expected actual
"
+test_expect_success 'added submodule (subdirectory)' "
+ mkdir sub &&
+ (
+ cd sub &&
+ git submodule summary >../actual
+ ) &&
+ cat >expected <<-EOF &&
+* ../sm1 0000000...$head1 (2):
+ > Add foo2
+
+EOF
+ test_cmp expected actual
+"
+
+test_expect_success 'added submodule (subdirectory only)' "
+ (
+ cd sub &&
+ git submodule summary . >../actual
+ ) &&
+ >expected &&
+ test_cmp expected actual
+"
+
+test_expect_success 'added submodule (subdirectory with explicit path)' "
+ (
+ cd sub &&
+ git submodule summary ../sm1 >../actual
+ ) &&
+ cat >expected <<-EOF &&
+* ../sm1 0000000...$head1 (2):
+ > Add foo2
+
+EOF
+ test_cmp expected actual
+"
+
commit_file sm1 &&
head2=$(add_file sm1 foo3)
@@ -76,8 +112,8 @@ head3=$(
)
test_expect_success 'modified submodule(backward)' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head2...$head3 (2):
< Add foo3
< Add foo2
@@ -89,8 +125,8 @@ EOF
head4=$(add_file sm1 foo4 foo5) &&
head4_full=$(GIT_DIR=sm1/.git git rev-parse --verify HEAD)
test_expect_success 'modified submodule(backward and forward)' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head2...$head4 (4):
> Add foo5
> Add foo4
@@ -102,15 +138,15 @@ EOF
"
test_expect_success '--summary-limit' "
- git submodule summary -n 3 >actual &&
- cat >expected <<-EOF &&
+ git submodule summary -n 3 >actual &&
+ cat >expected <<-EOF &&
* sm1 $head2...$head4 (4):
> Add foo5
> Add foo4
< Add foo3
EOF
- test_cmp expected actual
+ test_cmp expected actual
"
commit_file sm1 &&
@@ -122,8 +158,8 @@ rm -f sm1 &&
mv sm1-bak sm1
test_expect_success 'typechanged submodule(submodule->blob), --cached' "
- git submodule summary --cached >actual &&
- cat >expected <<-EOF &&
+ git submodule summary --cached >actual &&
+ cat >expected <<-EOF &&
* sm1 $head4(submodule)->$head5(blob) (3):
< Add foo5
@@ -132,59 +168,59 @@ EOF
"
test_expect_success 'typechanged submodule(submodule->blob), --files' "
- git submodule summary --files >actual &&
- cat >expected <<-EOF &&
+ git submodule summary --files >actual &&
+ cat >expected <<-EOF &&
* sm1 $head5(blob)->$head4(submodule) (3):
> Add foo5
EOF
- test_i18ncmp actual expected
+ test_i18ncmp actual expected
"
rm -rf sm1 &&
git checkout-index sm1
test_expect_success 'typechanged submodule(submodule->blob)' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head4(submodule)->$head5(blob):
EOF
- test_i18ncmp actual expected
+ test_i18ncmp actual expected
"
rm -f sm1 &&
test_create_repo sm1 &&
head6=$(add_file sm1 foo6 foo7)
test_expect_success 'nonexistent commit' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head4...$head6:
Warn: sm1 doesn't contain commit $head4_full
EOF
- test_i18ncmp actual expected
+ test_i18ncmp actual expected
"
commit_file
test_expect_success 'typechanged submodule(blob->submodule)' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head5(blob)->$head6(submodule) (2):
> Add foo7
EOF
- test_i18ncmp expected actual
+ test_i18ncmp expected actual
"
commit_file sm1 &&
rm -rf sm1
test_expect_success 'deleted submodule' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head6...0000000:
EOF
- test_cmp expected actual
+ test_cmp expected actual
"
test_create_repo sm2 &&
@@ -192,43 +228,43 @@ head7=$(add_file sm2 foo8 foo9) &&
git add sm2
test_expect_success 'multiple submodules' "
- git submodule summary >actual &&
- cat >expected <<-EOF &&
+ git submodule summary >actual &&
+ cat >expected <<-EOF &&
* sm1 $head6...0000000:
* sm2 0000000...$head7 (2):
> Add foo9
EOF
- test_cmp expected actual
+ test_cmp expected actual
"
test_expect_success 'path filter' "
- git submodule summary sm2 >actual &&
- cat >expected <<-EOF &&
+ git submodule summary sm2 >actual &&
+ cat >expected <<-EOF &&
* sm2 0000000...$head7 (2):
> Add foo9
EOF
- test_cmp expected actual
+ test_cmp expected actual
"
commit_file sm2
test_expect_success 'given commit' "
- git submodule summary HEAD^ >actual &&
- cat >expected <<-EOF &&
+ git submodule summary HEAD^ >actual &&
+ cat >expected <<-EOF &&
* sm1 $head6...0000000:
* sm2 0000000...$head7 (2):
> Add foo9
EOF
- test_cmp expected actual
+ test_cmp expected actual
"
test_expect_success '--for-status' "
- git submodule summary --for-status HEAD^ >actual &&
- test_i18ncmp actual - <<EOF
+ git submodule summary --for-status HEAD^ >actual &&
+ test_i18ncmp actual - <<EOF
# Submodule changes to be committed:
#
# * sm1 $head6...0000000:
@@ -240,14 +276,14 @@ EOF
"
test_expect_success 'fail when using --files together with --cached' "
- test_must_fail git submodule summary --files --cached
+ test_must_fail git submodule summary --files --cached
"
test_expect_success 'should not fail in an empty repo' "
- git init xyzzy &&
- cd xyzzy &&
- git submodule summary >output 2>&1 &&
- test_cmp output /dev/null
+ git init xyzzy &&
+ cd xyzzy &&
+ git submodule summary >output 2>&1 &&
+ test_cmp output /dev/null
"
test_done
diff --git a/t/t7403-submodule-sync.sh b/t/t7403-submodule-sync.sh
index 94e26c47ea..79bc135bf6 100755
--- a/t/t7403-submodule-sync.sh
+++ b/t/t7403-submodule-sync.sh
@@ -11,216 +11,338 @@ These tests exercise the "git submodule sync" subcommand.
. ./test-lib.sh
test_expect_success setup '
- echo file > file &&
+ echo file >file &&
git add file &&
test_tick &&
git commit -m upstream &&
git clone . super &&
git clone super submodule &&
- (cd submodule &&
- git submodule add ../submodule sub-submodule &&
- test_tick &&
- git commit -m "sub-submodule"
+ (
+ cd submodule &&
+ git submodule add ../submodule sub-submodule &&
+ test_tick &&
+ git commit -m "sub-submodule"
) &&
- (cd super &&
- git submodule add ../submodule submodule &&
- test_tick &&
- git commit -m "submodule"
+ (
+ cd super &&
+ git submodule add ../submodule submodule &&
+ test_tick &&
+ git commit -m "submodule"
) &&
git clone super super-clone &&
- (cd super-clone && git submodule update --init --recursive) &&
+ (
+ cd super-clone &&
+ git submodule update --init --recursive
+ ) &&
git clone super empty-clone &&
- (cd empty-clone && git submodule init) &&
+ (
+ cd empty-clone &&
+ git submodule init
+ ) &&
git clone super top-only-clone &&
git clone super relative-clone &&
- (cd relative-clone && git submodule update --init --recursive) &&
+ (
+ cd relative-clone &&
+ git submodule update --init --recursive
+ ) &&
git clone super recursive-clone &&
- (cd recursive-clone && git submodule update --init --recursive)
+ (
+ cd recursive-clone &&
+ git submodule update --init --recursive
+ )
'
test_expect_success 'change submodule' '
- (cd submodule &&
- echo second line >> file &&
- test_tick &&
- git commit -a -m "change submodule"
+ (
+ cd submodule &&
+ echo second line >>file &&
+ test_tick &&
+ git commit -a -m "change submodule"
)
'
+reset_submodule_urls () {
+ local root
+ root=$(pwd) &&
+ (
+ cd super-clone/submodule &&
+ git config remote.origin.url "$root/submodule"
+ ) &&
+ (
+ cd super-clone/submodule/sub-submodule &&
+ git config remote.origin.url "$root/submodule"
+ )
+}
+
test_expect_success 'change submodule url' '
- (cd super &&
- cd submodule &&
- git checkout master &&
- git pull
+ (
+ cd super &&
+ cd submodule &&
+ git checkout master &&
+ git pull
) &&
mv submodule moved-submodule &&
- (cd moved-submodule &&
- git config -f .gitmodules submodule.sub-submodule.url ../moved-submodule &&
- test_tick &&
- git commit -a -m moved-sub-submodule
+ (
+ cd moved-submodule &&
+ git config -f .gitmodules submodule.sub-submodule.url ../moved-submodule &&
+ test_tick &&
+ git commit -a -m moved-sub-submodule
) &&
- (cd super &&
- git config -f .gitmodules submodule.submodule.url ../moved-submodule &&
- test_tick &&
- git commit -a -m moved-submodule
+ (
+ cd super &&
+ git config -f .gitmodules submodule.submodule.url ../moved-submodule &&
+ test_tick &&
+ git commit -a -m moved-submodule
)
'
test_expect_success '"git submodule sync" should update submodule URLs' '
- (cd super-clone &&
- git pull --no-recurse-submodules &&
- git submodule sync
+ (
+ cd super-clone &&
+ git pull --no-recurse-submodules &&
+ git submodule sync
) &&
- test -d "$(cd super-clone/submodule &&
- git config remote.origin.url
+ test -d "$(
+ cd super-clone/submodule &&
+ git config remote.origin.url
)" &&
- test ! -d "$(cd super-clone/submodule/sub-submodule &&
- git config remote.origin.url
+ test ! -d "$(
+ cd super-clone/submodule/sub-submodule &&
+ git config remote.origin.url
)" &&
- (cd super-clone/submodule &&
- git checkout master &&
- git pull
+ (
+ cd super-clone/submodule &&
+ git checkout master &&
+ git pull
) &&
- (cd super-clone &&
- test -d "$(git config submodule.submodule.url)"
+ (
+ cd super-clone &&
+ test -d "$(git config submodule.submodule.url)"
)
'
test_expect_success '"git submodule sync --recursive" should update all submodule URLs' '
- (cd super-clone &&
- (cd submodule &&
- git pull --no-recurse-submodules
- ) &&
- git submodule sync --recursive
+ (
+ cd super-clone &&
+ (
+ cd submodule &&
+ git pull --no-recurse-submodules
+ ) &&
+ git submodule sync --recursive
+ ) &&
+ test -d "$(
+ cd super-clone/submodule &&
+ git config remote.origin.url
+ )" &&
+ test -d "$(
+ cd super-clone/submodule/sub-submodule &&
+ git config remote.origin.url
+ )" &&
+ (
+ cd super-clone/submodule/sub-submodule &&
+ git checkout master &&
+ git pull
+ )
+'
+
+test_expect_success 'reset submodule URLs' '
+ reset_submodule_urls super-clone
+'
+
+test_expect_success '"git submodule sync" should update submodule URLs - subdirectory' '
+ (
+ cd super-clone &&
+ git pull --no-recurse-submodules &&
+ mkdir -p sub &&
+ cd sub &&
+ git submodule sync >../../output
+ ) &&
+ grep "\\.\\./submodule" output &&
+ test -d "$(
+ cd super-clone/submodule &&
+ git config remote.origin.url
+ )" &&
+ test ! -d "$(
+ cd super-clone/submodule/sub-submodule &&
+ git config remote.origin.url
+ )" &&
+ (
+ cd super-clone/submodule &&
+ git checkout master &&
+ git pull
+ ) &&
+ (
+ cd super-clone &&
+ test -d "$(git config submodule.submodule.url)"
+ )
+'
+
+test_expect_success '"git submodule sync --recursive" should update all submodule URLs - subdirectory' '
+ (
+ cd super-clone &&
+ (
+ cd submodule &&
+ git pull --no-recurse-submodules
+ ) &&
+ mkdir -p sub &&
+ cd sub &&
+ git submodule sync --recursive >../../output
) &&
- test -d "$(cd super-clone/submodule &&
- git config remote.origin.url
+ grep "\\.\\./submodule/sub-submodule" output &&
+ test -d "$(
+ cd super-clone/submodule &&
+ git config remote.origin.url
)" &&
- test -d "$(cd super-clone/submodule/sub-submodule &&
- git config remote.origin.url
+ test -d "$(
+ cd super-clone/submodule/sub-submodule &&
+ git config remote.origin.url
)" &&
- (cd super-clone/submodule/sub-submodule &&
- git checkout master &&
- git pull
+ (
+ cd super-clone/submodule/sub-submodule &&
+ git checkout master &&
+ git pull
)
'
test_expect_success '"git submodule sync" should update known submodule URLs' '
- (cd empty-clone &&
- git pull &&
- git submodule sync &&
- test -d "$(git config submodule.submodule.url)"
+ (
+ cd empty-clone &&
+ git pull &&
+ git submodule sync &&
+ test -d "$(git config submodule.submodule.url)"
)
'
test_expect_success '"git submodule sync" should not vivify uninteresting submodule' '
- (cd top-only-clone &&
- git pull &&
- git submodule sync &&
- test -z "$(git config submodule.submodule.url)" &&
- git submodule sync submodule &&
- test -z "$(git config submodule.submodule.url)"
+ (
+ cd top-only-clone &&
+ git pull &&
+ git submodule sync &&
+ test -z "$(git config submodule.submodule.url)" &&
+ git submodule sync submodule &&
+ test -z "$(git config submodule.submodule.url)"
)
'
test_expect_success '"git submodule sync" handles origin URL of the form foo' '
- (cd relative-clone &&
- git remote set-url origin foo &&
- git submodule sync &&
- (cd submodule &&
- #actual fails with: "cannot strip off url foo
- test "$(git config remote.origin.url)" = "../submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin foo &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual fails with: "cannot strip off url foo"

+ test "$(git config remote.origin.url)" = "../submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form foo/bar' '
- (cd relative-clone &&
- git remote set-url origin foo/bar &&
- git submodule sync &&
- (cd submodule &&
- #actual foo/submodule
- test "$(git config remote.origin.url)" = "../foo/submodule"
- )
- (cd submodule/sub-submodule &&
- test "$(git config remote.origin.url)" != "../../foo/submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin foo/bar &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual foo/submodule
+ test "$(git config remote.origin.url)" = "../foo/submodule"
+ ) &&
+ (
+ cd submodule/sub-submodule &&
+ test "$(git config remote.origin.url)" != "../../foo/submodule"
+ )
)
'
test_expect_success '"git submodule sync --recursive" propagates changes in origin' '
- (cd recursive-clone &&
- git remote set-url origin foo/bar &&
- git submodule sync --recursive &&
- (cd submodule &&
- #actual foo/submodule
- test "$(git config remote.origin.url)" = "../foo/submodule"
- )
- (cd submodule/sub-submodule &&
- test "$(git config remote.origin.url)" = "../../foo/submodule"
- )
+ (
+ cd recursive-clone &&
+ git remote set-url origin foo/bar &&
+ git submodule sync --recursive &&
+ (
+ cd submodule &&
+ #actual foo/submodule
+ test "$(git config remote.origin.url)" = "../foo/submodule"
+ ) &&
+ (
+ cd submodule/sub-submodule &&
+ test "$(git config remote.origin.url)" = "../../foo/submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form ./foo' '
- (cd relative-clone &&
- git remote set-url origin ./foo &&
- git submodule sync &&
- (cd submodule &&
- #actual ./submodule
- test "$(git config remote.origin.url)" = "../submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin ./foo &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual ./submodule
+ test "$(git config remote.origin.url)" = "../submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form ./foo/bar' '
- (cd relative-clone &&
- git remote set-url origin ./foo/bar &&
- git submodule sync &&
- (cd submodule &&
- #actual ./foo/submodule
- test "$(git config remote.origin.url)" = "../foo/submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin ./foo/bar &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual ./foo/submodule
+ test "$(git config remote.origin.url)" = "../foo/submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form ../foo' '
- (cd relative-clone &&
- git remote set-url origin ../foo &&
- git submodule sync &&
- (cd submodule &&
- #actual ../submodule
- test "$(git config remote.origin.url)" = "../../submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin ../foo &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual ../submodule
+ test "$(git config remote.origin.url)" = "../../submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form ../foo/bar' '
- (cd relative-clone &&
- git remote set-url origin ../foo/bar &&
- git submodule sync &&
- (cd submodule &&
- #actual ../foo/submodule
- test "$(git config remote.origin.url)" = "../../foo/submodule"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin ../foo/bar &&
+ git submodule sync &&
+ (
+ cd submodule &&
+ #actual ../foo/submodule
+ test "$(git config remote.origin.url)" = "../../foo/submodule"
+ )
)
'
test_expect_success '"git submodule sync" handles origin URL of the form ../foo/bar with deeply nested submodule' '
- (cd relative-clone &&
- git remote set-url origin ../foo/bar &&
- mkdir -p a/b/c &&
- ( cd a/b/c &&
- git init &&
- :> .gitignore &&
- git add .gitignore &&
- test_tick &&
- git commit -m "initial commit" ) &&
- git submodule add ../bar/a/b/c ./a/b/c &&
- git submodule sync &&
- (cd a/b/c &&
- #actual ../foo/bar/a/b/c
- test "$(git config remote.origin.url)" = "../../../../foo/bar/a/b/c"
- )
+ (
+ cd relative-clone &&
+ git remote set-url origin ../foo/bar &&
+ mkdir -p a/b/c &&
+ (
+ cd a/b/c &&
+ git init &&
+ >.gitignore &&
+ git add .gitignore &&
+ test_tick &&
+ git commit -m "initial commit"
+ ) &&
+ git submodule add ../bar/a/b/c ./a/b/c &&
+ git submodule sync &&
+ (
+ cd a/b/c &&
+ #actual ../foo/bar/a/b/c
+ test "$(git config remote.origin.url)" = "../../../../foo/bar/a/b/c"
+ )
)
'
diff --git a/t/t7406-submodule-update.sh b/t/t7406-submodule-update.sh
index a4ffea0dbe..cdb0538392 100755
--- a/t/t7406-submodule-update.sh
+++ b/t/t7406-submodule-update.sh
@@ -80,6 +80,21 @@ test_expect_success 'submodule update detaching the HEAD ' '
)
'
+test_expect_success 'submodule update from subdirectory' '
+ (cd super/submodule &&
+ git reset --hard HEAD~1
+ ) &&
+ mkdir super/sub &&
+ (cd super/sub &&
+ (cd ../submodule &&
+ compare_head
+ ) &&
+ git submodule update ../submodule &&
+ cd ../submodule &&
+ ! compare_head
+ )
+'
+
apos="'";
test_expect_success 'submodule update does not fetch already present commits' '
(cd submodule &&
diff --git a/t/t7407-submodule-foreach.sh b/t/t7407-submodule-foreach.sh
index 107b4b7c45..91d4fd1fac 100755
--- a/t/t7407-submodule-foreach.sh
+++ b/t/t7407-submodule-foreach.sh
@@ -80,6 +80,22 @@ test_expect_success 'test basic "submodule foreach" usage' '
test_i18ncmp expect actual
'
+cat >expect <<EOF
+Entering '../sub1'
+$pwd/clone-foo1-../sub1-$sub1sha1
+Entering '../sub3'
+$pwd/clone-foo3-../sub3-$sub3sha1
+EOF
+
+test_expect_success 'test "submodule foreach" from subdirectory' '
+ mkdir clone/sub &&
+ (
+ cd clone/sub &&
+ git submodule foreach "echo \$toplevel-\$name-\$sm_path-\$sha1" >../../actual
+ ) &&
+ test_i18ncmp expect actual
+'
+
test_expect_success 'setup nested submodules' '
git clone submodule nested1 &&
git clone submodule nested2 &&
diff --git a/t/t7512-status-help.sh b/t/t7512-status-help.sh
index bf08d4e098..4f09beca90 100755
--- a/t/t7512-status-help.sh
+++ b/t/t7512-status-help.sh
@@ -632,7 +632,8 @@ test_expect_success 'status when cherry-picking before resolving conflicts' '
cat >expected <<-\EOF &&
# On branch cherry_branch
# You are currently cherry-picking.
- # (fix conflicts and run "git commit")
+ # (fix conflicts and run "git cherry-pick --continue")
+ # (use "git cherry-pick --abort" to cancel the cherry-pick operation)
#
# Unmerged paths:
# (use "git add <file>..." to mark resolution)
@@ -655,7 +656,8 @@ test_expect_success 'status when cherry-picking after resolving conflicts' '
cat >expected <<-\EOF &&
# On branch cherry_branch
# You are currently cherry-picking.
- # (all conflicts fixed: run "git commit")
+ # (all conflicts fixed: run "git cherry-pick --continue")
+ # (use "git cherry-pick --abort" to cancel the cherry-pick operation)
#
# Changes to be committed:
#
diff --git a/t/t7600-merge.sh b/t/t7600-merge.sh
index 2f70433568..460d8ebf48 100755
--- a/t/t7600-merge.sh
+++ b/t/t7600-merge.sh
@@ -316,7 +316,7 @@ test_expect_success 'merge c1 with c2 (squash)' '
test_debug 'git log --graph --decorate --oneline --all'
-test_expect_success 'unsuccesful merge of c1 with c2 (squash, ff-only)' '
+test_expect_success 'unsuccessful merge of c1 with c2 (squash, ff-only)' '
git reset --hard c1 &&
test_must_fail git merge --squash --ff-only c2
'
diff --git a/t/t9001-send-email.sh b/t/t9001-send-email.sh
index 9f46f22ca8..2813aa9a61 100755
--- a/t/t9001-send-email.sh
+++ b/t/t9001-send-email.sh
@@ -956,6 +956,20 @@ test_expect_success $PREREQ 'utf8 author is correctly passed on' '
grep "^From: Füñný Nâmé <odd_?=mail@example.com>" msgtxt1
'
+test_expect_success $PREREQ 'utf8 sender is not duplicated' '
+ clean_fake_sendmail &&
+ test_commit weird_sender &&
+ test_when_finished "git reset --hard HEAD^" &&
+ git commit --amend --author "Füñný Nâmé <odd_?=mail@example.com>" &&
+ git format-patch --stdout -1 >funny_name.patch &&
+ git send-email --from="Füñný Nâmé <odd_?=mail@example.com>" \
+ --to=nobody@example.com \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ funny_name.patch &&
+ grep "^From: " msgtxt1 >msgfrom &&
+ test_line_count = 1 msgfrom
+'
+
test_expect_success $PREREQ 'sendemail.composeencoding works' '
clean_fake_sendmail &&
git config sendemail.composeencoding iso-8859-1 &&
diff --git a/t/t9802-git-p4-filetype.sh b/t/t9802-git-p4-filetype.sh
index eeefa67904..b0d1d94bf4 100755
--- a/t/t9802-git-p4-filetype.sh
+++ b/t/t9802-git-p4-filetype.sh
@@ -95,7 +95,7 @@ test_expect_success 'gitattributes setting eol=lf produces lf newlines' '
git init &&
echo "* eol=lf" >.gitattributes &&
git p4 sync //depot@all &&
- git checkout master &&
+ git checkout -b master p4/master &&
test_cmp "$cli"/f-unix-orig f-unix &&
test_cmp "$cli"/f-win-as-lf f-win
)
@@ -109,7 +109,7 @@ test_expect_success 'gitattributes setting eol=crlf produces crlf newlines' '
git init &&
echo "* eol=crlf" >.gitattributes &&
git p4 sync //depot@all &&
- git checkout master &&
+ git checkout -b master p4/master &&
test_cmp "$cli"/f-unix-as-crlf f-unix &&
test_cmp "$cli"/f-win-orig f-win
)
diff --git a/test-prio-queue.c b/test-prio-queue.c
new file mode 100644
index 0000000000..7be72f0086
--- /dev/null
+++ b/test-prio-queue.c
@@ -0,0 +1,39 @@
+#include "cache.h"
+#include "prio-queue.h"
+
+static int intcmp(const void *va, const void *vb, void *data)
+{
+ const int *a = va, *b = vb;
+ return *a - *b;
+}
+
+static void show(int *v)
+{
+ if (!v)
+ printf("NULL\n");
+ else
+ printf("%d\n", *v);
+ free(v);
+}
+
+int main(int argc, char **argv)
+{
+ struct prio_queue pq = { intcmp };
+
+ while (*++argv) {
+ if (!strcmp(*argv, "get"))
+ show(prio_queue_get(&pq));
+ else if (!strcmp(*argv, "dump")) {
+ int *v;
+ while ((v = prio_queue_get(&pq)))
+ show(v);
+ }
+ else {
+ int *v = malloc(sizeof(*v));
+ *v = atoi(*argv);
+ prio_queue_put(&pq, v);
+ }
+ }
+
+ return 0;
+}
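This helper is driven by the new t0009-prio-queue.sh: each numeric argument is pushed onto the queue, "get" pops a single element, and "dump" drains whatever remains; with the intcmp comparator above, elements come out smallest first. A hypothetical invocation based on my reading of main() (the binary path and the min-heap pop order are assumptions, not quoted from the test):

    ./test-prio-queue 5 1 3 get 2 4 dump
    # expected output, one value per line: 1 2 3 4 5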
diff --git a/transport.c b/transport.c
index 359a671c8c..e15db9808c 100644
--- a/transport.c
+++ b/transport.c
@@ -1232,7 +1232,7 @@ int transport_fetch_refs(struct transport *transport, struct ref *refs)
* then local and remote refs are likely to still be equal.
* Just feed them all to the fetch method in that case.
* This condition shouldn't be met in a non-deepening fetch
- * (see builtin-fetch.c:quickfetch()).
+ * (see builtin/fetch.c:quickfetch()).
*/
heads = xmalloc(nr_refs * sizeof(*heads));
for (rm = refs; rm; rm = rm->next)
diff --git a/transport.h b/transport.h
index 4edebc5355..ea70ea7e4a 100644
--- a/transport.h
+++ b/transport.h
@@ -170,7 +170,7 @@ int transport_connect(struct transport *transport, const char *name,
int transport_helper_init(struct transport *transport, const char *name);
int bidirectional_transfer_loop(int input, int output);
-/* common methods used by transport.c and builtin-send-pack.c */
+/* common methods used by transport.c and builtin/send-pack.c */
void transport_verify_remote_names(int nr_heads, const char **heads);
void transport_update_tracking_ref(struct remote *remote, struct ref *ref, int verbose);
diff --git a/tree-walk.h b/tree-walk.h
index 2bf0db9814..ae04b6417d 100644
--- a/tree-walk.h
+++ b/tree-walk.h
@@ -46,7 +46,7 @@ struct traverse_info {
int pathlen;
struct pathspec *pathspec;
- unsigned long conflicts;
+ unsigned long df_conflicts;
traverse_callback_t fn;
void *data;
int show_all_errors;
diff --git a/unpack-trees.c b/unpack-trees.c
index 57b40743a1..b27f2a62e8 100644
--- a/unpack-trees.c
+++ b/unpack-trees.c
@@ -464,7 +464,7 @@ static int traverse_trees_recursive(int n, unsigned long dirmask,
newinfo.pathspec = info->pathspec;
newinfo.name = *p;
newinfo.pathlen += tree_entry_len(p) + 1;
- newinfo.conflicts |= df_conflicts;
+ newinfo.df_conflicts |= df_conflicts;
for (i = 0; i < n; i++, dirmask >>= 1) {
const unsigned char *sha1 = NULL;
@@ -565,17 +565,12 @@ static int unpack_nondirectories(int n, unsigned long mask,
{
int i;
struct unpack_trees_options *o = info->data;
- unsigned long conflicts;
+ unsigned long conflicts = info->df_conflicts | dirmask;
/* Do we have *only* directories? Nothing to do */
if (mask == dirmask && !src[0])
return 0;
- conflicts = info->conflicts;
- if (o->merge)
- conflicts >>= 1;
- conflicts |= dirmask;
-
/*
* Ok, we've filled in up to any potential index entry in src[0],
* now do the rest.
@@ -807,13 +802,6 @@ static int unpack_callback(int n, unsigned long mask, unsigned long dirmask, str
/* Now handle any directories.. */
if (dirmask) {
- unsigned long conflicts = mask & ~dirmask;
- if (o->merge) {
- conflicts <<= 1;
- if (src[0])
- conflicts |= 1;
- }
-
/* special case: "diff-index --cached" looking at a tree */
if (o->diff_index_cached &&
n == 1 && dirmask == 1 && S_ISDIR(names->mode)) {
@@ -832,7 +820,7 @@ static int unpack_callback(int n, unsigned long mask, unsigned long dirmask, str
}
}
- if (traverse_trees_recursive(n, dirmask, conflicts,
+ if (traverse_trees_recursive(n, dirmask, mask & ~dirmask,
names, info) < 0)
return -1;
return mask;
diff --git a/wt-status.c b/wt-status.c
index bf84a86ee3..438a40d637 100644
--- a/wt-status.c
+++ b/wt-status.c
@@ -955,10 +955,12 @@ static void show_cherry_pick_in_progress(struct wt_status *s,
if (advice_status_hints) {
if (has_unmerged(s))
status_printf_ln(s, color,
- _(" (fix conflicts and run \"git commit\")"));
+ _(" (fix conflicts and run \"git cherry-pick --continue\")"));
else
status_printf_ln(s, color,
- _(" (all conflicts fixed: run \"git commit\")"));
+ _(" (all conflicts fixed: run \"git cherry-pick --continue\")"));
+ status_printf_ln(s, color,
+ _(" (use \"git cherry-pick --abort\" to cancel the cherry-pick operation)"));
}
wt_status_print_trailer(s);
}
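Together with the t7512 expectations earlier in the patch, this makes git status point at the sequencer commands during a cherry-pick instead of suggesting a bare "git commit". The resulting flow, using the hint strings added in this hunk (the placeholder <commit> is mine):

    git cherry-pick <commit>        # stops on conflict
    git status                      # (fix conflicts and run "git cherry-pick --continue")
                                    # (use "git cherry-pick --abort" to cancel the cherry-pick operation)
    git cherry-pick --continue      # after staging the resolution, or:
    git cherry-pick --abort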
diff --git a/xdiff/xdiff.h b/xdiff/xdiff.h
index 219a3bbca6..c0339919cc 100644
--- a/xdiff/xdiff.h
+++ b/xdiff/xdiff.h
@@ -39,6 +39,8 @@ extern "C" {
#define XDF_DIFF_ALGORITHM_MASK (XDF_PATIENCE_DIFF | XDF_HISTOGRAM_DIFF)
#define XDF_DIFF_ALG(x) ((x) & XDF_DIFF_ALGORITHM_MASK)
+#define XDF_IGNORE_BLANK_LINES (1 << 7)
+
#define XDL_EMIT_FUNCNAMES (1 << 0)
#define XDL_EMIT_COMMON (1 << 1)
#define XDL_EMIT_FUNCCONTEXT (1 << 2)
diff --git a/xdiff/xdiffi.c b/xdiff/xdiffi.c
index b2eb6db2c8..2358a2d632 100644
--- a/xdiff/xdiffi.c
+++ b/xdiff/xdiffi.c
@@ -394,6 +394,7 @@ static xdchange_t *xdl_add_change(xdchange_t *xscr, long i1, long i2, long chg1,
xch->i2 = i2;
xch->chg1 = chg1;
xch->chg2 = chg2;
+ xch->ignore = 0;
return xch;
}
@@ -544,7 +545,9 @@ static int xdl_call_hunk_func(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdchange_t *xch, *xche;
for (xch = xscr; xch; xch = xche->next) {
- xche = xdl_get_hunk(xch, xecfg);
+ xche = xdl_get_hunk(&xch, xecfg);
+ if (!xch)
+ break;
if (xecfg->hunk_func(xch->i1, xche->i1 + xche->chg1 - xch->i1,
xch->i2, xche->i2 + xche->chg2 - xch->i2,
ecb->priv) < 0)
@@ -553,6 +556,27 @@ static int xdl_call_hunk_func(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
return 0;
}
+static void xdl_mark_ignorable(xdchange_t *xscr, xdfenv_t *xe, long flags)
+{
+ xdchange_t *xch;
+
+ for (xch = xscr; xch; xch = xch->next) {
+ int ignore = 1;
+ xrecord_t **rec;
+ long i;
+
+ rec = &xe->xdf1.recs[xch->i1];
+ for (i = 0; i < xch->chg1 && ignore; i++)
+ ignore = xdl_blankline(rec[i]->ptr, rec[i]->size, flags);
+
+ rec = &xe->xdf2.recs[xch->i2];
+ for (i = 0; i < xch->chg2 && ignore; i++)
+ ignore = xdl_blankline(rec[i]->ptr, rec[i]->size, flags);
+
+ xch->ignore = ignore;
+ }
+}
+
int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
xdemitconf_t const *xecfg, xdemitcb_t *ecb) {
xdchange_t *xscr;
@@ -571,6 +595,9 @@ int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
return -1;
}
if (xscr) {
+ if (xpp->flags & XDF_IGNORE_BLANK_LINES)
+ xdl_mark_ignorable(xscr, &xe, xpp->flags);
+
if (ef(&xe, xscr, ecb, xecfg) < 0) {
xdl_free_script(xscr);
diff --git a/xdiff/xdiffi.h b/xdiff/xdiffi.h
index 7a92ea9c4d..8b81206c9a 100644
--- a/xdiff/xdiffi.h
+++ b/xdiff/xdiffi.h
@@ -41,6 +41,7 @@ typedef struct s_xdchange {
struct s_xdchange *next;
long i1, i2;
long chg1, chg2;
+ int ignore;
} xdchange_t;
diff --git a/xdiff/xemit.c b/xdiff/xemit.c
index d11dbf9f13..4d8645867e 100644
--- a/xdiff/xemit.c
+++ b/xdiff/xemit.c
@@ -56,16 +56,51 @@ static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *
/*
* Starting at the passed change atom, find the latest change atom to be included
* inside the differential hunk according to the specified configuration.
+ * Also advance xscr if the first changes must be discarded.
*/
-xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
- xdchange_t *xch, *xchp;
+xdchange_t *xdl_get_hunk(xdchange_t **xscr, xdemitconf_t const *xecfg)
+{
+ xdchange_t *xch, *xchp, *lxch;
long max_common = 2 * xecfg->ctxlen + xecfg->interhunkctxlen;
+ long max_ignorable = xecfg->ctxlen;
+ unsigned long ignored = 0; /* number of ignored blank lines */
+
+ /* remove ignorable changes that are too far before other changes */
+ for (xchp = *xscr; xchp && xchp->ignore; xchp = xchp->next) {
+ xch = xchp->next;
+
+ if (xch == NULL ||
+ xch->i1 - (xchp->i1 + xchp->chg1) >= max_ignorable)
+ *xscr = xch;
+ }
+
+ if (*xscr == NULL)
+ return NULL;
+
+ lxch = *xscr;
- for (xchp = xscr, xch = xscr->next; xch; xchp = xch, xch = xch->next)
- if (xch->i1 - (xchp->i1 + xchp->chg1) > max_common)
+ for (xchp = *xscr, xch = xchp->next; xch; xchp = xch, xch = xch->next) {
+ long distance = xch->i1 - (xchp->i1 + xchp->chg1);
+ if (distance > max_common)
break;
- return xchp;
+ if (distance < max_ignorable && (!xch->ignore || lxch == xchp)) {
+ lxch = xch;
+ ignored = 0;
+ } else if (distance < max_ignorable && xch->ignore) {
+ ignored += xch->chg2;
+ } else if (lxch != xchp &&
+ xch->i1 + ignored - (lxch->i1 + lxch->chg1) > max_common) {
+ break;
+ } else if (!xch->ignore) {
+ lxch = xch;
+ ignored = 0;
+ } else {
+ ignored += xch->chg2;
+ }
+ }
+
+ return lxch;
}
@@ -139,7 +174,9 @@ int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
return xdl_emit_common(xe, xscr, ecb, xecfg);
for (xch = xscr; xch; xch = xche->next) {
- xche = xdl_get_hunk(xch, xecfg);
+ xche = xdl_get_hunk(&xch, xecfg);
+ if (!xch)
+ break;
s1 = XDL_MAX(xch->i1 - xecfg->ctxlen, 0);
s2 = XDL_MAX(xch->i2 - xecfg->ctxlen, 0);
diff --git a/xdiff/xemit.h b/xdiff/xemit.h
index c2e2e83027..d29710770c 100644
--- a/xdiff/xemit.h
+++ b/xdiff/xemit.h
@@ -27,7 +27,7 @@
typedef int (*emit_func_t)(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg);
-xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
+xdchange_t *xdl_get_hunk(xdchange_t **xscr, xdemitconf_t const *xecfg);
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg);
diff --git a/xdiff/xutils.c b/xdiff/xutils.c
index 9504eaecb8..62cb23dfd3 100644
--- a/xdiff/xutils.c
+++ b/xdiff/xutils.c
@@ -143,6 +143,19 @@ long xdl_guess_lines(mmfile_t *mf, long sample) {
return nl + 1;
}
+int xdl_blankline(const char *line, long size, long flags)
+{
+ long i;
+
+ if (!(flags & XDF_WHITESPACE_FLAGS))
+ return (size <= 1);
+
+ for (i = 0; i < size && XDL_ISSPACE(line[i]); i++)
+ ;
+
+ return (i == size);
+}
+
int xdl_recmatch(const char *l1, long s1, const char *l2, long s2, long flags)
{
int i1, i2;
diff --git a/xdiff/xutils.h b/xdiff/xutils.h
index ad1428ed69..4646ce5752 100644
--- a/xdiff/xutils.h
+++ b/xdiff/xutils.h
@@ -32,6 +32,7 @@ int xdl_cha_init(chastore_t *cha, long isize, long icount);
void xdl_cha_free(chastore_t *cha);
void *xdl_cha_alloc(chastore_t *cha);
long xdl_guess_lines(mmfile_t *mf, long sample);
+int xdl_blankline(const char *line, long size, long flags);
int xdl_recmatch(const char *l1, long s1, const char *l2, long s2, long flags);
unsigned long xdl_hash_record(char const **data, char const *top, long flags);
unsigned int xdl_hashbits(unsigned int size);
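One detail of xdl_blankline() worth spelling out: without any XDF_WHITESPACE_FLAGS it treats only truly empty lines (size <= 1, i.e. just the newline) as blank, so a line containing nothing but spaces is not ignorable unless a whitespace-ignoring option is also in effect, which is exactly what the "only new lines with space" case in t4015 relies on. In command terms (my summary, not text from the patch):

    git diff --ignore-blank-lines       # an added line holding only spaces still produces a hunk
    git diff -w --ignore-blank-lines    # whitespace flags set: that line counts as blank and is dropped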