-rw-r--r--  .gitignore | 2
-rw-r--r--  .mailmap | 1
-rw-r--r--  CODE_OF_CONDUCT.md | 93
-rw-r--r--  Documentation/Makefile | 7
-rw-r--r--  Documentation/MyFirstContribution.txt | 10
-rw-r--r--  Documentation/RelNotes/2.24.0.txt | 370
-rw-r--r--  Documentation/RelNotes/2.7.1.txt | 2
-rw-r--r--  Documentation/RelNotes/2.8.0.txt | 2
-rw-r--r--  Documentation/SubmittingPatches | 4
-rw-r--r--  Documentation/asciidoc.conf | 6
-rw-r--r--  Documentation/asciidoctor-extensions.rb | 24
-rw-r--r--  Documentation/config.txt | 86
-rw-r--r--  Documentation/config/core.txt | 6
-rw-r--r--  Documentation/config/diff.txt | 2
-rw-r--r--  Documentation/config/feature.txt | 37
-rw-r--r--  Documentation/config/fetch.txt | 23
-rw-r--r--  Documentation/config/format.txt | 1
-rw-r--r--  Documentation/config/gc.txt | 2
-rw-r--r--  Documentation/config/index.txt | 1
-rw-r--r--  Documentation/config/pack.txt | 3
-rw-r--r--  Documentation/config/remote.txt | 8
-rw-r--r--  Documentation/config/trace2.txt | 6
-rw-r--r--  Documentation/diff-generate-patch.txt | 32
-rwxr-xr-x  Documentation/doc-diff | 17
-rw-r--r--  Documentation/fetch-options.txt | 20
-rw-r--r--  Documentation/git-clean.txt | 16
-rw-r--r--  Documentation/git-commit-graph.txt | 7
-rw-r--r--  Documentation/git-commit.txt | 8
-rw-r--r--  Documentation/git-config.txt | 56
-rw-r--r--  Documentation/git-fast-export.txt | 23
-rw-r--r--  Documentation/git-fast-import.txt | 25
-rw-r--r--  Documentation/git-filter-branch.txt | 273
-rw-r--r--  Documentation/git-format-patch.txt | 23
-rw-r--r--  Documentation/git-gc.txt | 17
-rw-r--r--  Documentation/git-grep.txt | 17
-rw-r--r--  Documentation/git-gui.txt | 10
-rw-r--r--  Documentation/git-ls-remote.txt | 32
-rw-r--r--  Documentation/git-merge-base.txt | 98
-rw-r--r--  Documentation/git-merge-index.txt | 26
-rw-r--r--  Documentation/git-merge.txt | 2
-rw-r--r--  Documentation/git-rebase.txt | 35
-rw-r--r--  Documentation/git-receive-pack.txt | 52
-rw-r--r--  Documentation/git-replace.txt | 10
-rw-r--r--  Documentation/git-rev-list.txt | 54
-rw-r--r--  Documentation/git-send-email.txt | 12
-rw-r--r--  Documentation/git-stash.txt | 5
-rw-r--r--  Documentation/git-status.txt | 18
-rw-r--r--  Documentation/git-submodule.txt | 3
-rw-r--r--  Documentation/git-svn.txt | 10
-rw-r--r--  Documentation/gitattributes.txt | 2
-rw-r--r--  Documentation/gitcli.txt | 6
-rw-r--r--  Documentation/githooks.txt | 32
-rw-r--r--  Documentation/gitmodules.txt | 17
-rw-r--r--  Documentation/gitremote-helpers.txt | 10
-rw-r--r--  Documentation/gitrepository-layout.txt | 2
-rw-r--r--  Documentation/gitweb.conf.txt | 6
-rw-r--r--  Documentation/manpage.xsl | 3
-rw-r--r--  Documentation/merge-options.txt | 32
-rw-r--r--  Documentation/pretty-formats.txt | 2
-rw-r--r--  Documentation/rev-list-options.txt | 16
-rw-r--r--  Documentation/technical/api-directory-listing.txt | 6
-rw-r--r--  Documentation/technical/api-trace2.txt | 31
-rw-r--r--  Documentation/technical/api-tree-walking.txt | 8
-rw-r--r--  Documentation/technical/partial-clone.txt | 117
-rw-r--r--  Documentation/trace2-target-values.txt | 4
-rw-r--r--  Documentation/user-manual.txt | 377
-rwxr-xr-x  GIT-VERSION-GEN | 2
-rw-r--r--  Makefile | 61
l---------  RelNotes | 2
-rw-r--r--  apply.c | 54
-rw-r--r--  apply.h | 1
-rw-r--r--  archive-tar.c | 14
-rw-r--r--  attr.c | 34
-rw-r--r--  azure-pipelines.yml | 168
-rw-r--r--  banned.h | 2
-rw-r--r--  bisect.c | 2
-rw-r--r--  blame.c | 27
-rw-r--r--  builtin/am.c | 23
-rw-r--r--  builtin/blame.c | 8
-rw-r--r--  builtin/cat-file.c | 5
-rw-r--r--  builtin/check-ignore.c | 34
-rw-r--r--  builtin/checkout.c | 31
-rw-r--r--  builtin/clean.c | 27
-rw-r--r--  builtin/clone.c | 11
-rw-r--r--  builtin/commit-graph.c | 35
-rw-r--r--  builtin/commit.c | 4
-rw-r--r--  builtin/describe.c | 24
-rw-r--r--  builtin/difftool.c | 56
-rw-r--r--  builtin/fast-export.c | 82
-rw-r--r--  builtin/fetch.c | 300
-rw-r--r--  builtin/gc.c | 15
-rw-r--r--  builtin/grep.c | 14
-rw-r--r--  builtin/index-pack.c | 12
-rw-r--r--  builtin/log.c | 5
-rw-r--r--  builtin/ls-files.c | 8
-rw-r--r--  builtin/merge-recursive.c | 4
-rw-r--r--  builtin/merge-tree.c | 5
-rw-r--r--  builtin/merge.c | 31
-rw-r--r--  builtin/name-rev.c | 15
-rw-r--r--  builtin/pack-objects.c | 52
-rw-r--r--  builtin/patch-id.c | 11
-rw-r--r--  builtin/pull.c | 6
-rw-r--r--  builtin/push.c | 75
-rw-r--r--  builtin/rebase.c | 113
-rw-r--r--  builtin/receive-pack.c | 50
-rw-r--r--  builtin/repack.c | 5
-rw-r--r--  builtin/replace.c | 9
-rw-r--r--  builtin/rev-list.c | 5
-rw-r--r--  builtin/rev-parse.c | 5
-rw-r--r--  builtin/show-index.c | 13
-rw-r--r--  builtin/stash.c | 17
-rw-r--r--  builtin/submodule--helper.c | 4
-rw-r--r--  builtin/update-index.c | 6
-rw-r--r--  builtin/worktree.c | 5
-rw-r--r--  bulk-checkin.c | 2
-rw-r--r--  bundle.c | 4
-rw-r--r--  cache-tree.c | 92
-rw-r--r--  cache-tree.h | 3
-rw-r--r--  cache.h | 68
-rwxr-xr-x  ci/install-dependencies.sh | 5
-rwxr-xr-x  ci/lib.sh | 7
-rwxr-xr-x  ci/run-static-analysis.sh | 3
-rwxr-xr-x  ci/test-documentation.sh | 2
-rw-r--r--  combine-diff.c | 2
-rw-r--r--  commit-graph.c | 105
-rw-r--r--  commit-graph.h | 21
-rw-r--r--  commit.c | 5
-rw-r--r--  common-main.c | 8
-rw-r--r--  compat/mingw.c | 32
-rw-r--r--  compat/mingw.h | 6
-rw-r--r--  compat/nedmalloc/malloc.c.h | 6
-rwxr-xr-x  compat/vcbuild/scripts/clink.pl | 48
-rw-r--r--  compat/win32/path-utils.h | 5
-rw-r--r--  compat/win32/pthread.h | 6
-rw-r--r--  compat/winansi.c | 2
-rw-r--r--  config.c | 57
-rw-r--r--  config.mak.uname | 19
-rw-r--r--  connect.c | 4
-rw-r--r--  connected.c | 10
-rw-r--r--  contrib/buildsystems/Generators/Vcxproj.pm | 18
-rw-r--r--  contrib/coccinelle/hashmap.cocci | 16
-rw-r--r--  contrib/completion/git-completion.bash | 295
-rwxr-xr-x  contrib/hg-to-git/hg-to-git.py | 50
-rw-r--r--  contrib/svn-fe/svn-fe.txt | 4
-rwxr-xr-x  contrib/svn-fe/svnrdump_sim.py | 2
-rw-r--r--  convert.c | 29
-rw-r--r--  convert.h | 6
-rw-r--r--  credential-store.c | 9
-rw-r--r--  date.c | 27
-rw-r--r--  diff-delta.c | 2
-rw-r--r--  diff.c | 87
-rw-r--r--  diff.h | 2
-rw-r--r--  diffcore-break.c | 12
-rw-r--r--  diffcore-rename.c | 17
-rw-r--r--  dir.c | 349
-rw-r--r--  dir.h | 79
-rw-r--r--  environment.c | 2
-rw-r--r--  fast-import.c | 103
-rw-r--r--  fetch-negotiator.c | 25
-rw-r--r--  fetch-negotiator.h | 5
-rw-r--r--  fetch-object.c | 40
-rw-r--r--  fetch-object.h | 9
-rw-r--r--  fetch-pack.c | 56
-rwxr-xr-x  git-add--interactive.perl | 2
-rw-r--r--  git-compat-util.h | 51
-rwxr-xr-x  git-filter-branch.sh | 14
-rwxr-xr-x  git-gui/git-gui.sh | 159
-rw-r--r--  git-gui/lib/checkout_op.tcl | 6
-rw-r--r--  git-gui/lib/commit.tcl | 4
-rw-r--r--  git-gui/lib/diff.tcl | 96
-rw-r--r--  git-gui/lib/index.tcl | 8
-rwxr-xr-x  git-p4.py | 13
-rw-r--r--  git.c | 5
-rwxr-xr-x  gitk-git/gitk | 64
-rw-r--r--  gitk-git/po/zh_cn.po | 1367
-rw-r--r--  grep.c | 196
-rw-r--r--  grep.h | 25
-rw-r--r--  hashmap.c | 58
-rw-r--r--  hashmap.h | 176
-rw-r--r--  help.c | 3
-rw-r--r--  help.h | 2
-rw-r--r--  http.c | 7
-rw-r--r--  http.h | 4
-rw-r--r--  line-log.c | 71
-rw-r--r--  list-objects-filter-options.c | 324
-rw-r--r--  list-objects-filter-options.h | 62
-rw-r--r--  list-objects-filter.c | 382
-rw-r--r--  list-objects-filter.h | 40
-rw-r--r--  list-objects.c | 59
-rw-r--r--  ll-merge.c | 19
-rw-r--r--  ll-merge.h | 1
-rw-r--r--  log-tree.c | 5
-rw-r--r--  merge-recursive.c | 656
-rw-r--r--  merge-recursive.h | 164
-rw-r--r--  midx.c | 11
-rw-r--r--  name-hash.c | 57
-rw-r--r--  notes.c | 6
-rw-r--r--  object.c | 1
-rw-r--r--  oidmap.c | 20
-rw-r--r--  oidmap.h | 6
-rw-r--r--  pack-bitmap-write.c | 2
-rw-r--r--  pack-bitmap.c | 6
-rw-r--r--  pack-bitmap.h | 6
-rw-r--r--  pack-objects.c | 22
-rw-r--r--  pack-objects.h | 6
-rw-r--r--  pack-write.c | 8
-rw-r--r--  packfile.c | 50
-rw-r--r--  packfile.h | 2
-rw-r--r--  parse-options.c | 3
-rw-r--r--  parse-options.h | 18
-rw-r--r--  patch-ids.c | 18
-rw-r--r--  path.c | 39
-rw-r--r--  path.h | 3
-rw-r--r--  perl/Git/SVN.pm | 4
-rw-r--r--  pretty.c | 2
-rw-r--r--  progress.c | 61
-rw-r--r--  promisor-remote.c | 268
-rw-r--r--  promisor-remote.h | 33
-rw-r--r--  quote.c | 24
-rw-r--r--  quote.h | 1
-rw-r--r--  range-diff.c | 13
-rw-r--r--  read-cache.c | 55
-rw-r--r--  ref-filter.c | 43
-rw-r--r--  refs.c | 25
-rw-r--r--  refs/packed-backend.c | 23
-rw-r--r--  remote.c | 21
-rw-r--r--  remote.h | 2
-rw-r--r--  repo-settings.c | 68
-rw-r--r--  repository.h | 34
-rw-r--r--  rerere.c | 8
-rw-r--r--  revision.c | 44
-rw-r--r--  send-pack.c | 3
-rw-r--r--  sequencer.c | 203
-rw-r--r--  sequencer.h | 6
-rw-r--r--  setup.c | 13
-rw-r--r--  sha1-file.c | 24
-rw-r--r--  sha1-lookup.c | 12
-rw-r--r--  sha1-name.c | 35
-rw-r--r--  shallow.c | 5
-rw-r--r--  stable-qsort.c (renamed from compat/qsort.c) | 6
-rw-r--r--  strbuf.c | 15
-rw-r--r--  strbuf.h | 7
-rw-r--r--  sub-process.c | 20
-rw-r--r--  sub-process.h | 6
-rw-r--r--  submodule-config.c | 52
-rw-r--r--  t/helper/.gitignore | 9
-rw-r--r--  t/helper/test-date.c | 27
-rw-r--r--  t/helper/test-hashmap.c | 50
-rw-r--r--  t/helper/test-lazy-init-name-hash.c | 12
-rw-r--r--  t/helper/test-progress.c | 81
-rw-r--r--  t/helper/test-read-cache.c | 5
-rw-r--r--  t/helper/test-run-command.c | 153
-rw-r--r--  t/helper/test-tool.c | 1
-rw-r--r--  t/helper/test-tool.h | 1
-rw-r--r--  t/lib-git-svn.sh | 2
-rw-r--r--  t/lib-rebase.sh | 9
-rwxr-xr-x  t/perf/p5601-clone-reference.sh (renamed from t/perf/p5600-clone-reference.sh) | 0
-rwxr-xr-x  t/t0000-basic.sh | 58
-rwxr-xr-x  t/t0014-alias.sh | 7
-rwxr-xr-x  t/t0021-conversion.sh | 3
-rwxr-xr-x  t/t0028-working-tree-encoding.sh | 41
-rwxr-xr-x  t/t0040-parse-options.sh | 7
-rwxr-xr-x  t/t0050-filesystem.sh | 20
-rwxr-xr-x  t/t0061-run-command.sh | 21
-rwxr-xr-x  t/t0211-trace2-perf.sh | 4
-rwxr-xr-x  t/t0212-trace2-event.sh | 19
-rwxr-xr-x  t/t0410-partial-clone.sh | 93
-rwxr-xr-x  t/t0500-progress-display.sh | 286
-rwxr-xr-x  t/t1300-config.sh | 9
-rwxr-xr-x  t/t1309-early-config.sh | 7
-rwxr-xr-x  t/t1404-update-ref-errors.sh | 64
-rwxr-xr-x  t/t1414-reflog-walk.sh | 3
-rwxr-xr-x  t/t1450-fsck.sh | 16
-rwxr-xr-x  t/t1506-rev-parse-diagnosis.sh | 13
-rwxr-xr-x  t/t1507-rev-parse-upstream.sh | 12
-rwxr-xr-x  t/t1600-index.sh | 31
-rwxr-xr-x  t/t2022-checkout-paths.sh | 11
-rwxr-xr-x  t/t2070-restore.sh | 11
-rwxr-xr-x  t/t3005-ls-files-relative.sh | 12
-rwxr-xr-x  t/t3030-merge-recursive.sh | 37
-rwxr-xr-x  t/t3201-branch-contains.sh | 8
-rwxr-xr-x  t/t3206-range-diff.sh | 271
-rw-r--r--  t/t3206/history.export | 31
-rwxr-xr-x  t/t3301-notes.sh | 140
-rwxr-xr-x  t/t3305-notes-fanout.sh | 22
-rwxr-xr-x  t/t3306-notes-prune.sh | 45
-rwxr-xr-x  t/t3400-rebase.sh | 38
-rwxr-xr-x  t/t3404-rebase-interactive.sh | 43
-rwxr-xr-x  t/t3416-rebase-onto-threedots.sh | 57
-rwxr-xr-x  t/t3418-rebase-continue.sh | 14
-rwxr-xr-x  t/t3420-rebase-autostash.sh | 12
-rwxr-xr-x  t/t3421-rebase-topology-linear.sh | 29
-rwxr-xr-x  t/t3422-rebase-incompatible-options.sh | 10
-rwxr-xr-x  t/t3427-rebase-subtree.sh | 160
-rwxr-xr-x  t/t3429-rebase-edit-todo.sh | 21
-rwxr-xr-x  t/t3430-rebase-merges.sh | 45
-rwxr-xr-x  t/t3431-rebase-fork-point.sh | 57
-rwxr-xr-x  t/t3432-rebase-fast-forward.sh | 125
-rwxr-xr-x  t/t3506-cherry-pick-ff.sh | 8
-rwxr-xr-x  t/t3600-rm.sh | 4
-rwxr-xr-x  t/t3701-add-interactive.sh | 2
-rwxr-xr-x  t/t3800-mktag.sh | 49
-rwxr-xr-x  t/t3903-stash.sh | 48
-rwxr-xr-x  t/t3908-stash-in-worktree.sh | 27
-rwxr-xr-x  t/t4000-diff-format.sh | 2
-rwxr-xr-x  t/t4002-diff-basic.sh | 367
-rwxr-xr-x  t/t4009-diff-rename-4.sh | 19
-rwxr-xr-x  t/t4013-diff-various.sh | 3
-rwxr-xr-x  t/t4014-format-patch.sh | 815
-rwxr-xr-x  t/t4018-diff-funcname.sh | 1
-rw-r--r--  t/t4018/dts-labels | 9
-rw-r--r--  t/t4018/dts-node-unitless | 8
-rw-r--r--  t/t4018/dts-nodes | 8
-rw-r--r--  t/t4018/dts-nodes-comment1 | 8
-rw-r--r--  t/t4018/dts-nodes-comment2 | 8
-rw-r--r--  t/t4018/dts-reference | 9
-rw-r--r--  t/t4018/dts-root | 5
-rwxr-xr-x  t/t4034-diff-words.sh | 1
-rw-r--r--  t/t4034/dts/expect | 37
-rw-r--r--  t/t4034/dts/post | 32
-rw-r--r--  t/t4034/dts/pre | 32
-rwxr-xr-x  t/t4038-diff-combined.sh | 2
-rwxr-xr-x  t/t4067-diff-partial-clone.sh | 31
-rwxr-xr-x  t/t4150-am.sh | 52
-rwxr-xr-x  t/t4202-log.sh | 22
-rwxr-xr-x  t/t4210-log-i18n.sh | 41
-rwxr-xr-x  t/t4211-line-log.sh | 82
-rwxr-xr-x  t/t4214-log-graph-octopus.sh | 329
-rwxr-xr-x  t/t5004-archive-corner-cases.sh | 19
-rwxr-xr-x  t/t5307-pack-missing-commit.sh | 4
-rwxr-xr-x  t/t5318-commit-graph.sh | 93
-rwxr-xr-x  t/t5324-split-commit-graph.sh | 4
-rwxr-xr-x  t/t5500-fetch-pack.sh | 54
-rwxr-xr-x  t/t5510-fetch.sh | 60
-rwxr-xr-x  t/t5514-fetch-multiple.sh | 11
-rwxr-xr-x  t/t5515-fetch-merge-logic.sh | 3
-rwxr-xr-x  t/t5517-push-mirror.sh | 10
-rwxr-xr-x  t/t5537-fetch-shallow.sh | 3
-rwxr-xr-x  t/t5541-http-push-smart.sh | 6
-rwxr-xr-x  t/t5545-push-options.sh | 3
-rwxr-xr-x  t/t5552-skipping-fetch-negotiator.sh | 23
-rwxr-xr-x  t/t5553-set-upstream.sh | 178
-rwxr-xr-x  t/t5601-clone.sh | 7
-rwxr-xr-x  t/t5607-clone-bundle.sh | 11
-rwxr-xr-x  t/t5616-partial-clone.sh | 62
-rwxr-xr-x  t/t5700-protocol-v1.sh | 3
-rwxr-xr-x  t/t5702-protocol-v2.sh | 16
-rwxr-xr-x  t/t5703-upload-pack-ref-in-want.sh | 204
-rwxr-xr-x  t/t5801-remote-helpers.sh | 1
-rwxr-xr-x  t/t6000-rev-list-misc.sh | 8
-rwxr-xr-x  t/t6006-rev-list-format.sh | 5
-rwxr-xr-x  t/t6011-rev-list-with-bad-commit.sh | 2
-rwxr-xr-x  t/t6036-recursive-corner-cases.sh | 8
-rwxr-xr-x  t/t6047-diff3-conflict-markers.sh | 202
-rwxr-xr-x  t/t6112-rev-list-filters-objects.sh | 194
-rwxr-xr-x  t/t6120-describe.sh | 15
-rwxr-xr-x  t/t6300-for-each-ref.sh | 19
-rwxr-xr-x  t/t6501-freshen-objects.sh | 6
-rwxr-xr-x  t/t7008-filter-branch-null-sha1.sh (renamed from t/t7009-filter-branch-null-sha1.sh) | 0
-rwxr-xr-x  t/t7300-clean.sh | 64
-rwxr-xr-x  t/t7400-submodule-basic.sh | 2
-rwxr-xr-x  t/t7406-submodule-update.sh | 3
-rwxr-xr-x  t/t7503-pre-commit-and-pre-merge-commit-hooks.sh | 281
-rwxr-xr-x  t/t7503-pre-commit-hook.sh | 139
-rwxr-xr-x  t/t7505-prepare-commit-msg-hook.sh | 8
-rw-r--r--  t/t7505/expected-rebase-i | 3
-rwxr-xr-x  t/t7512-status-help.sh | 6
-rwxr-xr-x  t/t7812-grep-icase-non-ascii.sh | 28
-rwxr-xr-x  t/t7814-grep-recurse-submodules.sh | 21
-rwxr-xr-x  t/t7815-grep-binary.sh (renamed from t/t7008-grep-binary.sh) | 101
-rwxr-xr-x  t/t7816-grep-binary-pattern.sh | 127
-rwxr-xr-x  t/t9300-fast-import.sh | 50
-rwxr-xr-x  t/t9350-fast-export.sh | 68
-rwxr-xr-x  t/t9902-completion.sh | 75
-rw-r--r--  t/test-lib-functions.sh | 10
-rw-r--r--  t/test-lib.sh | 43
-rw-r--r--  tag.c | 7
-rw-r--r--  tag.h | 1
-rwxr-xr-x  templates/hooks--pre-merge-commit.sample | 13
-rw-r--r--  trace.c | 2
-rw-r--r--  trace2/tr2_dst.c | 120
-rw-r--r--  trace2/tr2_dst.h | 1
-rw-r--r--  trace2/tr2_sysenv.c | 3
-rw-r--r--  trace2/tr2_sysenv.h | 2
-rw-r--r--  trace2/tr2_tgt_event.c | 36
-rw-r--r--  trace2/tr2_tgt_normal.c | 37
-rw-r--r--  trace2/tr2_tgt_perf.c | 91
-rw-r--r--  transport-helper.c | 48
-rw-r--r--  transport-internal.h | 6
-rw-r--r--  transport.c | 33
-rw-r--r--  tree-walk.c | 64
-rw-r--r--  tree-walk.h | 18
-rw-r--r--  tree.c | 18
-rw-r--r--  unpack-trees.c | 150
-rw-r--r--  unpack-trees.h | 4
-rw-r--r--  upload-pack.c | 15
-rw-r--r--  url.c | 6
-rw-r--r--  url.h | 8
-rw-r--r--  userdiff.c | 8
-rw-r--r--  wrapper.c | 84
-rw-r--r--  wt-status.c | 6
-rw-r--r--  wt-status.h | 2
-rw-r--r--  xdiff/xdiffi.c | 99
403 files changed, 13416 insertions, 5042 deletions
diff --git a/.gitignore b/.gitignore
index 521d8f4fb4..89b3b79c1a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -216,6 +216,7 @@
/tags
/TAGS
/cscope*
+*.hcc
*.obj
*.lib
*.res
@@ -231,7 +232,6 @@
*.ipdb
*.dll
.vs/
-*.manifest
Debug/
Release/
/UpgradeLog*.htm
diff --git a/.mailmap b/.mailmap
index 9a5ff04753..14fa041043 100644
--- a/.mailmap
+++ b/.mailmap
@@ -18,6 +18,7 @@ Alexey Shumkin <alex.crezoff@gmail.com> <zapped@mail.ru>
Alexey Shumkin <alex.crezoff@gmail.com> <Alex.Crezoff@gmail.com>
Anders Kaseorg <andersk@MIT.EDU> <andersk@ksplice.com>
Anders Kaseorg <andersk@MIT.EDU> <andersk@mit.edu>
+Andrey Mazo <ahippo@yandex.com> Mazo, Andrey <amazo@checkvideo.com>
Aneesh Kumar K.V <aneesh.kumar@gmail.com>
Amos Waterland <apw@debian.org> <apw@rossby.metr.ou.edu>
Amos Waterland <apw@debian.org> <apw@us.ibm.com>
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..fc4645d5c0
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,93 @@
+# Git Code of Conduct
+
+This code of conduct outlines our expectations for participants within
+the Git community, as well as steps for reporting unacceptable behavior.
+We are committed to providing a welcoming and inspiring community for
+all and expect our code of conduct to be honored. Anyone who violates
+this code of conduct may be banned from the community.
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to make participation in our project and
+our community a harassment-free experience for everyone, regardless of age,
+body size, disability, ethnicity, sex characteristics, gender identity and
+expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies
+when an individual is representing the project or its community in public
+spaces. Examples of representing a project or community include using an
+official project e-mail address, posting via an official social media account,
+or acting as an appointed representative at an online or offline event.
+Representation of a project may be further defined and clarified by project
+maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at git@sfconservancy.org. All
+complaints will be reviewed and investigated and will result in a response
+that is deemed necessary and appropriate to the circumstances. The project
+team is obligated to maintain confidentiality with regard to the reporter of
+an incident. Further details of specific enforcement policies may be posted
+separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+The project leadership team can be contacted by email as a whole at
+git@sfconservancy.org, or individually:
+
+ - Ævar Arnfjörð Bjarmason <avarab@gmail.com>
+ - Christian Couder <christian.couder@gmail.com>
+ - Jeff King <peff@peff.net>
+ - Junio C Hamano <gitster@pobox.com>
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
diff --git a/Documentation/Makefile b/Documentation/Makefile
index 76f2ecfc1b..06d85ad958 100644
--- a/Documentation/Makefile
+++ b/Documentation/Makefile
@@ -123,7 +123,8 @@ ASCIIDOC_HTML = xhtml11
ASCIIDOC_DOCBOOK = docbook
ASCIIDOC_CONF = -f asciidoc.conf
ASCIIDOC_COMMON = $(ASCIIDOC) $(ASCIIDOC_EXTRA) $(ASCIIDOC_CONF) \
- -agit_version=$(GIT_VERSION)
+ -amanversion=$(GIT_VERSION) \
+ -amanmanual='Git Manual' -amansource='Git'
TXT_TO_HTML = $(ASCIIDOC_COMMON) -b $(ASCIIDOC_HTML)
TXT_TO_XML = $(ASCIIDOC_COMMON) -b $(ASCIIDOC_DOCBOOK)
MANPAGE_XSL = manpage-normal.xsl
@@ -197,11 +198,13 @@ ifdef USE_ASCIIDOCTOR
ASCIIDOC = asciidoctor
ASCIIDOC_CONF =
ASCIIDOC_HTML = xhtml5
-ASCIIDOC_DOCBOOK = docbook45
+ASCIIDOC_DOCBOOK = docbook5
ASCIIDOC_EXTRA += -acompat-mode -atabsize=8
ASCIIDOC_EXTRA += -I. -rasciidoctor-extensions
ASCIIDOC_EXTRA += -alitdd='&\#x2d;&\#x2d;'
DBLATEX_COMMON =
+XMLTO_EXTRA += --skip-validation
+XMLTO_EXTRA += -x manpage.xsl
endif
SHELL_PATH ?= $(SHELL)
diff --git a/Documentation/MyFirstContribution.txt b/Documentation/MyFirstContribution.txt
index f8670379c0..5e9b808f5f 100644
--- a/Documentation/MyFirstContribution.txt
+++ b/Documentation/MyFirstContribution.txt
@@ -97,8 +97,8 @@ int cmd_psuh(int argc, const char **argv, const char *prefix)
----
We'll also need to add the declaration of psuh; open up `builtin.h`, find the
-declaration for `cmd_push`, and add a new line for `psuh` immediately before it,
-in order to keep the declarations sorted:
+declaration for `cmd_pull`, and add a new line for `psuh` immediately before it,
+in order to keep the declarations alphabetically sorted:
----
int cmd_psuh(int argc, const char **argv, const char *prefix);
@@ -123,7 +123,7 @@ int cmd_psuh(int argc, const char **argv, const char *prefix)
}
----
-Let's try to build it. Open `Makefile`, find where `builtin/push.o` is added
+Let's try to build it. Open `Makefile`, find where `builtin/pull.o` is added
to `BUILTIN_OBJS`, and add `builtin/psuh.o` in the same way next to it in
alphabetical order. Once you've done so, move to the top-level directory and
build simply with `make`. Also add the `DEVELOPER=1` variable to turn on
@@ -149,7 +149,7 @@ a `cmd_struct` to the `commands[]` array. `struct cmd_struct` takes a string
with the command name, a function pointer to the command implementation, and a
setup option flag. For now, let's keep mimicking `push`. Find the line where
`cmd_push` is registered, copy it, and modify it for `cmd_psuh`, placing the new
-line in alphabetical order.
+line in alphabetical order (immediately before `cmd_pull`).
The options are documented in `builtin.h` under "Adding a new built-in." Since
we hope to print some data about the user's current workspace context later,
@@ -167,7 +167,7 @@ Check it out! You've got a command! Nice work! Let's commit this.
`git status` reveals modified `Makefile`, `builtin.h`, and `git.c` as well as
untracked `builtin/psuh.c` and `git-psuh`. First, let's take care of the binary,
-which should be ignored. Open `.gitignore` in your editor, find `/git-push`, and
+which should be ignored. Open `.gitignore` in your editor, find `/git-pull`, and
add an entry for your new command in alphabetical order:
----
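
A quick way to exercise the walkthrough above, sketched as a shell session. The `DEVELOPER=1` knob and the untracked `git-psuh` build product come from the text itself; the exact commands are illustrative rather than part of the patch:

----
# build with the stricter developer flags mentioned above
make DEVELOPER=1

# the Makefile and git.c hunks above produce an untracked git-psuh
# binary; run it directly, or via the freshly built git wrapper
./git-psuh
./git psuh
----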
diff --git a/Documentation/RelNotes/2.24.0.txt b/Documentation/RelNotes/2.24.0.txt
new file mode 100644
index 0000000000..22de0bc497
--- /dev/null
+++ b/Documentation/RelNotes/2.24.0.txt
@@ -0,0 +1,370 @@
+Git 2.24 Release Notes
+======================
+
+Updates since v2.23
+-------------------
+
+Backward compatibility note
+
+ * Although it is not officially deprecated, "filter-branch" is
+ showing its age and alternatives are available. From this release,
+ we started to discourage its use and point people toward
+ filter-repo.
+
+UI, Workflows & Features
+
+ * We now have an active interim maintainer for the Git-Gui part of
+ the system. Praise and thank Pratyush Yadav for volunteering.
+
+ * The command line parser learned "--end-of-options" notation; the
+ standard convention for scripters to have a hardcoded set of options
+ first on the command line, and force the command to treat end-user
+ input as non-options, has been to use "--" as the delimiter, but
+ that would not work for commands that use "--" as a delimiter
+ between revs and pathspec.
+
+ * A mechanism to affect the default setting for a (related) group of
+ configuration variables is introduced.
+
+ * "git fetch" learned "--set-upstream" option to help those who first
+ clone from their private fork they intend to push to, add the true
+ upstream via "git remote add" and then "git fetch" from it.
+
+ * Device-tree files learned their own userdiff patterns.
+ (merge 3c81760bc6 sb/userdiff-dts later to maint).
+
+ * "git rebase --rebase-merges" learned to drive different merge
+ strategies and pass strategy specific options to them.
+
+ * A new "pre-merge-commit" hook has been introduced.
+
+ * Command line completion updates for "git -c var.name=val" have been
+ added.
+
+ * The lazy clone machinery has been taught that there can be more
+ than one promisor remote and consult them in order when downloading
+ missing objects on demand.
+
+ * The list-objects-filter API (used to create a sparse/lazy clone)
+ learned to take a combined filter specification.
+
+ * The documentation and tests for "git format-patch" have been
+ cleaned up.
+
+ * On Windows, the root level of a UNC share is now allowed to be used
+ just like any other directory.
+
+ * The command line completion support (in contrib/) learned about the
+ "--skip" option of "git revert" and "git cherry-pick".
+
+ * "git rebase --keep-base <upstream>" tries to find the original base
+ of the topic being rebased and rebase on top of that same base,
+ which is useful when running the "git rebase -i" (and its limited
+ variant "git rebase -x").
+
+ The command also has learned to fast-forward in more cases where it
+ can instead of replaying to recreate identical commits.
+
+ * A configuration variable tells "git fetch" to write the commit
+ graph after finishing.
+
+ * "git add -i" has been taught to show the total number of hunks and
+ the hunks that have been processed so far when showing prompts.
+
+ * "git fetch --jobs=<n>" allowed <n> parallel jobs when fetching
+ submodules, but this did not apply to "git fetch --multiple" that
+ fetches from multiple remote repositories. It now does.
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ * The code to write commit-graph over given commit object names has
+ been made a bit more robust.
+
+ * The first line of verbose output from each test piece now carries
+ the test name and number to help scanning with eyeballs.
+
+ * Further clean-up of the initialization code.
+
+ * xmalloc() used to have a mechanism to ditch memory and address
+ space resources as the last resort upon seeing an allocation
+ failure from the underlying malloc(), which made the code complex
+ and thread-unsafe with dubious benefit, as major memory resource
+ users already do limit their uses with various other mechanisms.
+ It has been simplified away.
+
+ * Unnecessary full-tree diff in "git log -L" machinery has been
+ optimized away.
+
+ * The http transport lacked some optimization the native transports
+ learned to avoid unnecessary ref advertisement, which has been
+ corrected.
+
+ * Preparation for SHA-256 upgrade continues in the test department.
+ (merge 0c37c41d13 bc/hash-independent-tests-part-5 later to maint).
+
+ * The memory ownership model of the "git fast-import" got
+ straightened out.
+
+ * Output from trace2 subsystem is formatted more prettily now.
+
+ * The internal code originally invented for ".gitignore" processing
+ got reshuffled and renamed to make it less tied to "excluding" and
+ stress more that it is about "matching", as it has been reused for
+ things like sparse checkout specification that want to check if a
+ path is "included".
+
+ * "git stash" learned to write refreshed index back to disk.
+
+ * Coccinelle checks are done on more source files than before now.
+
+ * The cache-tree code has been taught to be less aggressive in
+ attempting to see if a tree object it computed already exists in
+ the repository.
+
+ * The code to parse and use the commit-graph file has been made more
+ robust against corrupted input.
+
+ * The hg-to-git script (in contrib/) has been updated to work with
+ Python 3.
+
+ * Update the way build artifacts in t/helper/ directory are ignored.
+
+ * Preparation for SHA-256 upgrade continues.
+
+ * "git log --graph" for an octopus merge is sometimes colored
+ incorrectly, which is demonstrated and documented but not yet
+ fixed.
+
+ * The trace2 output, when sent to files in a designated
+ directory, can populate the directory with too many files; a
+ mechanism is introduced to set the maximum number of files and
+ discard further logs when the maximum is reached.
+
+ * We have adopted a Code-of-conduct document.
+ (merge 3f9ef874a7 jk/coc later to maint).
+
+
+Fixes since v2.23
+-----------------
+
+ * "git grep --recurse-submodules" that looks at the working tree
+ files looked at the contents in the index in submodules, instead of
+ files in the working tree.
+ (merge 6a289d45c0 mt/grep-submodules-working-tree later to maint).
+
+ * Codepaths to walk tree objects have been audited for integer
+ overflows and hardened.
+ (merge 5aa02f9868 jk/tree-walk-overflow later to maint).
+
+ * "git pack-refs" can lose refs that are created while running, which
+ is getting corrected.
+ (merge a613d4f817 sc/pack-refs-deletion-racefix later to maint).
+
+ * "git checkout" and "git restore" to re-populate the index from a
+ tree-ish (typically HEAD) did not work correctly for a path that
+ was removed and then added again with the intent-to-add bit, when
+ the corresponding working tree file was empty. This has been
+ corrected.
+
+ * Compilation fix.
+ (merge 70597e8386 rs/nedalloc-fixlets later to maint).
+
+ * "git gui" learned to call the clean-up procedure before exiting.
+ (merge 0d88f3d2c5 py/git-gui-do-quit later to maint).
+
+ * We promoted the "indent heuristics" that decides where to split
+ diff hunks from experimental to the default a few years ago, but
+ some stale documentation still marked it as experimental, which has
+ been corrected.
+ (merge 64e5e1fba1 sg/diff-indent-heuristic-non-experimental later to maint).
+
+ * Fix a mismerge that happened in 2.22 timeframe.
+ (merge acb7da05ac en/checkout-mismerge-fix later to maint).
+
+ * "git archive" recorded incorrect length in extended pax header in
+ some corner cases, which has been corrected.
+ (merge 71d41ff651 rs/pax-extended-header-length-fix later to maint).
+
+ * On-demand object fetching in lazy clone incorrectly tried to fetch
+ commits from submodule projects, while still working in the
+ superproject, which has been corrected.
+ (merge a63694f523 jt/diff-lazy-fetch-submodule-fix later to maint).
+
+ * Prepare get_short_oid() codepath to be thread-safe.
+ (merge 7cfcb16b0e rs/sort-oid-array-thread-safe later to maint).
+
+ * "for-each-ref" and friends that show refs did not protect themselves
+ against ancient tags that did not record tagger names when asked to
+ show "%(taggername)", which have been corrected.
+ (merge 8b3f33ef11 mp/for-each-ref-missing-name-or-email later to maint).
+
+ * The "git am" based backend of "git rebase" ignored the result of
+ updating ".gitattributes" done in one step when replaying
+ subsequent steps.
+ (merge 2c65d90f75 bc/reread-attributes-during-rebase later to maint).
+
+ * Tell cURL library to use the same malloc() implementation, with the
+ xmalloc() wrapper, as the rest of the system, for consistency.
+ (merge 93b980e58f cb/curl-use-xmalloc later to maint).
+
+ * Build fix to adjust .gitignore to unignore a path that we started to track.
+ (merge aac6ff7b5b js/visual-studio later to maint).
+
+ * A few implementation fixes in the notes API.
+ (merge 60fe477a0b mh/notes-duplicate-entries later to maint).
+
+ * Fix an earlier regression to "git push --all" which should have
+ been forbidden when the target remote repository is set to be a
+ mirror.
+ (merge 8e4c8af058 tg/push-all-in-mirror-forbidden later to maint).
+
+ * Fix an earlier regression in the test suite, which mistakenly
+ stopped running HTTPD tests.
+ (merge 3960290675 sg/git-test-boolean later to maint).
+
+ * "git rebase --autostash <upstream> <branch>", when <branch> is
+ different from the current branch, incorrectly moved the tip of the
+ current branch, which has been corrected.
+ (merge bf1e28e0ad bw/rebase-autostash-keep-current-branch later to maint).
+
+ * Update support for Asciidoctor documentation toolchain.
+ (merge 83b0b8953e ma/asciidoctor-refmiscinfo later to maint).
+
+ * Start using DocBook 5 (instead of DocBook 4.5) as Asciidoctor 2.0
+ no longer works with the older one.
+ (merge f6461b82b9 bc/doc-use-docbook-5 later to maint).
+
+ * The markup used in user-manual has been updated to work better with
+ asciidoctor.
+ (merge c4d2f6143a ma/user-manual-markup-update later to maint).
+
+ * Make sure the grep machinery does not abort when seeing a payload
+ that is not UTF-8 even when JIT is not in use with PCRE1.
+ (merge ad7c543e3b cb/skip-utf8-check-with-pcre1 later to maint).
+
+ * The name of the blob object that stores the filter specification
+ for sparse cloning/fetching was interpreted in a wrong place in the
+ code, causing Git to abort.
+
+ * "git log --decorate-refs-exclude=<pattern>" was incorrectly
+ overruled when the "--simplify-by-decoration" option is used, which
+ has been corrected.
+ (merge 0cc7380d88 rs/simplify-by-deco-with-deco-refs-exclude later to maint).
+
+ * The "upload-pack" (the counterpart of "git fetch") needs to disable
+ commit-graph when responding to a shallow clone/fetch request, but
+ the way this was done made Git panic, which has been corrected.
+
+ * The object traversal machinery has been optimized not to load tree
+ objects when we are only interested in commit history.
+ (merge 72ed80c784 jk/list-objects-optim-wo-trees later to maint).
+
+ * The object name parser for "Nth parent" syntax has been made more
+ robust against integer overflows.
+ (merge 59fa5f5a25 rs/nth-parent-parse later to maint).
+
+ * The code used in following tags in "git fetch" has been optimized.
+ (merge b7e2d8bca5 ms/fetch-follow-tag-optim later to maint).
+
+ * Regression fix for progress output.
+ (merge 2bb74b53a4 sg/progress-fix later to maint).
+
+ * A bug in merge-recursive code that triggers when a branch with a
+ symbolic link is merged with a branch that replaces it with a
+ directory has been fixed.
+ (merge 83e3ad3b12 jt/merge-recursive-symlink-is-not-a-dir-in-way later to maint).
+
+ * The rename detection logic sorts a list of rename source candidates
+ by similarity to pick the best candidate, which means that a tie
+ between sources with the same similarity is broken by the original
+ location in the original candidate list (which is sorted by path).
+ The sort by similarity is now forced to be stable (stability is
+ not promised by the system-supplied qsort(3)) to ensure consistent
+ results across platforms.
+ (merge 2049b8dc65 js/diff-rename-force-stable-sort later to maint).
+
+ * The code to skip "UTF" and "UTF-" prefix, when computing an advice
+ message, did not work correctly when the prefix was "UTF", which
+ has been fixed.
+ (merge b181676ce9 rs/convert-fix-utf-without-dash later to maint).
+
+ * The author names taken from SVN repositories may have extra leading
+ or trailing whitespaces, which are now munged away.
+ (merge 4ddd4bddb1 tk/git-svn-trim-author-name later to maint).
+
+ * "git rebase -i" showed a wrong HEAD while "reword" open the editor.
+ (merge b0a3186140 pw/rebase-i-show-HEAD-to-reword later to maint).
+
+ * A few simplification and bugfixes to PCRE interface.
+ (merge c581e4a749 ab/pcre-jit-fixes later to maint).
+
+ * PCRE fixes.
+ (merge ff61681b46 cb/pcre1-cleanup later to maint).
+
+ * "git range-diff" segfaulted when diff.noprefix configuration was
+ used, as it blindly expected the patch it internally generates to
+ have the standard a/ and b/ prefixes. The command now forces the
+ internal patch to be built without any prefix, not to be affected
+ by any end-user configuration.
+ (merge 937b76ed49 js/range-diff-noprefix later to maint).
+
+ * "git stash apply" in a subdirectory of a secondary worktree failed
+ to access the worktree correctly, which has been corrected.
+ (merge dfd557c978 js/stash-apply-in-secondary-worktree later to maint).
+
+ * The merge-recursive machinery is one of the most complex parts of
+ the system that accumulated cruft over time. This large series
+ cleans up the implementation quite a bit.
+ (merge b657047719 en/merge-recursive-cleanup later to maint).
+
+ * Pretty-printed command line formatter (used in e.g. reporting the
+ command being run by the tracing API) had a bug that lost an
+ argument that is an empty string, which has been corrected.
+ (merge ce2d7ed2fd gs/sq-quote-buf-pretty later to maint).
+
+ * "git range-diff" failed to handle mode-only change, which has been
+ corrected.
+ (merge 2b6a9b13ca tg/range-diff-output-update later to maint).
+
+ * Other code cleanup, docfix, build fix, etc.
+ (merge d1387d3895 en/fast-import-merge-doc later to maint).
+ (merge 1c24a54ea4 bm/repository-layout-typofix later to maint).
+ (merge 415b770b88 ds/midx-expire-repack later to maint).
+ (merge 19800bdc3f nd/diff-parseopt later to maint).
+ (merge 58166c2e9d tg/t0021-racefix later to maint).
+ (merge 7027f508c7 dl/compat-cleanup later to maint).
+ (merge e770fbfeff jc/test-cleanup later to maint).
+ (merge 1fd881d404 rs/trace2-dst-warning later to maint).
+ (merge 7e92756751 mh/http-urlmatch-cleanup later to maint).
+ (merge 9784f97321 mh/release-commit-memory-fix later to maint).
+ (merge 60d198d022 tb/banned-vsprintf-namefix later to maint).
+ (merge 80e3658647 rs/help-unknown-ref-does-not-return later to maint).
+ (merge 0a8bc7068f dt/remote-helper-doc-re-lock-option later to maint).
+ (merge 27fd1e4ea7 en/merge-options-ff-and-friends later to maint).
+ (merge 502c386ff9 sg/clean-nested-repo-with-ignored later to maint).
+ (merge 26e3d1cbea am/mailmap-andrey-mazo later to maint).
+ (merge 47b27c96fa ss/get-time-cleanup later to maint).
+ (merge dd2e50a84e jk/commit-graph-cleanup later to maint).
+ (merge 4fd39c76e6 cs/pretty-formats-doc-typofix later to maint).
+ (merge 40e747e89d dl/submodule-set-branch later to maint).
+ (merge 689a146c91 rs/commit-graph-use-list-count later to maint).
+ (merge 0eb7c37a8a js/doc-patch-text later to maint).
+ (merge 4b3aa170d1 rs/nth-switch-code-simplification later to maint).
+ (merge 0d4304c124 ah/doc-submodule-ignore-submodules later to maint).
+ (merge af78249463 cc/svn-fe-py-shebang later to maint).
+ (merge 7bd97d6dff rs/alias-use-copy-array later to maint).
+ (merge c46ebc2496 sg/travis-help-debug later to maint).
+ (merge 24c681794f ps/my-first-contribution-alphasort later to maint).
+ (merge 75b2c15435 cb/do-not-use-test-cmp-with-a later to maint).
+ (merge cda0d497e3 bw/submodule-helper-usage-fix later to maint).
+ (merge fe0ed5d5e9 am/visual-studio-config-fix later to maint).
+ (merge 2e09c01232 sg/name-rev-cutoff-underflow-fix later to maint).
+ (merge ddb3c856f3 as/shallow-slab-use-fix later to maint).
+ (merge 71f4960b91 js/mingw-spawn-with-spaces-in-path later to maint).
+ (merge 53d687bf5f ah/cleanups later to maint).
+ (merge f537485fa5 rs/test-remove-useless-debugging-cat later to maint).
+ (merge 11a3d3aadd dl/rev-list-doc-cleanup later to maint).
+ (merge d928a8388a am/t0028-utf16-tests later to maint).
+ (merge b05b40930e dl/t0000-skip-test-test later to maint).
+ (merge 03d3b1297c js/xdiffi-comment-updates later to maint).
diff --git a/Documentation/RelNotes/2.7.1.txt b/Documentation/RelNotes/2.7.1.txt
index 6553d69e33..6323feaf64 100644
--- a/Documentation/RelNotes/2.7.1.txt
+++ b/Documentation/RelNotes/2.7.1.txt
@@ -10,7 +10,7 @@ Fixes since v2.7
setting GIT_WORK_TREE environment themselves.
* The "exclude_list" structure has the usual "alloc, nr" pair of
- fields to be used by ALLOC_GROW(), but clear_exclude_list() forgot
+ fields to be used by ALLOC_GROW(), but clear_pattern_list() forgot
to reset 'alloc' to 0 when it cleared 'nr' to discard the managed
array.
diff --git a/Documentation/RelNotes/2.8.0.txt b/Documentation/RelNotes/2.8.0.txt
index 25079710fa..5fbe1b86ee 100644
--- a/Documentation/RelNotes/2.8.0.txt
+++ b/Documentation/RelNotes/2.8.0.txt
@@ -270,7 +270,7 @@ notes for details).
setting GIT_WORK_TREE environment themselves.
* The "exclude_list" structure has the usual "alloc, nr" pair of
- fields to be used by ALLOC_GROW(), but clear_exclude_list() forgot
+ fields to be used by ALLOC_GROW(), but clear_pattern_list() forgot
to reset 'alloc' to 0 when it cleared 'nr' to discard the managed
array.
diff --git a/Documentation/SubmittingPatches b/Documentation/SubmittingPatches
index 6d589e118c..1a60cc1329 100644
--- a/Documentation/SubmittingPatches
+++ b/Documentation/SubmittingPatches
@@ -372,9 +372,9 @@ such as "Thanks-to:", "Based-on-patch-by:", or "Mentored-by:".
Some parts of the system have dedicated maintainers with their own
repositories.
-- `git-gui/` comes from git-gui project, maintained by Pat Thoyts:
+- `git-gui/` comes from git-gui project, maintained by Pratyush Yadav:
- git://repo.or.cz/git-gui.git
+ https://github.com/prati0100/git-gui.git
- `gitk-git/` comes from Paul Mackerras's gitk project:
diff --git a/Documentation/asciidoc.conf b/Documentation/asciidoc.conf
index 2c16c536ba..8fc4b67081 100644
--- a/Documentation/asciidoc.conf
+++ b/Documentation/asciidoc.conf
@@ -78,9 +78,9 @@ template::[header-declarations]
<refmeta>
<refentrytitle>{mantitle}</refentrytitle>
<manvolnum>{manvolnum}</manvolnum>
-<refmiscinfo class="source">Git</refmiscinfo>
-<refmiscinfo class="version">{git_version}</refmiscinfo>
-<refmiscinfo class="manual">Git Manual</refmiscinfo>
+<refmiscinfo class="source">{mansource}</refmiscinfo>
+<refmiscinfo class="version">{manversion}</refmiscinfo>
+<refmiscinfo class="manual">{manmanual}</refmiscinfo>
</refmeta>
<refnamediv>
<refname>{manname}</refname>
diff --git a/Documentation/asciidoctor-extensions.rb b/Documentation/asciidoctor-extensions.rb
index 0089e0cfb8..d906a00803 100644
--- a/Documentation/asciidoctor-extensions.rb
+++ b/Documentation/asciidoctor-extensions.rb
@@ -9,8 +9,11 @@ module Git
named :chrome
def process(parent, target, attrs)
- if parent.document.basebackend? 'html'
- prefix = parent.document.attr('git-relative-html-prefix')
+ prefix = parent.document.attr('git-relative-html-prefix')
+ if parent.document.doctype == 'book'
+ "<ulink url=\"#{prefix}#{target}.html\">" \
+ "#{target}(#{attrs[1]})</ulink>"
+ elsif parent.document.basebackend? 'html'
%(<a href="#{prefix}#{target}.html">#{target}(#{attrs[1]})</a>)
elsif parent.document.basebackend? 'docbook'
"<citerefentry>\n" \
@@ -20,9 +23,26 @@ module Git
end
end
end
+
+ class DocumentPostProcessor < Asciidoctor::Extensions::Postprocessor
+ def process document, output
+ if document.basebackend? 'docbook'
+ mansource = document.attributes['mansource']
+ manversion = document.attributes['manversion']
+ manmanual = document.attributes['manmanual']
+ new_tags = "" \
+ "<refmiscinfo class=\"source\">#{mansource}</refmiscinfo>\n" \
+ "<refmiscinfo class=\"version\">#{manversion}</refmiscinfo>\n" \
+ "<refmiscinfo class=\"manual\">#{manmanual}</refmiscinfo>\n"
+ output = output.sub(/<\/refmeta>/, new_tags + "</refmeta>")
+ end
+ output
+ end
+ end
end
end
Asciidoctor::Extensions.register do
inline_macro Git::Documentation::LinkGitProcessor, :linkgit
+ postprocessor Git::Documentation::DocumentPostProcessor
end
diff --git a/Documentation/config.txt b/Documentation/config.txt
index e3f5bc3396..f50f1b4128 100644
--- a/Documentation/config.txt
+++ b/Documentation/config.txt
@@ -178,47 +178,49 @@ to either specify only the realpath version, or both versions.
Example
~~~~~~~
- # Core variables
- [core]
- ; Don't trust file modes
- filemode = false
-
- # Our diff algorithm
- [diff]
- external = /usr/local/bin/diff-wrapper
- renames = true
-
- [branch "devel"]
- remote = origin
- merge = refs/heads/devel
-
- # Proxy settings
- [core]
- gitProxy="ssh" for "kernel.org"
- gitProxy=default-proxy ; for the rest
-
- [include]
- path = /path/to/foo.inc ; include by absolute path
- path = foo.inc ; find "foo.inc" relative to the current file
- path = ~/foo.inc ; find "foo.inc" in your `$HOME` directory
-
- ; include if $GIT_DIR is /path/to/foo/.git
- [includeIf "gitdir:/path/to/foo/.git"]
- path = /path/to/foo.inc
-
- ; include for all repositories inside /path/to/group
- [includeIf "gitdir:/path/to/group/"]
- path = /path/to/foo.inc
-
- ; include for all repositories inside $HOME/to/group
- [includeIf "gitdir:~/to/group/"]
- path = /path/to/foo.inc
-
- ; relative paths are always relative to the including
- ; file (if the condition is true); their location is not
- ; affected by the condition
- [includeIf "gitdir:/path/to/group/"]
- path = foo.inc
+----
+# Core variables
+[core]
+ ; Don't trust file modes
+ filemode = false
+
+# Our diff algorithm
+[diff]
+ external = /usr/local/bin/diff-wrapper
+ renames = true
+
+[branch "devel"]
+ remote = origin
+ merge = refs/heads/devel
+
+# Proxy settings
+[core]
+ gitProxy="ssh" for "kernel.org"
+ gitProxy=default-proxy ; for the rest
+
+[include]
+ path = /path/to/foo.inc ; include by absolute path
+ path = foo.inc ; find "foo.inc" relative to the current file
+ path = ~/foo.inc ; find "foo.inc" in your `$HOME` directory
+
+; include if $GIT_DIR is /path/to/foo/.git
+[includeIf "gitdir:/path/to/foo/.git"]
+ path = /path/to/foo.inc
+
+; include for all repositories inside /path/to/group
+[includeIf "gitdir:/path/to/group/"]
+ path = /path/to/foo.inc
+
+; include for all repositories inside $HOME/to/group
+[includeIf "gitdir:~/to/group/"]
+ path = /path/to/foo.inc
+
+; relative paths are always relative to the including
+; file (if the condition is true); their location is not
+; affected by the condition
+[includeIf "gitdir:/path/to/group/"]
+ path = foo.inc
+----
; include only if we are in a worktree where foo-branch is
; currently checked out
@@ -345,6 +347,8 @@ include::config/difftool.txt[]
include::config/fastimport.txt[]
+include::config/feature.txt[]
+
include::config/fetch.txt[]
include::config/format.txt[]
diff --git a/Documentation/config/core.txt b/Documentation/config/core.txt
index 75538d27e7..852d2ba37a 100644
--- a/Documentation/config/core.txt
+++ b/Documentation/config/core.txt
@@ -86,7 +86,9 @@ core.untrackedCache::
it will automatically be removed, if set to `false`. Before
setting it to `true`, you should check that mtime is working
properly on your system.
- See linkgit:git-update-index[1]. `keep` by default.
+ See linkgit:git-update-index[1]. `keep` by default, unless
+ `feature.manyFiles` is enabled which sets this setting to
+ `true` by default.
core.checkStat::
When missing or is set to `default`, many fields in the stat
@@ -577,7 +579,7 @@ the `GIT_NOTES_REF` environment variable. See linkgit:git-notes[1].
core.commitGraph::
If true, then git will read the commit-graph file (if it exists)
- to parse the graph structure of commits. Defaults to false. See
+ to parse the graph structure of commits. Defaults to true. See
linkgit:git-commit-graph[1] for more information.
core.useReplaceRefs::
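
With `core.commitGraph` now defaulting to true in the hunk above, the main prerequisite is that a commit-graph file exists. A minimal sketch of writing and checking one; the choice of `--reachable` is illustrative:

----
# write a commit-graph covering all reachable commits
git commit-graph write --reachable

# sanity-check the file that core.commitGraph will read
git commit-graph verify
----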
diff --git a/Documentation/config/diff.txt b/Documentation/config/diff.txt
index 5afb5a2cbc..ff09f1cf73 100644
--- a/Documentation/config/diff.txt
+++ b/Documentation/config/diff.txt
@@ -189,7 +189,7 @@ diff.guitool::
include::../mergetools-diff.txt[]
diff.indentHeuristic::
- Set this option to `true` to enable experimental heuristics
+ Set this option to `false` to disable the default heuristics
that shift diff hunk boundaries to make patches easier to read.
diff.algorithm::
diff --git a/Documentation/config/feature.txt b/Documentation/config/feature.txt
new file mode 100644
index 0000000000..875f8c8a66
--- /dev/null
+++ b/Documentation/config/feature.txt
@@ -0,0 +1,37 @@
+feature.*::
+ The config settings that start with `feature.` modify the defaults of
+ a group of other config settings. These groups are created by the Git
+ developer community as recommended defaults and are subject to change.
+ In particular, new config options may be added with different defaults.
+
+feature.experimental::
+ Enable config options that are new to Git, and are being considered for
+ future defaults. Config settings included here may be added or removed
+ with each release, including minor version updates. These settings may
+ have unintended interactions since they are so new. Please enable this
+ setting if you are interested in providing feedback on experimental
+ features. The new default values are:
++
+* `pack.useSparse=true` uses a new algorithm when constructing a pack-file
+which can improve `git push` performance in repos with many files.
++
+* `fetch.negotiationAlgorithm=skipping` may improve fetch negotiation times by
+skipping more commits at a time, reducing the number of round trips.
++
+* `fetch.writeCommitGraph=true` writes a commit-graph after every `git fetch`
+command that downloads a pack-file from a remote. Using the `--split` option,
+most executions will create a very small commit-graph file on top of the
+existing commit-graph file(s). Occasionally, these files will merge and the
+write may take longer. Having an updated commit-graph file helps performance
+of many Git commands, including `git merge-base`, `git push -f`, and
+`git log --graph`.
+
+feature.manyFiles::
+ Enable config options that optimize for repos with many files in the
+ working directory. With many files, commands such as `git status` and
+ `git checkout` may be slow and these new defaults improve performance:
++
+* `index.version=4` enables path-prefix compression in the index.
++
+* `core.untrackedCache=true` enables the untracked cache. This setting assumes
+that mtime is working on your machine.
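
A minimal sketch of opting into the two groups documented above from the command line; whether to set them globally or per repository is the reader's choice:

----
# try the forward-looking defaults in one repository
git config feature.experimental true

# optimize a large working tree; changes the defaults of
# index.version and core.untrackedCache as described above
git config feature.manyFiles true
----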
diff --git a/Documentation/config/fetch.txt b/Documentation/config/fetch.txt
index ba890b5884..f11940280f 100644
--- a/Documentation/config/fetch.txt
+++ b/Documentation/config/fetch.txt
@@ -59,7 +59,8 @@ fetch.negotiationAlgorithm::
effort to converge faster, but may result in a larger-than-necessary
packfile; The default is "default" which instructs Git to use the default algorithm
that never skips commits (unless the server has acknowledged it or one
- of its descendants).
+ of its descendants). If `feature.experimental` is enabled, then this
+ setting defaults to "skipping".
Unknown values will cause 'git fetch' to error out.
+
See also the `--negotiation-tip` option for linkgit:git-fetch[1].
@@ -68,3 +69,23 @@ fetch.showForcedUpdates::
Set to false to enable `--no-show-forced-updates` in
linkgit:git-fetch[1] and linkgit:git-pull[1] commands.
Defaults to true.
+
+fetch.parallel::
+ Specifies the maximal number of fetch operations to be run in parallel
+ at a time (submodules, or remotes when the `--multiple` option of
+ linkgit:git-fetch[1] is in effect).
++
+A value of 0 will give some reasonable default. If unset, it defaults to 1.
++
+For submodules, this setting can be overridden using the `submodule.fetchJobs`
+config setting.
+
+fetch.writeCommitGraph::
+ Set to true to write a commit-graph after every `git fetch` command
+ that downloads a pack-file from a remote. Using the `--split` option,
+ most executions will create a very small commit-graph file on top of
+ the existing commit-graph file(s). Occasionally, these files will
+ merge and the write may take longer. Having an updated commit-graph
+ file helps performance of many Git commands, including `git merge-base`,
+ `git push -f`, and `git log --graph`. Defaults to false, unless
+ `feature.experimental` is true.
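
A short illustration of the two new fetch settings documented above; the remote names are placeholders:

----
# fetch up to four remotes at a time with `git fetch --multiple`
git config fetch.parallel 4
git fetch --multiple origin upstream

# keep the commit-graph up to date after every pack-downloading fetch
git config fetch.writeCommitGraph true
----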
diff --git a/Documentation/config/format.txt b/Documentation/config/format.txt
index 414a5a8a9d..cb629fa769 100644
--- a/Documentation/config/format.txt
+++ b/Documentation/config/format.txt
@@ -77,6 +77,7 @@ format.coverLetter::
A boolean that controls whether to generate a cover-letter when
format-patch is invoked, but in addition can be set to "auto", to
generate a cover-letter only when there's more than one patch.
+ Default is false.
format.outputDirectory::
Set a custom directory to store the resulting files instead of the
diff --git a/Documentation/config/gc.txt b/Documentation/config/gc.txt
index 02b92b18b5..00ea0a678e 100644
--- a/Documentation/config/gc.txt
+++ b/Documentation/config/gc.txt
@@ -63,7 +63,7 @@ gc.writeCommitGraph::
If true, then gc will rewrite the commit-graph file when
linkgit:git-gc[1] is run. When using `git gc --auto`
the commit-graph will be updated if housekeeping is
- required. Default is false. See linkgit:git-commit-graph[1]
+ required. Default is true. See linkgit:git-commit-graph[1]
for details.
gc.logExpiry::
diff --git a/Documentation/config/index.txt b/Documentation/config/index.txt
index f181503041..7cb50b37e9 100644
--- a/Documentation/config/index.txt
+++ b/Documentation/config/index.txt
@@ -24,3 +24,4 @@ index.threads::
index.version::
Specify the version with which new index files should be
initialized. This does not affect existing repositories.
+ If `feature.manyFiles` is enabled, then the default is 4.
diff --git a/Documentation/config/pack.txt b/Documentation/config/pack.txt
index 9cdcfa7324..1d66f0c992 100644
--- a/Documentation/config/pack.txt
+++ b/Documentation/config/pack.txt
@@ -112,7 +112,8 @@ pack.useSparse::
objects. This can have significant performance benefits when
computing a pack to send a small change. However, it is possible
that extra objects are added to the pack-file if the included
- commits contain certain types of direct renames.
+ commits contain certain types of direct renames. Default is `false`
+ unless `feature.experimental` is enabled.
pack.writeBitmaps (deprecated)::
This is a deprecated synonym for `repack.writeBitmaps`.
diff --git a/Documentation/config/remote.txt b/Documentation/config/remote.txt
index 6c4cad83a2..a8e6437a90 100644
--- a/Documentation/config/remote.txt
+++ b/Documentation/config/remote.txt
@@ -76,3 +76,11 @@ remote.<name>.pruneTags::
+
See also `remote.<name>.prune` and the PRUNING section of
linkgit:git-fetch[1].
+
+remote.<name>.promisor::
+ When set to true, this remote will be used to fetch promisor
+ objects.
+
+remote.<name>.partialclonefilter::
+ The filter that will be applied when fetching from this
+ promisor remote.
diff --git a/Documentation/config/trace2.txt b/Documentation/config/trace2.txt
index 2edbfb02fe..4ce0b9a6d1 100644
--- a/Documentation/config/trace2.txt
+++ b/Documentation/config/trace2.txt
@@ -54,3 +54,9 @@ trace2.destinationDebug::
By default, these errors are suppressed and tracing is
silently disabled. May be overridden by the
`GIT_TRACE2_DST_DEBUG` environment variable.
+
+trace2.maxFiles::
+ Integer. When writing trace files to a target directory, do not
+ write additional traces if we would exceed this many files. Instead,
+ write a sentinel file that will block further tracing to this
+ directory. Defaults to 0, which disables this check.
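
A sketch of how `trace2.maxFiles` interacts with a directory target; the directory path is illustrative, and `GIT_TRACE2` is the environment variable for the normal trace2 target:

----
# cap the number of per-process trace files in the target directory
git config trace2.maxFiles 100

# send normal-target traces into a directory (one file per process)
mkdir -p /tmp/git-traces
GIT_TRACE2=/tmp/git-traces git status
----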
diff --git a/Documentation/diff-generate-patch.txt b/Documentation/diff-generate-patch.txt
index f10ca410ad..e8ed6470fb 100644
--- a/Documentation/diff-generate-patch.txt
+++ b/Documentation/diff-generate-patch.txt
@@ -1,11 +1,15 @@
-Generating patches with -p
---------------------------
-
-When "git-diff-index", "git-diff-tree", or "git-diff-files" are run
-with a `-p` option, "git diff" without the `--raw` option, or
-"git log" with the "-p" option, they
-do not produce the output described above; instead they produce a
-patch file. You can customize the creation of such patches via the
+Generating patch text with -p
+-----------------------------
+
+Running
+linkgit:git-diff[1],
+linkgit:git-log[1],
+linkgit:git-show[1],
+linkgit:git-diff-index[1],
+linkgit:git-diff-tree[1], or
+linkgit:git-diff-files[1]
+with the `-p` option produces patch text.
+You can customize the creation of patch text via the
`GIT_EXTERNAL_DIFF` and the `GIT_DIFF_OPTS` environment variables.
What the -p option produces is slightly different from the traditional
@@ -49,7 +53,7 @@ similarity index value of 100% is thus reserved for two equal
files, while 100% dissimilarity means that no line from the old
file made it into the new one.
+
-The index line includes the SHA-1 checksum before and after the change.
+The index line includes the blob object names before and after the change.
The <mode> is included if the file mode does not change; otherwise,
separate lines indicate the old and the new mode.
@@ -70,7 +74,7 @@ separate lines indicate the old and the new mode.
rename to a
-combined diff format
+Combined diff format
--------------------
Any diff-generating command can take the `-c` or `--cc` option to
@@ -80,7 +84,7 @@ linkgit:git-show[1]. Note also that you can give the `-m` option to any
of these commands to force generation of diffs with individual parents
of a merge.
-A 'combined diff' format looks like this:
+A "combined diff" format looks like this:
------------
diff --combined describe.c
@@ -113,11 +117,11 @@ index fabadb8,cc95eb0..4866510
------------
1. It is preceded with a "git diff" header, that looks like
- this (when `-c` option is used):
+ this (when the `-c` option is used):
diff --combined file
+
-or like this (when `--cc` option is used):
+or like this (when the `--cc` option is used):
diff --cc file
@@ -160,7 +164,7 @@ parents.
4. Chunk header format is modified to prevent people from
accidentally feeding it to `patch -p1`. Combined diff format
was created for review of merge commit changes, and was not
- meant for apply. The change is similar to the change in the
+ meant to be applied. The change is similar to the change in the
extended 'index' header:
@@@ <from-file-range> <from-file-range> <to-file-range> @@@
diff --git a/Documentation/doc-diff b/Documentation/doc-diff
index 3355be4798..88a9b20168 100755
--- a/Documentation/doc-diff
+++ b/Documentation/doc-diff
@@ -21,7 +21,7 @@ asciidoc use asciidoc with both commits
to-asciidoc use asciidoc with the 'to'-commit
to-asciidoctor use asciidoctor with the 'to'-commit
asciidoctor use asciidoctor with both commits
-cut-header-footer cut away header and footer
+cut-footer cut away footer
"
SUBDIRECTORY_OK=1
. "$(git --exec-path)/git-sh-setup"
@@ -31,7 +31,7 @@ force=
clean=
from_program=
to_program=
-cut_header_footer=
+cut_footer=
while test $# -gt 0
do
case "$1" in
@@ -55,8 +55,8 @@ do
--asciidoc)
from_program=-asciidoc
to_program=-asciidoc ;;
- --cut-header-footer)
- cut_header_footer=-cut-header-footer ;;
+ --cut-footer)
+ cut_footer=-cut-footer ;;
--)
shift; break ;;
*)
@@ -118,8 +118,8 @@ construct_makemanflags () {
from_makemanflags=$(construct_makemanflags "$from_program") &&
to_makemanflags=$(construct_makemanflags "$to_program") &&
-from_dir=$from_oid$from_program$cut_header_footer &&
-to_dir=$to_oid$to_program$cut_header_footer &&
+from_dir=$from_oid$from_program$cut_footer &&
+to_dir=$to_oid$to_program$cut_footer &&
# generate_render_makefile <srcdir> <dstdir>
generate_render_makefile () {
@@ -169,12 +169,11 @@ render_tree () {
make -j$parallel -f - &&
mv "$tmp/rendered/$dname+" "$tmp/rendered/$dname"
- if test "$cut_header_footer" = "-cut-header-footer"
+ if test "$cut_footer" = "-cut-footer"
then
for f in $(find "$tmp/rendered/$dname" -type f)
do
- tail -n +3 "$f" | head -n -2 |
- sed -e '1{/^$/d}' -e '${/^$/d}' >"$f+" &&
+ head -n -2 "$f" | sed -e '${/^$/d}' >"$f+" &&
mv "$f+" "$f" ||
return 1
done
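Assuming the script's usual `doc-diff <from> <to>` calling convention, a typical invocation of the renamed option might look like:

----
# Compare rendered documentation between two commits, cutting away
# only the rendered footer.
./Documentation/doc-diff --cut-footer HEAD~1 HEAD
----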
diff --git a/Documentation/fetch-options.txt b/Documentation/fetch-options.txt
index 3c9b4f9e09..43b9ff3bce 100644
--- a/Documentation/fetch-options.txt
+++ b/Documentation/fetch-options.txt
@@ -160,15 +160,27 @@ ifndef::git-pull[]
-j::
--jobs=<n>::
- Number of parallel children to be used for fetching submodules.
- Each will fetch from different submodules, such that fetching many
- submodules will be faster. By default submodules will be fetched
- one at a time.
+ Number of parallel children to be used for all forms of fetching.
++
+If the `--multiple` option was specified, the different remotes will be fetched
+in parallel. If multiple submodules are fetched, they will be fetched in
+parallel. To control them independently, use the config settings
+`fetch.parallel` and `submodule.fetchJobs` (see linkgit:git-config[1]).
++
+Typically, parallel recursive and multi-remote fetches will be faster. By
+default, fetches are performed sequentially, not in parallel.
--no-recurse-submodules::
Disable recursive fetching of submodules (this has the same effect as
using the `--recurse-submodules=no` option).
+--set-upstream::
+	If the remote is fetched successfully, add an upstream
+	(tracking) reference, used by argument-less
+ linkgit:git-pull[1] and other commands. For more information,
+ see `branch.<name>.merge` and `branch.<name>.remote` in
+ linkgit:git-config[1].
+
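For example, assuming remotes named `origin` and `upstream`:

----
# Fetch several remotes, and any submodules, with up to 8 jobs.
git fetch --multiple --jobs=8 origin upstream

# Control remote-level and submodule-level parallelism separately.
git config fetch.parallel 8
git config submodule.fetchJobs 4

# Record the fetched remote as the upstream of the current branch.
git fetch --set-upstream origin
----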
--submodule-prefix=<path>::
Prepend <path> to paths printed in informative messages
such as "Fetching submodule foo". This option is used
diff --git a/Documentation/git-clean.txt b/Documentation/git-clean.txt
index 0028ff12d1..a7f309dff5 100644
--- a/Documentation/git-clean.txt
+++ b/Documentation/git-clean.txt
@@ -26,18 +26,20 @@ are affected.
OPTIONS
-------
-d::
- Remove untracked directories in addition to untracked files.
- If an untracked directory is managed by a different Git
- repository, it is not removed by default. Use -f option twice
- if you really want to remove such a directory.
+ Normally, when no <path> is specified, git clean will not
+ recurse into untracked directories to avoid removing too much.
+ Specify -d to have it recurse into such directories as well.
+ If any paths are specified, -d is irrelevant; all untracked
+ files matching the specified paths (with exceptions for nested
+ git directories mentioned under `--force`) will be removed.
-f::
--force::
If the Git configuration variable clean.requireForce is not set
to false, 'git clean' will refuse to delete files or directories
- unless given -f, -n or -i. Git will refuse to delete directories
- with .git sub directory or file unless a second -f
- is given.
+ unless given -f or -i. Git will refuse to modify untracked
+ nested git repositories (directories with a .git subdirectory)
+ unless a second -f is given.
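For example:

----
# Preview what would be removed, including untracked directories.
git clean -d -n

# Remove them for real; untracked nested repositories need -f twice.
git clean -d -f
git clean -d -f -f
----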
-i::
--interactive::
diff --git a/Documentation/git-commit-graph.txt b/Documentation/git-commit-graph.txt
index eb5e7865f0..8c708a7a16 100644
--- a/Documentation/git-commit-graph.txt
+++ b/Documentation/git-commit-graph.txt
@@ -10,8 +10,8 @@ SYNOPSIS
--------
[verse]
'git commit-graph read' [--object-dir <dir>]
-'git commit-graph verify' [--object-dir <dir>] [--shallow]
-'git commit-graph write' <options> [--object-dir <dir>]
+'git commit-graph verify' [--object-dir <dir>] [--shallow] [--[no-]progress]
+'git commit-graph write' <options> [--object-dir <dir>] [--[no-]progress]
DESCRIPTION
@@ -29,6 +29,9 @@ OPTIONS
commit-graph file is expected to be in the `<dir>/info` directory and
the packfiles are expected to be in `<dir>/pack`.
+--[no-]progress::
+ Turn progress on/off explicitly. If neither is specified, progress is
+ shown if standard error is connected to a terminal.
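For example:

----
# Write the graph with an explicit progress meter, then verify quietly.
git commit-graph write --reachable --progress
git commit-graph verify --no-progress
----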
COMMANDS
--------
diff --git a/Documentation/git-commit.txt b/Documentation/git-commit.txt
index 7628193284..afa7b75a23 100644
--- a/Documentation/git-commit.txt
+++ b/Documentation/git-commit.txt
@@ -282,18 +282,20 @@ FROM UPSTREAM REBASE" section in linkgit:git-rebase[1].)
--untracked-files[=<mode>]::
Show untracked files.
+
+--
The mode parameter is optional (defaults to 'all'), and is used to
specify the handling of untracked files; when -u is not used, the
default is 'normal', i.e. show untracked files and directories.
-+
+
The possible options are:
-+
+
- 'no' - Show no untracked files
- 'normal' - Shows untracked files and directories
- 'all' - Also shows individual files in untracked directories.
-+
+
The default can be changed using the status.showUntrackedFiles
configuration variable documented in linkgit:git-config[1].
+--
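For example:

----
# Leave untracked files out of the status template for this commit ...
git commit --untracked-files=no

# ... or change the default used by 'git status' and 'git commit'.
git config status.showUntrackedFiles no
----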
-v::
--verbose::
diff --git a/Documentation/git-config.txt b/Documentation/git-config.txt
index ff9310f958..899e92a1c9 100644
--- a/Documentation/git-config.txt
+++ b/Documentation/git-config.txt
@@ -339,33 +339,35 @@ EXAMPLES
Given a .git/config like this:
- #
- # This is the config file, and
- # a '#' or ';' character indicates
- # a comment
- #
-
- ; core variables
- [core]
- ; Don't trust file modes
- filemode = false
-
- ; Our diff algorithm
- [diff]
- external = /usr/local/bin/diff-wrapper
- renames = true
-
- ; Proxy settings
- [core]
- gitproxy=proxy-command for kernel.org
- gitproxy=default-proxy ; for all the rest
-
- ; HTTP
- [http]
- sslVerify
- [http "https://weak.example.com"]
- sslVerify = false
- cookieFile = /tmp/cookie.txt
+------------
+#
+# This is the config file, and
+# a '#' or ';' character indicates
+# a comment
+#
+
+; core variables
+[core]
+ ; Don't trust file modes
+ filemode = false
+
+; Our diff algorithm
+[diff]
+ external = /usr/local/bin/diff-wrapper
+ renames = true
+
+; Proxy settings
+[core]
+ gitproxy=proxy-command for kernel.org
+ gitproxy=default-proxy ; for all the rest
+
+; HTTP
+[http]
+ sslVerify
+[http "https://weak.example.com"]
+ sslVerify = false
+ cookieFile = /tmp/cookie.txt
+------------
you can set the filemode to true with
diff --git a/Documentation/git-fast-export.txt b/Documentation/git-fast-export.txt
index cc940eb9ad..37634bffd1 100644
--- a/Documentation/git-fast-export.txt
+++ b/Documentation/git-fast-export.txt
@@ -17,9 +17,9 @@ This program dumps the given revisions in a form suitable to be piped
into 'git fast-import'.
You can use it as a human-readable bundle replacement (see
-linkgit:git-bundle[1]), or as a kind of an interactive
-'git filter-branch'.
-
+linkgit:git-bundle[1]), or as a format that can be edited before being
+fed to 'git fast-import' in order to do history rewrites (an ability
+relied on by tools like 'git filter-repo').
OPTIONS
-------
@@ -75,11 +75,20 @@ produced incorrect results if you gave these options.
Before processing any input, load the marks specified in
<file>. The input file must exist, must be readable, and
must use the same format as produced by --export-marks.
+
+--mark-tags::
+ In addition to labelling blobs and commits with mark ids, also
+ label tags. This is useful in conjunction with
+ `--export-marks` and `--import-marks`, and is also useful (and
+	necessary) for exporting nested tags. It does not hurt
+	other cases and would be the default, except that many fast-import
+ frontends are not prepared to accept tags with mark
+ identifiers.
+
-Any commits that have already been marked will not be exported again.
-If the backend uses a similar --import-marks file, this allows for
-incremental bidirectional exporting of the repository by keeping the
-marks the same across runs.
+Any commits (or tags) that have already been marked will not be
+exported again. If the backend uses a similar --import-marks file,
+this allows for incremental bidirectional exporting of the repository
+by keeping the marks the same across runs.
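A sketch of an incremental export that keeps marks, including tag marks, across runs (file names are illustrative):

----
# First run: export everything and record marks, labelling tags too.
git fast-export --mark-tags --export-marks=/tmp/marks --all >/tmp/dump1

# Later run: only objects not already marked are exported again.
git fast-export --mark-tags --import-marks=/tmp/marks \
	--export-marks=/tmp/marks --all >/tmp/dump2
----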
--fake-missing-tagger::
Some old repositories have tags without a tagger. The
diff --git a/Documentation/git-fast-import.txt b/Documentation/git-fast-import.txt
index fad327aecc..a3f1e0c5e4 100644
--- a/Documentation/git-fast-import.txt
+++ b/Documentation/git-fast-import.txt
@@ -337,6 +337,13 @@ and control the current import process. More detailed discussion
`commit` command. This command is optional and is not
needed to perform an import.
+`alias`::
+ Record that a mark refers to a given object without first
+ creating any new object. Using --import-marks and referring
+ to missing marks will cause fast-import to fail, so aliases
+ can provide a way to set otherwise pruned commits to a valid
+ value (e.g. the nearest non-pruned ancestor).
+
`checkpoint`::
Forces fast-import to close the current packfile, generate its
unique SHA-1 checksum and index, and start a new packfile.
@@ -391,7 +398,7 @@ change to the project.
('encoding' SP <encoding>)?
data
('from' SP <commit-ish> LF)?
- ('merge' SP <commit-ish> LF)?
+ ('merge' SP <commit-ish> LF)*
(filemodify | filedelete | filecopy | filerename | filedeleteall | notemodify)*
LF?
....
@@ -774,6 +781,7 @@ lightweight (non-annotated) tags see the `reset` command below.
....
'tag' SP <name> LF
+ mark?
'from' SP <commit-ish> LF
original-oid?
'tagger' (SP <name>)? SP LT <email> GT SP <when> LF
@@ -913,6 +921,21 @@ a data chunk which does not have an LF as its last byte.
+
The `LF` after `<delim> LF` is optional (it used to be required).
+`alias`
+~~~~~~~
+Record that a mark refers to a given object without first creating any
+new object.
+
+....
+ 'alias' LF
+ mark
+ 'to' SP <commit-ish> LF
+ LF?
+....
+
+For a detailed description of `<commit-ish>` see above under `from`.
+
+
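A sketch of the command in use, binding a mark to an existing commit (the marks file name and mark number are illustrative):

----
# Bind mark :1042 to the current tip of 'master' in an existing
# marks file.
tip=$(git rev-parse master) &&
git fast-import --import-marks-if-exists=marks.txt \
	--export-marks=marks.txt <<EOF
alias
mark :1042
to $tip
EOF
----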
`checkpoint`
~~~~~~~~~~~~
Forces fast-import to close the current packfile, start a new one, and to
diff --git a/Documentation/git-filter-branch.txt b/Documentation/git-filter-branch.txt
index 6b53dd7e06..5876598852 100644
--- a/Documentation/git-filter-branch.txt
+++ b/Documentation/git-filter-branch.txt
@@ -16,6 +16,19 @@ SYNOPSIS
[--original <namespace>] [-d <directory>] [-f | --force]
[--state-branch <branch>] [--] [<rev-list options>...]
+WARNING
+-------
+'git filter-branch' has a plethora of pitfalls that can produce non-obvious
+manglings of the intended history rewrite (and can leave you with little
+time to investigate such problems since it has such abysmal performance).
+These safety and performance issues cannot be backward compatibly fixed and
+as such, its use is not recommended. Please use an alternative history
+filtering tool such as https://github.com/newren/git-filter-repo/[git
+filter-repo]. If you still need to use 'git filter-branch', please
+carefully read <<SAFETY>> (and <<PERFORMANCE>>) to learn about the land
+mines of filter-branch, and then vigilantly avoid as many of the hazards
+listed there as reasonably possible.
+
DESCRIPTION
-----------
Lets you rewrite Git revision history by rewriting the branches mentioned
@@ -445,36 +458,236 @@ warned.
(or if your git-gc is not new enough to support arguments to
`--prune`, use `git repack -ad; git prune` instead).
-NOTES
------
-
-git-filter-branch allows you to make complex shell-scripted rewrites
-of your Git history, but you probably don't need this flexibility if
-you're simply _removing unwanted data_ like large files or passwords.
-For those operations you may want to consider
-http://rtyley.github.io/bfg-repo-cleaner/[The BFG Repo-Cleaner],
-a JVM-based alternative to git-filter-branch, typically at least
-10-50x faster for those use-cases, and with quite different
-characteristics:
-
-* Any particular version of a file is cleaned exactly _once_. The BFG,
- unlike git-filter-branch, does not give you the opportunity to
- handle a file differently based on where or when it was committed
- within your history. This constraint gives the core performance
- benefit of The BFG, and is well-suited to the task of cleansing bad
- data - you don't care _where_ the bad data is, you just want it
- _gone_.
-
-* By default The BFG takes full advantage of multi-core machines,
- cleansing commit file-trees in parallel. git-filter-branch cleans
- commits sequentially (i.e. in a single-threaded manner), though it
- _is_ possible to write filters that include their own parallelism,
- in the scripts executed against each commit.
-
-* The http://rtyley.github.io/bfg-repo-cleaner/#examples[command options]
- are much more restrictive than git-filter branch, and dedicated just
- to the tasks of removing unwanted data- e.g:
- `--strip-blobs-bigger-than 1M`.
+[[PERFORMANCE]]
+PERFORMANCE
+-----------
+
+The performance of git-filter-branch is glacially slow; its design makes it
+impossible for a backward-compatible implementation to ever be fast:
+
+* In editing files, git-filter-branch by design checks out each and
+every commit as it existed in the original repo. If your repo has 10\^5
+files and 10\^5 commits, but each commit only modifies 5 files, then
+git-filter-branch will make you do 10\^10 modifications, despite only
+having (at most) 5*10\^5 unique blobs.
+
+* If you try to cheat and make git-filter-branch only work on
+files modified in a commit, then two things happen:
+
+ ** you run into problems with deletions whenever the user is simply
+ trying to rename files (because attempting to delete files that
+ don't exist looks like a no-op; it takes some chicanery to remap
+ deletes across file renames when the renames happen via arbitrary
+ user-provided shell)
+
+ ** even if you succeed at the map-deletes-for-renames chicanery, you
+ still technically violate backward compatibility because users are
+ allowed to filter files in ways that depend upon topology of
+ commits instead of filtering solely based on file contents or names
+ (though this has not been observed in the wild).
+
+* Even if you don't need to edit files but only want to e.g. rename or
+remove some and thus can avoid checking out each file (i.e. you can use
+--index-filter), you still are passing shell snippets for your filters.
+This means that for every commit, you have to have a prepared git repo
+where those filters can be run. That's a significant amount of setup.
+
+* Further, several additional files are created or updated per commit by
+git-filter-branch. Some of these are for supporting the convenience
+functions provided by git-filter-branch (such as map()), while others
+are for keeping track of internal state (but could have also been
+accessed by user filters; one of git-filter-branch's regression tests
+does so). This essentially amounts to using the filesystem as an IPC
+mechanism between git-filter-branch and the user-provided filters.
+Disks tend to be a slow IPC mechanism, and writing these files also
+effectively represents a forced synchronization point between separate
+processes that we hit with every commit.
+
+* The user-provided shell commands will likely involve a pipeline of
+commands, resulting in the creation of many processes per commit.
+Creating and running another process takes a widely varying amount of
+time between operating systems, but on any platform it is very slow
+relative to invoking a function.
+
+* git-filter-branch itself is written in shell, which is kind of slow.
+This is the one performance issue that could be backward-compatibly
+fixed, but compared to the above problems that are intrinsic to the
+design of git-filter-branch, the language of the tool itself is a
+relatively minor issue.
+
+ ** Side note: Unfortunately, people tend to fixate on the
+ written-in-shell aspect and periodically ask if git-filter-branch
+ could be rewritten in another language to fix the performance
+ issues. Not only does that ignore the bigger intrinsic problems
+ with the design, it'd help less than you'd expect: if
+ git-filter-branch itself were not shell, then the convenience
+ functions (map(), skip_commit(), etc) and the `--setup` argument
+ could no longer be executed once at the beginning of the program
+ but would instead need to be prepended to every user filter (and
+ thus re-executed with every commit).
+
+The https://github.com/newren/git-filter-repo/[git filter-repo] tool is
+an alternative to git-filter-branch which does not suffer from these
+performance problems or the safety problems (mentioned below). For those
+with existing tooling which relies upon git-filter-branch, 'git
+repo-filter' also provides
+https://github.com/newren/git-filter-repo/blob/master/contrib/filter-repo-demos/filter-lamely[filter-lamely],
+a drop-in git-filter-branch replacement (with a few caveats). While
+filter-lamely suffers from all the same safety issues as
+git-filter-branch, it at least ameliorates the performance issues a
+little.
+
+[[SAFETY]]
+SAFETY
+------
+
+git-filter-branch is riddled with gotchas resulting in various ways to
+easily corrupt repos or end up with a mess worse than what you started
+with:
+
+* Someone can have a set of "working and tested filters" which they
+document or provide to a coworker, who then runs them on a different OS
+where the same commands are not working/tested (some examples in the
+git-filter-branch manpage are also affected by this). BSD vs. GNU
+userland differences can really bite. If lucky, error messages are
+spewed. But just as likely, the commands either don't do the filtering
+requested, or silently corrupt by making some unwanted change. The
+unwanted change may only affect a few commits, so it's not necessarily
+obvious either. (The fact that problems won't necessarily be obvious
+means they are likely to go unnoticed until the rewritten history is in
+use for quite a while, at which point it's really hard to justify
+another flag-day for another rewrite.)
+
+* Filenames with spaces are often mishandled by shell snippets since
+they cause problems for shell pipelines. Not everyone is familiar with
+find -print0, xargs -0, git-ls-files -z, etc. Even people who are
+familiar with these may assume such flags are not relevant because
+someone else renamed any such files in their repo back before the person
+doing the filtering joined the project. And often, even those familiar
+with handling arguments with spaces may not do so just because they
+aren't in the mindset of thinking about everything that could possibly
+go wrong.
+
+* Non-ascii filenames can be silently removed despite being in a desired
+directory. Keeping only wanted paths is often done using pipelines like
+`git ls-files | grep -v ^WANTED_DIR/ | xargs git rm`. ls-files will
+only quote filenames if needed, so folks may not notice that one of the
+files didn't match the regex (at least not until it's much too late).
+Yes, someone who knows about core.quotePath can avoid this (unless they
+have other special characters like \t, \n, or "), and people who use
+ls-files -z with something other than grep can avoid this, but that
+doesn't mean they will.
+
+* Similarly, when moving files around, one can find that filenames with
+non-ascii or special characters end up in a different directory, one
+that includes a double quote character. (This is technically the same
+issue as above with quoting, but perhaps an interesting different way
+that it can and has manifested as a problem.)
+
+* It's far too easy to accidentally mix up old and new history. It's
+still possible with any tool, but git-filter-branch almost invites it.
+If lucky, the only downside is users getting frustrated that they don't
+know how to shrink their repo and remove the old stuff. If unlucky,
+they merge old and new history and end up with multiple "copies" of each
+commit, some of which have unwanted or sensitive files and others which
+don't. This comes about in multiple different ways:
+
+ ** the default to only doing a partial history rewrite ('--all' is not
+ the default and few examples show it)
+
+ ** the fact that there's no automatic post-run cleanup
+
+ ** the fact that --tag-name-filter (when used to rename tags) doesn't
+ remove the old tags but just adds new ones with the new name
+
+ ** the fact that little educational information is provided to inform
+ users of the ramifications of a rewrite and how to avoid mixing old
+ and new history. For example, this man page discusses how users
+ need to understand that they need to rebase their changes for all
+ their branches on top of new history (or delete and reclone), but
+ that's only one of multiple concerns to consider. See the
+ "DISCUSSION" section of the git filter-repo manual page for more
+ details.
+
+* Annotated tags can be accidentally converted to lightweight tags, due
+to either of two issues:
+
+ ** Someone can do a history rewrite, realize they messed up, restore
+ from the backups in refs/original/, and then redo their
+ git-filter-branch command. (The backup in refs/original/ is not a
+ real backup; it dereferences tags first.)
+
+ ** Running git-filter-branch with either --tags or --all in your
+ <rev-list options>. In order to retain annotated tags as
+ annotated, you must use --tag-name-filter (and must not have
+ restored from refs/original/ in a previously botched rewrite).
+
+* Any commit messages that specify an encoding will become corrupted
+by the rewrite; git-filter-branch ignores the encoding, takes the original
+bytes, and feeds them to commit-tree without telling it the proper
+encoding. (This happens whether or not --msg-filter is used.)
+
+* Commit messages (even if they are all UTF-8) by default become
+corrupted due to not being updated -- any references to other commit
+hashes in commit messages will now refer to no-longer-extant commits.
+
+* There are no facilities for helping users find what unwanted crud they
+should delete, which means they are much more likely to have incomplete
+or partial cleanups that sometimes result in confusion and people
+wasting time trying to understand what went wrong. (For example, folks tend to just
+look for big files to delete instead of big directories or extensions,
+and once they do so, then sometime later folks using the new repository
+who are going through history will notice a build artifact directory
+that has some files but not others, or a cache of dependencies
+(node_modules or similar) which couldn't have ever been functional since
+it's missing some files.)
+
+* If --prune-empty isn't specified, then the filtering process can
+create hordes of confusing empty commits.
+
+* If --prune-empty is specified, then intentionally placed empty
+commits from before the filtering operation are also pruned instead of
+just pruning commits that became empty due to filtering rules.
+
+* If --prune-empty is specified, sometimes empty commits are missed
+and left around anyway (a somewhat rare bug, but it happens...)
+
+* A minor issue, but users who have a goal to update all names and
+emails in a repository may be led to --env-filter which will only update
+authors and committers, missing taggers.
+
+* If the user provides a --tag-name-filter that maps multiple tags to
+the same name, no warning or error is provided; git-filter-branch simply
+overwrites each tag in some undocumented pre-defined order, resulting in
+only one tag at the end. (A git-filter-branch regression test requires
+this surprising behavior.)
+
+Also, the poor performance of git-filter-branch often leads to safety
+issues:
+
+* Coming up with the correct shell snippet to do the filtering you want
+is sometimes difficult unless you're just doing a trivial modification
+such as deleting a couple of files. Unfortunately, people often learn if
+the snippet is right or wrong by trying it out, but the rightness or
+wrongness can vary depending on special circumstances (spaces in
+filenames, non-ascii filenames, funny author names or emails, invalid
+timezones, presence of grafts or replace objects, etc.), meaning they
+may have to wait a long time, hit an error, then restart. The
+performance of git-filter-branch is so bad that this cycle is painful,
+reducing the time available to carefully re-check (to say nothing about
+what it does to the patience of the person doing the rewrite even if
+they do technically have more time available). This problem is extra
+compounded because errors from broken filters may not be shown for a
+long time and/or get lost in a sea of output. Even worse, broken
+filters often just result in silent incorrect rewrites.
+
+* To top it all off, even when users finally find working commands, they
+naturally want to share them. But they may be unaware that their repo
+didn't have some special cases that someone else's does. So, when
+someone else with a different repository runs the same commands, they
+get hit by the problems above. Or, the user just runs commands that
+really were vetted for special cases, but runs them on a different OS
+where they don't work, as noted above.
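As one concrete illustration (the file name is an example), compare the long-standing filter-branch idiom for expunging a file with the filter-repo equivalent:

----
# git filter-branch: slow, and subject to the pitfalls listed above.
git filter-branch --force --index-filter \
	'git rm --cached --ignore-unmatch secrets.txt' \
	--prune-empty --tag-name-filter cat -- --all

# git filter-repo: one invocation, no per-commit shell snippets.
git filter-repo --invert-paths --path secrets.txt
----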
GIT
---
diff --git a/Documentation/git-format-patch.txt b/Documentation/git-format-patch.txt
index b9b97e63ae..0ac56f4b70 100644
--- a/Documentation/git-format-patch.txt
+++ b/Documentation/git-format-patch.txt
@@ -17,9 +17,9 @@ SYNOPSIS
[--signature-file=<file>]
[-n | --numbered | -N | --no-numbered]
[--start-number <n>] [--numbered-files]
- [--in-reply-to=Message-Id] [--suffix=.<sfx>]
+ [--in-reply-to=<message id>] [--suffix=.<sfx>]
[--ignore-if-in-upstream]
- [--rfc] [--subject-prefix=Subject-Prefix]
+ [--rfc] [--subject-prefix=<subject prefix>]
[(--reroll-count|-v) <n>]
[--to=<email>] [--cc=<email>]
[--[no-]cover-letter] [--quiet]
@@ -159,9 +159,9 @@ Beware that the default for 'git send-email' is to thread emails
itself. If you want `git format-patch` to take care of threading, you
will want to ensure that threading is disabled for `git send-email`.
---in-reply-to=Message-Id::
+--in-reply-to=<message id>::
Make the first mail (or all the mails with `--no-thread`) appear as a
- reply to the given Message-Id, which avoids breaking threads to
+ reply to the given <message id>, which avoids breaking threads to
provide a new patch series.
--ignore-if-in-upstream::
@@ -171,9 +171,9 @@ will want to ensure that threading is disabled for `git send-email`.
patches being generated, and any patch that matches is
ignored.
---subject-prefix=<Subject-Prefix>::
+--subject-prefix=<subject prefix>::
Instead of the standard '[PATCH]' prefix in the subject
- line, instead use '[<Subject-Prefix>]'. This
+ line, instead use '[<subject prefix>]'. This
allows for useful naming of a patch series, and can be
combined with the `--numbered` option.
@@ -314,7 +314,8 @@ you can use `--suffix=-patch` to get `0001-description-of-my-change-patch`.
--base=<commit>::
Record the base tree information to identify the state the
patch series applies to. See the BASE TREE INFORMATION section
- below for details.
+ below for details. If <commit> is "auto", a base commit is
+ automatically chosen.
--root::
Treat the revision argument as a <revision range>, even if it
@@ -330,8 +331,9 @@ CONFIGURATION
-------------
You can specify extra mail header lines to be added to each message,
defaults for the subject prefix and file suffix, number patches when
-outputting more than one patch, add "To" or "Cc:" headers, configure
-attachments, and sign off patches with configuration variables.
+outputting more than one patch, add "To:" or "Cc:" headers, configure
+attachments, change the patch output directory, and sign off patches
+with configuration variables.
------------
[format]
@@ -343,7 +345,8 @@ attachments, and sign off patches with configuration variables.
cc = <email>
attach [ = mime-boundary-string ]
signOff = true
- coverletter = auto
+ outputDirectory = <directory>
+ coverLetter = auto
------------
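For example, assuming the current branch tracks `origin/master` and the message id is a placeholder:

----
# Send a v2 reroll as a reply to the original thread, recording a
# base commit automatically.
git format-patch -v2 --in-reply-to='<cover.1234@example.com>' \
	--base=auto -o outgoing/ origin/master..HEAD

# Or set the equivalent defaults, including the output directory.
git config format.outputDirectory outgoing/
git config format.coverLetter auto
----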
diff --git a/Documentation/git-gc.txt b/Documentation/git-gc.txt
index 247f765604..0c114ad1ca 100644
--- a/Documentation/git-gc.txt
+++ b/Documentation/git-gc.txt
@@ -115,15 +115,14 @@ NOTES
-----
'git gc' tries very hard not to delete objects that are referenced
-anywhere in your repository. In
-particular, it will keep not only objects referenced by your current set
-of branches and tags, but also objects referenced by the index,
-remote-tracking branches, refs saved by 'git filter-branch' in
-refs/original/, reflogs (which may reference commits in branches
-that were later amended or rewound), and anything else in the refs/* namespace.
-If you are expecting some objects to be deleted and they aren't, check
-all of those locations and decide whether it makes sense in your case to
-remove those references.
+anywhere in your repository. In particular, it will keep not only
+objects referenced by your current set of branches and tags, but also
+objects referenced by the index, remote-tracking branches, notes saved
+by 'git notes' under refs/notes/, reflogs (which may reference commits
+in branches that were later amended or rewound), and anything else in
+the refs/* namespace. If you are expecting some objects to be deleted
+and they aren't, check all of those locations and decide whether it
+makes sense in your case to remove those references.
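For example, before concluding that objects are being kept alive unexpectedly, those locations can be inspected first:

----
# List refs that commonly pin otherwise-unreachable objects.
git for-each-ref refs/notes/ refs/original/ refs/remotes/

# Inspect the reflog of the current branch, then prune aggressively.
git reflog show HEAD
git gc --prune=now
----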
On the other hand, when 'git gc' runs concurrently with another process,
there is a risk of it deleting an object that the other process is using
diff --git a/Documentation/git-grep.txt b/Documentation/git-grep.txt
index 2d27969057..c89fb569e3 100644
--- a/Documentation/git-grep.txt
+++ b/Documentation/git-grep.txt
@@ -271,6 +271,23 @@ providing this option will cause it to die.
-f <file>::
Read patterns from <file>, one per line.
++
+Passing the pattern via <file> allows for providing a search pattern
+containing a \0.
++
+Not all pattern types support patterns containing \0. Git will error
+out if a given pattern type can't support such a pattern. The
+`--perl-regexp` pattern type when compiled against the PCRE v2 backend
+has the widest support for these types of patterns.
++
+In versions of Git before 2.23.0, patterns containing \0 would be
+silently considered fixed. This was never documented; there were also
+odd and undocumented interactions between e.g. non-ASCII patterns
+containing \0 and `--ignore-case`.
++
+In future versions we may learn to support patterns containing \0 for
+more search backends; until then we'll die when the pattern type in
+question doesn't support them.
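For example, assuming Git was built against PCRE v2:

----
# Patterns containing a NUL byte can only be supplied via a file.
printf 'foo\0bar' >pattern &&
git grep --perl-regexp -f pattern
----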
-e::
The next parameter is the pattern. This option has to be
diff --git a/Documentation/git-gui.txt b/Documentation/git-gui.txt
index 5f93f8003d..c9d7e96214 100644
--- a/Documentation/git-gui.txt
+++ b/Documentation/git-gui.txt
@@ -112,15 +112,9 @@ Other
versions are distributed as part of the Git suite for the convenience
of end users.
-A 'git gui' development repository can be obtained from:
+The official repository of the 'git gui' project can be found at:
- git clone git://repo.or.cz/git-gui.git
-
-or
-
- git clone http://repo.or.cz/r/git-gui.git
-
-or browsed online at http://repo.or.cz/w/git-gui.git/[].
+ https://github.com/prati0100/git-gui.git/
GIT
---
diff --git a/Documentation/git-ls-remote.txt b/Documentation/git-ls-remote.txt
index 0b057cbb10..a2ea1fd687 100644
--- a/Documentation/git-ls-remote.txt
+++ b/Documentation/git-ls-remote.txt
@@ -92,21 +92,23 @@ OPTIONS
EXAMPLES
--------
- $ git ls-remote --tags ./.
- d6602ec5194c87b0fc87103ca4d67251c76f233a refs/tags/v0.99
- f25a265a342aed6041ab0cc484224d9ca54b6f41 refs/tags/v0.99.1
- 7ceca275d047c90c0c7d5afb13ab97efdf51bd6e refs/tags/v0.99.3
- c5db5456ae3b0873fc659c19fafdde22313cc441 refs/tags/v0.99.2
- 0918385dbd9656cab0d1d81ba7453d49bbc16250 refs/tags/junio-gpg-pub
- $ git ls-remote http://www.kernel.org/pub/scm/git/git.git master pu rc
- 5fe978a5381f1fbad26a80e682ddd2a401966740 refs/heads/master
- c781a84b5204fb294c9ccc79f8b3baceeb32c061 refs/heads/pu
- $ git remote add korg http://www.kernel.org/pub/scm/git/git.git
- $ git ls-remote --tags korg v\*
- d6602ec5194c87b0fc87103ca4d67251c76f233a refs/tags/v0.99
- f25a265a342aed6041ab0cc484224d9ca54b6f41 refs/tags/v0.99.1
- c5db5456ae3b0873fc659c19fafdde22313cc441 refs/tags/v0.99.2
- 7ceca275d047c90c0c7d5afb13ab97efdf51bd6e refs/tags/v0.99.3
+----
+$ git ls-remote --tags ./.
+d6602ec5194c87b0fc87103ca4d67251c76f233a refs/tags/v0.99
+f25a265a342aed6041ab0cc484224d9ca54b6f41 refs/tags/v0.99.1
+7ceca275d047c90c0c7d5afb13ab97efdf51bd6e refs/tags/v0.99.3
+c5db5456ae3b0873fc659c19fafdde22313cc441 refs/tags/v0.99.2
+0918385dbd9656cab0d1d81ba7453d49bbc16250 refs/tags/junio-gpg-pub
+$ git ls-remote http://www.kernel.org/pub/scm/git/git.git master pu rc
+5fe978a5381f1fbad26a80e682ddd2a401966740 refs/heads/master
+c781a84b5204fb294c9ccc79f8b3baceeb32c061 refs/heads/pu
+$ git remote add korg http://www.kernel.org/pub/scm/git/git.git
+$ git ls-remote --tags korg v\*
+d6602ec5194c87b0fc87103ca4d67251c76f233a refs/tags/v0.99
+f25a265a342aed6041ab0cc484224d9ca54b6f41 refs/tags/v0.99.1
+c5db5456ae3b0873fc659c19fafdde22313cc441 refs/tags/v0.99.2
+7ceca275d047c90c0c7d5afb13ab97efdf51bd6e refs/tags/v0.99.3
+----
SEE ALSO
--------
diff --git a/Documentation/git-merge-base.txt b/Documentation/git-merge-base.txt
index 261d5c1164..2d944e0851 100644
--- a/Documentation/git-merge-base.txt
+++ b/Documentation/git-merge-base.txt
@@ -80,9 +80,11 @@ which is reachable from both 'A' and 'B' through the parent relationship.
For example, with this topology:
- o---o---o---B
- /
- ---o---1---o---o---o---A
+....
+ o---o---o---B
+ /
+---o---1---o---o---o---A
+....
the merge base between 'A' and 'B' is '1'.
@@ -90,21 +92,25 @@ Given three commits 'A', 'B' and 'C', `git merge-base A B C` will compute the
merge base between 'A' and a hypothetical commit 'M', which is a merge
between 'B' and 'C'. For example, with this topology:
- o---o---o---o---C
- /
- / o---o---o---B
- / /
- ---2---1---o---o---o---A
+....
+ o---o---o---o---C
+ /
+ / o---o---o---B
+ / /
+---2---1---o---o---o---A
+....
the result of `git merge-base A B C` is '1'. This is because the
equivalent topology with a merge commit 'M' between 'B' and 'C' is:
- o---o---o---o---o
- / \
- / o---o---o---o---M
- / /
- ---2---1---o---o---o---A
+....
+ o---o---o---o---o
+ / \
+ / o---o---o---o---M
+ / /
+---2---1---o---o---o---A
+....
and the result of `git merge-base A M` is '1'. Commit '2' is also a
common ancestor between 'A' and 'M', but '1' is a better common ancestor,
@@ -116,11 +122,13 @@ the best common ancestor of all commits.
When the history involves criss-cross merges, there can be more than one
'best' common ancestor for two commits. For example, with this topology:
- ---1---o---A
- \ /
- X
- / \
- ---2---o---o---B
+....
+---1---o---A
+ \ /
+ X
+ / \
+---2---o---o---B
+....
both '1' and '2' are merge-bases of A and B. Neither one is better than
the other (both are 'best' merge bases). When the `--all` option is not given,
@@ -131,18 +139,22 @@ and B is (or at least used to be) to compute the merge base between
A and B, and check if it is the same as A, in which case, A is an
ancestor of B. You will see this idiom used often in older scripts.
- A=$(git rev-parse --verify A)
- if test "$A" = "$(git merge-base A B)"
- then
- ... A is an ancestor of B ...
- fi
+....
+A=$(git rev-parse --verify A)
+if test "$A" = "$(git merge-base A B)"
+then
+ ... A is an ancestor of B ...
+fi
+....
In modern git, you can say this in a more direct way:
- if git merge-base --is-ancestor A B
- then
- ... A is an ancestor of B ...
- fi
+....
+if git merge-base --is-ancestor A B
+then
+ ... A is an ancestor of B ...
+fi
+....
instead.
@@ -154,13 +166,15 @@ topic origin/master`, the history of remote-tracking branch
`origin/master` may have been rewound and rebuilt, leading to a
history of this shape:
- o---B2
- /
- ---o---o---B1--o---o---o---B (origin/master)
- \
- B0
- \
- D0---D1---D (topic)
+....
+ o---B2
+ /
+---o---o---B1--o---o---o---B (origin/master)
+ \
+ B0
+ \
+ D0---D1---D (topic)
+....
where `origin/master` used to point at commits B0, B1, B2 and now it
points at B, and your `topic` branch was started on top of it back
@@ -193,13 +207,15 @@ will find B0, and
will replay D0, D1 and D on top of B to create a new history of this
shape:
- o---B2
- /
- ---o---o---B1--o---o---o---B (origin/master)
- \ \
- B0 D0'--D1'--D' (topic - updated)
- \
- D0---D1---D (topic - old)
+....
+ o---B2
+ /
+---o---o---B1--o---o---o---B (origin/master)
+ \ \
+ B0 D0'--D1'--D' (topic - updated)
+ \
+ D0---D1---D (topic - old)
+....
A caveat is that older reflog entries in your repository may be
expired by `git gc`. If B0 no longer appears in the reflog of the
diff --git a/Documentation/git-merge-index.txt b/Documentation/git-merge-index.txt
index 02676fb391..2ab84a91e5 100644
--- a/Documentation/git-merge-index.txt
+++ b/Documentation/git-merge-index.txt
@@ -54,20 +54,24 @@ original is first. But the argument order to the 3-way merge program
Examples:
- torvalds@ppc970:~/merge-test> git merge-index cat MM
- This is MM from the original tree. # original
- This is modified MM in the branch A. # merge1
- This is modified MM in the branch B. # merge2
- This is modified MM in the branch B. # current contents
+----
+torvalds@ppc970:~/merge-test> git merge-index cat MM
+This is MM from the original tree. # original
+This is modified MM in the branch A. # merge1
+This is modified MM in the branch B. # merge2
+This is modified MM in the branch B. # current contents
+----
or
- torvalds@ppc970:~/merge-test> git merge-index cat AA MM
- cat: : No such file or directory
- This is added AA in the branch A.
- This is added AA in the branch B.
- This is added AA in the branch B.
- fatal: merge program failed
+----
+torvalds@ppc970:~/merge-test> git merge-index cat AA MM
+cat: : No such file or directory
+This is added AA in the branch A.
+This is added AA in the branch B.
+This is added AA in the branch B.
+fatal: merge program failed
+----
where the latter example shows how 'git merge-index' will stop trying to
merge once anything has returned an error (i.e., `cat` returned an error
diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt
index 01fd52dc70..092529c619 100644
--- a/Documentation/git-merge.txt
+++ b/Documentation/git-merge.txt
@@ -10,7 +10,7 @@ SYNOPSIS
--------
[verse]
'git merge' [-n] [--stat] [--no-commit] [--squash] [--[no-]edit]
- [-s <strategy>] [-X <strategy-option>] [-S[<keyid>]]
+ [--no-verify] [-s <strategy>] [-X <strategy-option>] [-S[<keyid>]]
[--[no-]allow-unrelated-histories]
[--[no-]rerere-autoupdate] [-m <msg>] [-F <file>] [<commit>...]
'git merge' (--continue | --abort | --quit)
diff --git a/Documentation/git-rebase.txt b/Documentation/git-rebase.txt
index 6156609cf7..639a4179d1 100644
--- a/Documentation/git-rebase.txt
+++ b/Documentation/git-rebase.txt
@@ -8,8 +8,8 @@ git-rebase - Reapply commits on top of another base tip
SYNOPSIS
--------
[verse]
-'git rebase' [-i | --interactive] [<options>] [--exec <cmd>] [--onto <newbase>]
- [<upstream> [<branch>]]
+'git rebase' [-i | --interactive] [<options>] [--exec <cmd>]
+ [--onto <newbase> | --keep-base] [<upstream> [<branch>]]
'git rebase' [-i | --interactive] [<options>] [--exec <cmd>] [--onto <newbase>]
--root [<branch>]
'git rebase' (--continue | --skip | --abort | --quit | --edit-todo | --show-current-patch)
@@ -217,6 +217,24 @@ As a special case, you may use "A\...B" as a shortcut for the
merge base of A and B if there is exactly one merge base. You can
leave out at most one of A and B, in which case it defaults to HEAD.
+--keep-base::
+ Set the starting point at which to create the new commits to the
+	merge base of <upstream> and <branch>. Running
+ 'git rebase --keep-base <upstream> <branch>' is equivalent to
+ running 'git rebase --onto <upstream>... <upstream>'.
++
+This option is useful in the case where one is developing a feature on
+top of an upstream branch. While the feature is being worked on, the
+upstream branch may advance, and it may not be the best idea to keep
+rebasing on top of the upstream but rather to keep the base commit as-is.
++
+Although both this option and --fork-point find the merge base between
+<upstream> and <branch>, this option uses the merge base as the _starting
+point_ on which new commits will be created, whereas --fork-point uses
+the merge base to determine the _set of commits_ which will be rebased.
++
+See also INCOMPATIBLE OPTIONS below.
+
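For example, to tidy up the current topic branch without picking up newer upstream work:

----
# Interactively clean up the current branch on top of its existing
# merge base with 'master' (the base commit does not move).
git rebase --keep-base --interactive master

# The same starting point, spelled with --onto instead.
git rebase --onto master... master
----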
<upstream>::
Upstream branch to compare against. May be any valid commit,
not just an existing branch name. Defaults to the configured
@@ -369,6 +387,10 @@ ends up being empty, the <upstream> will be used as a fallback.
+
If either <upstream> or --root is given on the command line, then the
default is `--no-fork-point`, otherwise the default is `--fork-point`.
++
+If your branch was based on <upstream> but <upstream> was rewound and
+your branch contains commits which were dropped, this option can be used
+with `--keep-base` in order to drop those commits from your branch.
--ignore-whitespace::
--whitespace=<option>::
@@ -543,8 +565,8 @@ In addition, the following pairs of options are incompatible:
* --preserve-merges and --interactive
* --preserve-merges and --signoff
* --preserve-merges and --rebase-merges
- * --rebase-merges and --strategy
- * --rebase-merges and --strategy-option
+ * --keep-base and --onto
+ * --keep-base and --root
BEHAVIORAL DIFFERENCES
-----------------------
@@ -832,7 +854,8 @@ Hard case: The changes are not the same.::
This happens if the 'subsystem' rebase had conflicts, or used
`--interactive` to omit, edit, squash, or fixup commits; or
if the upstream used one of `commit --amend`, `reset`, or
- `filter-branch`.
+ a full history rewriting command like
+ https://github.com/newren/git-filter-repo[`filter-repo`].
The easy case
@@ -870,7 +893,7 @@ NOTE: While an "easy case recovery" sometimes appears to be successful
--interactive` will be **resurrected**!
The idea is to manually tell 'git rebase' "where the old 'subsystem'
-ended and your 'topic' began", that is, what the old merge-base
+ended and your 'topic' began", that is, what the old merge base
between them was. You will have to find a way to name the last commit
of the old 'subsystem', for example:
diff --git a/Documentation/git-receive-pack.txt b/Documentation/git-receive-pack.txt
index dedf97efbb..25702ed730 100644
--- a/Documentation/git-receive-pack.txt
+++ b/Documentation/git-receive-pack.txt
@@ -165,29 +165,31 @@ ref listing the commits pushed to the repository, and logs the push
certificates of signed pushes with good signatures to a logger
service:
- #!/bin/sh
- # mail out commit update information.
- while read oval nval ref
- do
- if expr "$oval" : '0*$' >/dev/null
- then
- echo "Created a new ref, with the following commits:"
- git rev-list --pretty "$nval"
- else
- echo "New commits:"
- git rev-list --pretty "$nval" "^$oval"
- fi |
- mail -s "Changes to ref $ref" commit-list@mydomain
- done
- # log signed push certificate, if any
- if test -n "${GIT_PUSH_CERT-}" && test ${GIT_PUSH_CERT_STATUS} = G
+----
+#!/bin/sh
+# mail out commit update information.
+while read oval nval ref
+do
+ if expr "$oval" : '0*$' >/dev/null
then
- (
- echo expected nonce is ${GIT_PUSH_NONCE}
- git cat-file blob ${GIT_PUSH_CERT}
- ) | mail -s "push certificate from $GIT_PUSH_CERT_SIGNER" push-log@mydomain
- fi
- exit 0
+ echo "Created a new ref, with the following commits:"
+ git rev-list --pretty "$nval"
+ else
+ echo "New commits:"
+ git rev-list --pretty "$nval" "^$oval"
+ fi |
+ mail -s "Changes to ref $ref" commit-list@mydomain
+done
+# log signed push certificate, if any
+if test -n "${GIT_PUSH_CERT-}" && test ${GIT_PUSH_CERT_STATUS} = G
+then
+ (
+ echo expected nonce is ${GIT_PUSH_NONCE}
+ git cat-file blob ${GIT_PUSH_CERT}
+ ) | mail -s "push certificate from $GIT_PUSH_CERT_SIGNER" push-log@mydomain
+fi
+exit 0
+----
The exit code from this hook invocation is ignored, however a
non-zero exit code will generate an error message.
@@ -212,8 +214,10 @@ anyway.
This hook can be used, for example, to run `git update-server-info`
if the repository is packed and is served via a dumb transport.
- #!/bin/sh
- exec git update-server-info
+----
+#!/bin/sh
+exec git update-server-info
+----
QUARANTINE ENVIRONMENT
diff --git a/Documentation/git-replace.txt b/Documentation/git-replace.txt
index 246dc9943c..f271d758c3 100644
--- a/Documentation/git-replace.txt
+++ b/Documentation/git-replace.txt
@@ -123,10 +123,10 @@ The following format are available:
CREATING REPLACEMENT OBJECTS
----------------------------
-linkgit:git-filter-branch[1], linkgit:git-hash-object[1] and
-linkgit:git-rebase[1], among other git commands, can be used to create
-replacement objects from existing objects. The `--edit` option can
-also be used with 'git replace' to create a replacement object by
+linkgit:git-hash-object[1], linkgit:git-rebase[1], and
+https://github.com/newren/git-filter-repo[git-filter-repo], among other git commands, can be used to
+create replacement objects from existing objects. The `--edit` option
+can also be used with 'git replace' to create a replacement object by
editing an existing object.
If you want to replace many blobs, trees or commits that are part of a
@@ -148,13 +148,13 @@ pending objects.
SEE ALSO
--------
linkgit:git-hash-object[1]
-linkgit:git-filter-branch[1]
linkgit:git-rebase[1]
linkgit:git-tag[1]
linkgit:git-branch[1]
linkgit:git-commit[1]
linkgit:git-var[1]
linkgit:git[1]
+https://github.com/newren/git-filter-repo[git-filter-repo]
GIT
---
diff --git a/Documentation/git-rev-list.txt b/Documentation/git-rev-list.txt
index 9392760b25..025c911436 100644
--- a/Documentation/git-rev-list.txt
+++ b/Documentation/git-rev-list.txt
@@ -9,59 +9,7 @@ git-rev-list - Lists commit objects in reverse chronological order
SYNOPSIS
--------
[verse]
-'git rev-list' [ --max-count=<number> ]
- [ --skip=<number> ]
- [ --max-age=<timestamp> ]
- [ --min-age=<timestamp> ]
- [ --sparse ]
- [ --merges ]
- [ --no-merges ]
- [ --min-parents=<number> ]
- [ --no-min-parents ]
- [ --max-parents=<number> ]
- [ --no-max-parents ]
- [ --first-parent ]
- [ --remove-empty ]
- [ --full-history ]
- [ --not ]
- [ --all ]
- [ --branches[=<pattern>] ]
- [ --tags[=<pattern>] ]
- [ --remotes[=<pattern>] ]
- [ --glob=<glob-pattern> ]
- [ --ignore-missing ]
- [ --stdin ]
- [ --quiet ]
- [ --topo-order ]
- [ --parents ]
- [ --timestamp ]
- [ --left-right ]
- [ --left-only ]
- [ --right-only ]
- [ --cherry-mark ]
- [ --cherry-pick ]
- [ --encoding=<encoding> ]
- [ --(author|committer|grep)=<pattern> ]
- [ --regexp-ignore-case | -i ]
- [ --extended-regexp | -E ]
- [ --fixed-strings | -F ]
- [ --date=<format>]
- [ [ --objects | --objects-edge | --objects-edge-aggressive ]
- [ --unpacked ]
- [ --object-names | --no-object-names ]
- [ --filter=<filter-spec> [ --filter-print-omitted ] ] ]
- [ --missing=<missing-action> ]
- [ --pretty | --header ]
- [ --bisect ]
- [ --bisect-vars ]
- [ --bisect-all ]
- [ --merge ]
- [ --reverse ]
- [ --walk-reflogs ]
- [ --no-walk ] [ --do-walk ]
- [ --count ]
- [ --use-bitmap-index ]
- <commit>... [ \-- <paths>... ]
+'git rev-list' [<options>] <commit>... [[--] <path>...]
DESCRIPTION
-----------
diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
index d93e5d0f58..0a69810147 100644
--- a/Documentation/git-send-email.txt
+++ b/Documentation/git-send-email.txt
@@ -486,11 +486,13 @@ Use gmail as the smtp server
To use 'git send-email' to send your patches through the GMail SMTP server,
edit ~/.gitconfig to specify your account settings:
- [sendemail]
- smtpEncryption = tls
- smtpServer = smtp.gmail.com
- smtpUser = yourname@gmail.com
- smtpServerPort = 587
+----
+[sendemail]
+ smtpEncryption = tls
+ smtpServer = smtp.gmail.com
+ smtpUser = yourname@gmail.com
+ smtpServerPort = 587
+----
If you have multifactor authentication setup on your gmail account, you will
need to generate an app-specific password for use with 'git send-email'. Visit
diff --git a/Documentation/git-stash.txt b/Documentation/git-stash.txt
index 8fbe12c66c..53e1a1205d 100644
--- a/Documentation/git-stash.txt
+++ b/Documentation/git-stash.txt
@@ -87,8 +87,9 @@ The `--patch` option implies `--keep-index`. You can use
save [-p|--patch] [-k|--[no-]keep-index] [-u|--include-untracked] [-a|--all] [-q|--quiet] [<message>]::
This option is deprecated in favour of 'git stash push'. It
- differs from "stash push" in that it cannot take pathspecs,
- and any non-option arguments form the message.
+ differs from "stash push" in that it cannot take pathspecs.
+ Instead, all non-option arguments are concatenated to form the stash
+ message.
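For example (the path and messages are illustrative):

----
# 'stash push' takes pathspecs and an explicit message ...
git stash push -m "wip: parser tweaks" -- src/parser.c

# ... whereas the deprecated 'stash save' turns every non-option
# argument into the message.
git stash save wip parser tweaks
----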
list [<options>]::
diff --git a/Documentation/git-status.txt b/Documentation/git-status.txt
index d4e8f24f0c..7731b45f07 100644
--- a/Documentation/git-status.txt
+++ b/Documentation/git-status.txt
@@ -59,16 +59,17 @@ This is optional and defaults to the original version 'v1' format.
--untracked-files[=<mode>]::
Show untracked files.
+
+--
The mode parameter is used to specify the handling of untracked files.
It is optional: it defaults to 'all', and if specified, it must be
stuck to the option (e.g. `-uno`, but not `-u no`).
-+
+
The possible options are:
-+
+
- 'no' - Show no untracked files.
- 'normal' - Shows untracked files and directories.
- 'all' - Also shows individual files in untracked directories.
-+
+
When `-u` option is not used, untracked files and directories are
shown (i.e. the same as specifying `normal`), to help you avoid
forgetting to add newly created files. Because it takes extra work
@@ -78,9 +79,10 @@ Consider enabling untracked cache and split index if supported (see
`git update-index --untracked-cache` and `git update-index
--split-index`). Otherwise you can use `no` to have `git status`
return more quickly without showing untracked files.
-+
+
The default can be changed using the status.showUntrackedFiles
configuration variable documented in linkgit:git-config[1].
+--
--ignore-submodules[=<when>]::
Ignore changes to submodules when looking for changes. <when> can be
@@ -100,11 +102,12 @@ configuration variable documented in linkgit:git-config[1].
--ignored[=<mode>]::
Show ignored files as well.
+
+--
The mode parameter is used to specify the handling of ignored files.
It is optional: it defaults to 'traditional'.
-+
+
The possible options are:
-+
+
- 'traditional' - Shows ignored files and directories, unless
--untracked-files=all is specified, in which case
individual files in ignored directories are
@@ -112,12 +115,13 @@ The possible options are:
- 'no' - Show no ignored files.
- 'matching' - Shows ignored files and directories matching an
ignore pattern.
-+
+
When 'matching' mode is specified, paths that explicitly match an
ignored pattern are shown. If a directory matches an ignore pattern,
then it is shown, but not paths contained in the ignored directory. If
a directory does not match an ignore pattern, but all contents are
ignored, then the directory is not shown, but all contents are shown.
+--
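For example:

----
# Show individual files inside untracked directories, plus ignored
# paths that explicitly match an ignore pattern.
git status --untracked-files=all --ignored=matching
----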
-z::
Terminate entries with NUL, instead of LF. This implies
diff --git a/Documentation/git-submodule.txt b/Documentation/git-submodule.txt
index 0ed5c24dc1..1f46380af2 100644
--- a/Documentation/git-submodule.txt
+++ b/Documentation/git-submodule.txt
@@ -173,7 +173,8 @@ submodule with the `--init` option.
If `--recursive` is specified, this command will recurse into the
registered submodules, and update any nested submodules within.
--
-set-branch ((-d|--default)|(-b|--branch <branch>)) [--] <path>::
+set-branch (-b|--branch) <branch> [--] <path>::
+set-branch (-d|--default) [--] <path>::
Sets the default remote tracking branch for the submodule. The
`--branch` option allows the remote branch to be specified. The
`--default` option removes the submodule.<name>.branch configuration
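For example (the submodule path and branch name are illustrative):

----
# Track the 'stable' branch of the submodule at lib/foo ...
git submodule set-branch --branch stable -- lib/foo

# ... or drop the setting and go back to the remote's default.
git submodule set-branch --default -- lib/foo
----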
diff --git a/Documentation/git-svn.txt b/Documentation/git-svn.txt
index 30711625fd..53774f5b64 100644
--- a/Documentation/git-svn.txt
+++ b/Documentation/git-svn.txt
@@ -769,11 +769,11 @@ option for (hopefully) obvious reasons.
+
This option is NOT recommended as it makes it difficult to track down
old references to SVN revision numbers in existing documentation, bug
-reports and archives. If you plan to eventually migrate from SVN to Git
-and are certain about dropping SVN history, consider
-linkgit:git-filter-branch[1] instead. filter-branch also allows
-reformatting of metadata for ease-of-reading and rewriting authorship
-info for non-"svn.authorsFile" users.
+reports, and archives. If you plan to eventually migrate from SVN to
+Git and are certain about dropping SVN history, consider
+https://github.com/newren/git-filter-repo[git-filter-repo] instead.
+filter-repo also allows reformatting of metadata for ease-of-reading
+and rewriting authorship info for non-"svn.authorsFile" users.
svn.useSvmProps::
svn-remote.<name>.useSvmProps::
diff --git a/Documentation/gitattributes.txt b/Documentation/gitattributes.txt
index fb1d188d44..c5a528c667 100644
--- a/Documentation/gitattributes.txt
+++ b/Documentation/gitattributes.txt
@@ -810,6 +810,8 @@ patterns are available:
- `css` suitable for cascading style sheets.
+- `dts` suitable for devicetree (DTS) files.
+
- `fortran` suitable for source code in the Fortran language.
- `fountain` suitable for Fountain documents.
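For example, such a driver could be assigned in `.gitattributes` like this (a sketch; treating `*.dtsi` the same way is an assumption):

----
# Append devicetree attributes to .gitattributes.
cat >>.gitattributes <<\EOF
*.dts	diff=dts
*.dtsi	diff=dts
EOF
----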
diff --git a/Documentation/gitcli.txt b/Documentation/gitcli.txt
index 1ed3ca33b7..4b32876b6e 100644
--- a/Documentation/gitcli.txt
+++ b/Documentation/gitcli.txt
@@ -37,6 +37,12 @@ arguments. Here are the rules:
file called HEAD in your work tree, `git diff HEAD` is ambiguous, and
you have to say either `git diff HEAD --` or `git diff -- HEAD` to
disambiguate.
+
+ * Because `--` disambiguates revisions and paths in some commands, it
+ cannot be used for those commands to separate options and revisions.
+ You can use `--end-of-options` for this (it also works for commands
+   that do not distinguish between revisions and paths, in which case it
+ is simply an alias for `--`).
+
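A sketch of the scripting use case, where `$userref` stands for an untrusted, user-supplied ref name:

----
# Never let a user-supplied ref name be parsed as an option; the
# trailing -- marks the (empty) list of paths.
git rev-list --end-of-options "$userref" --
----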
When writing a script that is expected to handle random user-input, it is
a good practice to make it explicit which arguments are which by placing
diff --git a/Documentation/githooks.txt b/Documentation/githooks.txt
index 82cd573776..50365f2914 100644
--- a/Documentation/githooks.txt
+++ b/Documentation/githooks.txt
@@ -103,6 +103,28 @@ The default 'pre-commit' hook, when enabled--and with the
`hooks.allownonascii` config option unset or set to false--prevents
the use of non-ASCII filenames.
+pre-merge-commit
+~~~~~~~~~~~~~~~~
+
+This hook is invoked by linkgit:git-merge[1], and can be bypassed
+with the `--no-verify` option. It takes no parameters, and is
+invoked after the merge has been carried out successfully and before
+obtaining the proposed commit log message to
+make a commit. Exiting with a non-zero status from this script
+causes the `git merge` command to abort before creating a commit.
+
+The default 'pre-merge-commit' hook, when enabled, runs the
+'pre-commit' hook, if the latter is enabled.
+
+This hook is invoked with the environment variable
+`GIT_EDITOR=:` if the command will not bring up an editor
+to modify the commit message.
+
+If the merge cannot be carried out automatically, the conflicts
+need to be resolved and the result committed separately (see
+linkgit:git-merge[1]). At that point, this hook will not be executed,
+but the 'pre-commit' hook will, if it is enabled.
+
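A minimal sketch of such a hook, mirroring the documented default behaviour of chaining to the 'pre-commit' hook:

----
#!/bin/sh
# .git/hooks/pre-merge-commit (must be executable)
hooks_dir=$(git rev-parse --git-path hooks) || exit
if test -x "$hooks_dir/pre-commit"
then
	exec "$hooks_dir/pre-commit"
fi
:
----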
prepare-commit-msg
~~~~~~~~~~~~~~~~~~
@@ -425,10 +447,12 @@ post-rewrite
This hook is invoked by commands that rewrite commits
(linkgit:git-commit[1] when called with `--amend` and
-linkgit:git-rebase[1]; currently `git filter-branch` does 'not' call
-it!). Its first argument denotes the command it was invoked by:
-currently one of `amend` or `rebase`. Further command-dependent
-arguments may be passed in the future.
+linkgit:git-rebase[1]; however, full-history (re)writing tools like
+linkgit:git-fast-import[1] or
+https://github.com/newren/git-filter-repo[git-filter-repo] typically
+do not call it!). Its first argument denotes the command it was
+invoked by: currently one of `amend` or `rebase`. Further
+command-dependent arguments may be passed in the future.
The hook receives a list of the rewritten commits on stdin, in the
format
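To make the new hook concrete, here is a minimal sketch of a 'pre-merge-commit' hook (the check itself is just an example); it takes no parameters and aborts the merge commit by exiting non-zero:

----
#!/bin/sh
# .git/hooks/pre-merge-commit (must be executable)
# Refuse to record the merge if any tracked file still contains conflict markers.
if git grep -q -e '<<<<<<< ' -e '>>>>>>> '; then
	echo "pre-merge-commit: conflict markers found, aborting" >&2
	exit 1
fi
exit 0
----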
diff --git a/Documentation/gitmodules.txt b/Documentation/gitmodules.txt
index a66e95b70c..f2a65ba0ca 100644
--- a/Documentation/gitmodules.txt
+++ b/Documentation/gitmodules.txt
@@ -90,7 +90,7 @@ of the superproject, the setting there will override the one found in
.gitmodules.
Both settings can be overridden on the command line by using the
-"--ignore-submodule" option. The 'git submodule' commands are not
+"--ignore-submodules" option. The 'git submodule' commands are not
affected by this setting.
--
@@ -105,14 +105,15 @@ EXAMPLES
Consider the following .gitmodules file:
- [submodule "libfoo"]
- path = include/foo
- url = git://foo.com/git/lib.git
-
- [submodule "libbar"]
- path = include/bar
- url = git://bar.com/git/lib.git
+----
+[submodule "libfoo"]
+ path = include/foo
+ url = git://foo.com/git/lib.git
+[submodule "libbar"]
+ path = include/bar
+ url = git://bar.com/git/lib.git
+----
This defines two submodules, `libfoo` and `libbar`. These are expected to
be checked out in the paths `include/foo` and `include/bar`, and for both
diff --git a/Documentation/gitremote-helpers.txt b/Documentation/gitremote-helpers.txt
index 43f80c8068..a5c3c04371 100644
--- a/Documentation/gitremote-helpers.txt
+++ b/Documentation/gitremote-helpers.txt
@@ -297,9 +297,13 @@ Supported if the helper has the "option" capability.
same batch are complete. Only objects which were reported
in the output of 'list' with a sha1 may be fetched this way.
+
-Optionally may output a 'lock <file>' line indicating a file under
-GIT_DIR/objects/pack which is keeping a pack until refs can be
-suitably updated.
+Optionally may output a 'lock <file>' line indicating the full path of
+a file under `$GIT_DIR/objects/pack` which is keeping a pack until
+refs can be suitably updated. The path must end with `.keep`. This is
+a mechanism to name a <pack,idx,keep> tuple by giving only the keep
+component. The kept pack will not be deleted by a concurrent repack,
+even though its objects may not be referenced until the fetch completes.
+The `.keep` file will be deleted at the conclusion of the fetch.
+
If option 'check-connectivity' is requested, the helper must output
'connectivity-ok' if the clone is self-contained and connected.
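For illustration (the pack name is a placeholder), a helper that wants to protect a freshly written pack until the refs are updated would print a line like this on its stdout once the 'fetch' batch completes:

----
lock $GIT_DIR/objects/pack/pack-<hash>.keep
----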
diff --git a/Documentation/gitrepository-layout.txt b/Documentation/gitrepository-layout.txt
index 216b11ee88..d6388f10bb 100644
--- a/Documentation/gitrepository-layout.txt
+++ b/Documentation/gitrepository-layout.txt
@@ -59,7 +59,7 @@ objects/[0-9a-f][0-9a-f]::
here are often called 'unpacked' (or 'loose') objects.
objects/pack::
- Packs (files that store many object in compressed form,
+ Packs (files that store many objects in compressed form,
along with index files to allow them to be randomly
accessed) are found in this directory.
diff --git a/Documentation/gitweb.conf.txt b/Documentation/gitweb.conf.txt
index 35317e71c8..7963a79ba9 100644
--- a/Documentation/gitweb.conf.txt
+++ b/Documentation/gitweb.conf.txt
@@ -786,9 +786,9 @@ forks::
subdirectories of project root (basename) to be forks of existing
projects. For each project +$projname.git+, projects in the
+$projname/+ directory and its subdirectories will not be
- shown in the main projects list. Instead, a \'\+' mark is shown
- next to +$projname+, which links to a "forks" view that lists all
- the forks (all projects in +$projname/+ subdirectory). Additionally
+ shown in the main projects list. Instead, a \'+' mark is shown
+ next to `$projname`, which links to a "forks" view that lists all
+ the forks (all projects in `$projname/` subdirectory). Additionally
a "forks" view for a project is linked from project summary page.
+
If the project list is taken from a file (+$projects_list+ points to a
diff --git a/Documentation/manpage.xsl b/Documentation/manpage.xsl
new file mode 100644
index 0000000000..ef64bab17a
--- /dev/null
+++ b/Documentation/manpage.xsl
@@ -0,0 +1,3 @@
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+ <xsl:import href="http://docbook.sourceforge.net/release/xsl-ns/current/manpages/docbook.xsl" />
+</xsl:stylesheet>
diff --git a/Documentation/merge-options.txt b/Documentation/merge-options.txt
index 79a00d2a4a..59b8ff1e51 100644
--- a/Documentation/merge-options.txt
+++ b/Documentation/merge-options.txt
@@ -40,20 +40,24 @@ set to `no` at the beginning of them.
case of a merge conflict.
--ff::
- When the merge resolves as a fast-forward, only update the branch
- pointer, without creating a merge commit. This is the default
- behavior.
-
--no-ff::
- Create a merge commit even when the merge resolves as a
- fast-forward. This is the default behaviour when merging an
- annotated (and possibly signed) tag that is not stored in
- its natural place in 'refs/tags/' hierarchy.
-
--ff-only::
- Refuse to merge and exit with a non-zero status unless the
- current `HEAD` is already up to date or the merge can be
- resolved as a fast-forward.
+ Specifies how a merge is handled when the merged-in history is
+ already a descendant of the current history. `--ff` is the
+ default unless merging an annotated (and possibly signed) tag
+ that is not stored in its natural place in the `refs/tags/`
+ hierarchy, in which case `--no-ff` is assumed.
++
+With `--ff`, when possible resolve the merge as a fast-forward (only
+update the branch pointer to match the merged branch; do not create a
+merge commit). When not possible (when the merged-in history is not a
+descendant of the current history), create a merge commit.
++
+With `--no-ff`, create a merge commit in all cases, even when the merge
+could instead be resolved as a fast-forward.
++
+With `--ff-only`, resolve the merge as a fast-forward when possible.
+When not possible, refuse to merge and exit with a non-zero status.
-S[<keyid>]::
--gpg-sign[=<keyid>]::
@@ -105,6 +109,10 @@ option can be used to override --squash.
+
With --squash, --commit is not allowed, and will fail.
+--no-verify::
+ This option bypasses the pre-merge-commit and commit-msg hooks.
+ See also linkgit:githooks[5].
+
-s <strategy>::
--strategy=<strategy>::
Use the given merge strategy; can be supplied more than
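A few command lines illustrating the regrouped fast-forward options and the new `--no-verify` flag (branch and remote names are hypothetical):

----
$ git merge --ff-only origin/master   # fast-forward if possible, otherwise fail
$ git merge --no-ff topic             # always create a merge commit
$ git merge --no-verify topic         # skip the pre-merge-commit and commit-msg hooks
----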
diff --git a/Documentation/pretty-formats.txt b/Documentation/pretty-formats.txt
index 079598307a..b87e2e83e6 100644
--- a/Documentation/pretty-formats.txt
+++ b/Documentation/pretty-formats.txt
@@ -208,7 +208,7 @@ endif::git-rev-list[]
'%GP':: show the fingerprint of the primary key whose subkey was used
to sign a signed commit
'%gD':: reflog selector, e.g., `refs/stash@{1}` or `refs/stash@{2
- minutes ago`}; the format follows the rules described for the
+ minutes ago}`; the format follows the rules described for the
`-g` option. The portion before the `@` is the refname as
given on the command line (so `git log -g refs/heads/master`
would yield `refs/heads/master@{0}`).
diff --git a/Documentation/rev-list-options.txt b/Documentation/rev-list-options.txt
index bb1251c036..90ff9e2bea 100644
--- a/Documentation/rev-list-options.txt
+++ b/Documentation/rev-list-options.txt
@@ -756,6 +756,22 @@ explicitly-given commit or tree.
Note that the form '--filter=sparse:path=<path>' that wants to read
from an arbitrary path on the filesystem has been dropped for security
reasons.
++
+Multiple '--filter=' flags can be specified to combine filters. Only
+objects which are accepted by every filter are included.
++
+The form '--filter=combine:<filter1>+<filter2>+...<filterN>' can also be
+used to combine several filters, but this is harder than just repeating
+the '--filter' flag and is usually not necessary. Filters are joined by
+'{plus}' and individual filters are %-encoded (i.e. URL-encoded).
+Besides the '{plus}' and '%' characters, the following characters are
+reserved and also must be encoded: `~!@#$^&*()[]{}\;",<>?`+&#39;&#96;+
+as well as all characters with ASCII code &lt;= `0x20`, which includes
+space and newline.
++
+Other arbitrary characters can also be encoded. For instance,
+'combine:tree:3+blob:none' and 'combine:tree%3A3+blob%3Anone' are
+equivalent.
--no-filter::
Turn off any previous `--filter=` argument.
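As an example of the combined filters described above (repository contents are immaterial), the following two invocations request the same combination, limiting trees to depth 3 and omitting all blobs; repeating `--filter` is usually the simpler spelling:

----
$ git rev-list --objects --filter=tree:3 --filter=blob:none HEAD
$ git rev-list --objects --filter=combine:tree%3A3+blob%3Anone HEAD
----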
diff --git a/Documentation/technical/api-directory-listing.txt b/Documentation/technical/api-directory-listing.txt
index 5abb8e8b1f..76b6e4f71b 100644
--- a/Documentation/technical/api-directory-listing.txt
+++ b/Documentation/technical/api-directory-listing.txt
@@ -111,11 +111,11 @@ marked. If you to exclude files, make sure you have loaded index first.
* Prepare `struct dir_struct dir` and clear it with `memset(&dir, 0,
sizeof(dir))`.
-* To add single exclude pattern, call `add_exclude_list()` and then
- `add_exclude()`.
+* To add single exclude pattern, call `add_pattern_list()` and then
+ `add_pattern()`.
* To add patterns from a file (e.g. `.git/info/exclude`), call
- `add_excludes_from_file()` , and/or set `dir.exclude_per_dir`. A
+ `add_patterns_from_file()` , and/or set `dir.exclude_per_dir`. A
short-hand function `setup_standard_excludes()` can be used to set
up the standard set of exclude settings.
diff --git a/Documentation/technical/api-trace2.txt b/Documentation/technical/api-trace2.txt
index 71eb081fed..a045dbe422 100644
--- a/Documentation/technical/api-trace2.txt
+++ b/Documentation/technical/api-trace2.txt
@@ -128,7 +128,7 @@ yields
------------
$ cat ~/log.event
-{"event":"version","sid":"sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.620713Z","file":"common-main.c","line":38,"evt":"1","exe":"2.20.1.155.g426c96fcdb"}
+{"event":"version","sid":"sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.620713Z","file":"common-main.c","line":38,"evt":"2","exe":"2.20.1.155.g426c96fcdb"}
{"event":"start","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621027Z","file":"common-main.c","line":39,"t_abs":0.001173,"argv":["git","version"]}
{"event":"cmd_name","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621122Z","file":"git.c","line":432,"name":"version","hierarchy":"version"}
{"event":"exit","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621236Z","file":"git.c","line":662,"t_abs":0.001227,"code":0}
@@ -142,10 +142,9 @@ system or global config value to one of the following:
include::../trace2-target-values.txt[]
-If the target already exists and is a directory, the traces will be
-written to files (one per process) underneath the given directory. They
-will be named according to the last component of the SID (optionally
-followed by a counter to avoid filename collisions).
+When trace files are written to a target directory, they will be named according
+to the last component of the SID (optionally followed by a counter to avoid
+filename collisions).
== Trace2 API
@@ -605,17 +604,35 @@ only present on the "start" and "atexit" events.
==== Event-Specific Key/Value Pairs
`"version"`::
- This event gives the version of the executable and the EVENT format.
+ This event gives the version of the executable and the EVENT format. It
+ should always be the first event in a trace session. The EVENT format
+ version will be incremented if new event types are added, if existing
+ fields are removed, or if there are significant changes in
+ interpretation of existing events or fields. Smaller changes, such as
+ adding a new field to an existing event, will not require an increment
+ to the EVENT format version.
+
------------
{
"event":"version",
...
- "evt":"1", # EVENT format version
+ "evt":"2", # EVENT format version
"exe":"2.20.1.155.g426c96fcdb" # git version
}
------------
+`"discard"`::
+ This event is written to the git-trace2-discard sentinel file if there
+ are too many files in the target trace directory (see the
+ trace2.maxFiles config option).
++
+------------
+{
+ "event":"discard",
+ ...
+}
+------------
+
`"start"`::
This event contains the complete argv received by main().
+
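A sketch of how the new `discard` event arises in practice (paths and limits are made up): with the event target pointing at a directory and `trace2.maxFiles` set, Git writes one SID-named file per process and, once the directory holds too many files, stops tracing and records that in the sentinel file:

----
$ mkdir /tmp/git-traces
$ git config trace2.eventTarget /tmp/git-traces
$ git config trace2.maxFiles 100
$ git status
$ ls /tmp/git-traces                      # one file per process, named after the SID
$ cat /tmp/git-traces/git-trace2-discard  # written only after the limit is exceeded
----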
diff --git a/Documentation/technical/api-tree-walking.txt b/Documentation/technical/api-tree-walking.txt
index bde18622a8..7962e32854 100644
--- a/Documentation/technical/api-tree-walking.txt
+++ b/Documentation/technical/api-tree-walking.txt
@@ -62,9 +62,7 @@ Initializing
`setup_traverse_info`::
Initialize a `traverse_info` given the pathname of the tree to start
- traversing from. The `base` argument is assumed to be the `path`
- member of the `name_entry` being recursed into unless the tree is a
- top-level tree in which case the empty string ("") is used.
+ traversing from.
Walking
-------
@@ -140,6 +138,10 @@ same in the next callback invocation.
This utilizes the memory structure of a tree entry to avoid the
overhead of using a generic strlen().
+`strbuf_make_traverse_path`::
+
+ Convenience wrapper around `make_traverse_path` that writes the result into a strbuf.
+
Authors
-------
diff --git a/Documentation/technical/partial-clone.txt b/Documentation/technical/partial-clone.txt
index 896c7b3878..210373e258 100644
--- a/Documentation/technical/partial-clone.txt
+++ b/Documentation/technical/partial-clone.txt
@@ -30,12 +30,20 @@ advance* during clone and fetch operations and thereby reduce download
times and disk usage. Missing objects can later be "demand fetched"
if/when needed.
+A remote that can later provide the missing objects is called a
+promisor remote, as it promises to send the objects when
+requested. Initially Git supported only one promisor remote: the
+origin remote from which the user cloned, configured in the
+"extensions.partialClone" config option. Support for more than one
+promisor remote was added later.
+
Use of partial clone requires that the user be online and the origin
-remote be available for on-demand fetching of missing objects. This may
-or may not be problematic for the user. For example, if the user can
-stay within the pre-selected subset of the source tree, they may not
-encounter any missing objects. Alternatively, the user could try to
-pre-fetch various objects if they know that they are going offline.
+remote or other promisor remotes be available for on-demand fetching
+of missing objects. This may or may not be problematic for the user.
+For example, if the user can stay within the pre-selected subset of
+the source tree, they may not encounter any missing objects.
+Alternatively, the user could try to pre-fetch various objects if they
+know that they are going offline.
Non-Goals
@@ -100,18 +108,18 @@ or commits that reference missing trees.
Handling Missing Objects
------------------------
-- An object may be missing due to a partial clone or fetch, or missing due
- to repository corruption. To differentiate these cases, the local
- repository specially indicates such filtered packfiles obtained from the
- promisor remote as "promisor packfiles".
+- An object may be missing due to a partial clone or fetch, or missing
+ due to repository corruption. To differentiate these cases, the
+ local repository specially indicates such filtered packfiles
+ obtained from promisor remotes as "promisor packfiles".
+
These promisor packfiles consist of a "<name>.promisor" file with
arbitrary contents (like the "<name>.keep" files), in addition to
their "<name>.pack" and "<name>.idx" files.
- The local repository considers a "promisor object" to be an object that
- it knows (to the best of its ability) that the promisor remote has promised
- that it has, either because the local repository has that object in one of
+ it knows (to the best of its ability) that promisor remotes have promised
+ that they have, either because the local repository has that object in one of
its promisor packfiles, or because another promisor object refers to it.
+
When Git encounters a missing object, Git can see if it is a promisor object
@@ -123,12 +131,12 @@ expensive-to-modify list of missing objects.[a]
- Since almost all Git code currently expects any referenced object to be
present locally and because we do not want to force every command to do
a dry-run first, a fallback mechanism is added to allow Git to attempt
- to dynamically fetch missing objects from the promisor remote.
+ to dynamically fetch missing objects from promisor remotes.
+
When the normal object lookup fails to find an object, Git invokes
-fetch-object to try to get the object from the server and then retry
-the object lookup. This allows objects to be "faulted in" without
-complicated prediction algorithms.
+promisor_remote_get_direct() to try to get the object from a promisor
+remote and then retry the object lookup. This allows objects to be
+"faulted in" without complicated prediction algorithms.
+
For efficiency reasons, no check as to whether the missing object is
actually a promisor object is performed.
@@ -157,8 +165,7 @@ and prefetch those objects in bulk.
+
We are not happy with this global variable and would like to remove it,
but that requires significant refactoring of the object code to pass an
-additional flag. We hope that concurrent efforts to add an ODB API can
-encompass this.
+additional flag.
Fetching Missing Objects
@@ -182,21 +189,63 @@ has been updated to not use any object flags when the corresponding argument
though they are not necessary.
+Using many promisor remotes
+---------------------------
+
+Many promisor remotes can be configured and used.
+
+This allows a user, for example, to have multiple geographically-close
+cache servers for fetching missing blobs, while continuing to run
+filtered `git-fetch` commands against the central server.
+
+When fetching objects, promisor remotes are tried one after the other
+until all the objects have been fetched.
+
+Remotes that are considered "promisor" remotes are those specified by
+the following configuration variables:
+
+- `extensions.partialClone = <name>`
+
+- `remote.<name>.promisor = true`
+
+- `remote.<name>.partialCloneFilter = ...`
+
+Only one promisor remote can be configured using the
+`extensions.partialClone` config variable. This promisor remote will
+be the last one tried when fetching objects.
+
+We decided to try it last because someone who configures many promisor
+remotes most likely does so because the other promisor remotes are
+better in some way (perhaps closer, or faster for certain kinds of
+objects) than the origin, and the origin is typically the remote named
+by extensions.partialClone.
+
+This justification is not very strong, but one choice had to be made;
+in the long term the plan is to make the order fully configurable.
+
+For now, the other promisor remotes are tried in the order in which
+they appear in the config file.
+
Current Limitations
-------------------
-- The remote used for a partial clone (or the first partial fetch
- following a regular clone) is marked as the "promisor remote".
+- Promisor remotes are always tried in the order in which they appear
+ in the config file; it is not possible to specify the order in any
+ other way.
+
-We are currently limited to a single promisor remote and only that
-remote may be used for subsequent partial fetches.
+It is also not possible to specify an order to be used when fetching
+from one remote and a different order when fetching from another
+remote.
+
+- It is not possible to push only specific objects to a promisor
+ remote.
+
-We accept this limitation because we believe initial users of this
-feature will be using it on repositories with a strong single central
-server.
+It is not possible to push to multiple promisor remotes at the same
+time in a specific order.
-- Dynamic object fetching will only ask the promisor remote for missing
- objects. We assume that the promisor remote has a complete view of the
+- Dynamic object fetching will only ask promisor remotes for missing
+ objects. We assume that promisor remotes have a complete view of the
repository and can satisfy all such requests.
- Repack essentially treats promisor and non-promisor packfiles as 2
@@ -218,15 +267,17 @@ server.
Future Work
-----------
-- Allow more than one promisor remote and define a strategy for fetching
- missing objects from specific promisor remotes or of iterating over the
- set of promisor remotes until a missing object is found.
+- Improve the way to specify the order in which promisor remotes are
+ tried.
+
-A user might want to have multiple geographically-close cache servers
-for fetching missing blobs while continuing to do filtered `git-fetch`
-commands from the central server, for example.
+For example, this could allow one to specify explicitly something like:
+"When fetching from this remote, I want to use these promisor remotes
+in this order, but when pushing to or fetching from that remote, I want
+to use those promisor remotes in that order."
+
+- Allow pushing to promisor remotes.
+
-Or the user might want to work in a triangular work flow with multiple
+The user might want to work in a triangular work flow with multiple
promisor remotes that each have an incomplete view of the repository.
- Allow repack to work on promisor packfiles (while keeping them distinct
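A hypothetical setup matching the "Using many promisor remotes" section above (names and URLs are made up): a nearby cache server is added as an extra promisor remote, so missing objects are requested from it before falling back to the origin promisor remote:

----
$ git clone --filter=blob:none https://central.example.com/project.git
$ cd project
$ git remote add cache https://cache.example.com/project.git
$ git config remote.cache.promisor true
$ git config remote.cache.partialCloneFilter blob:none
----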
diff --git a/Documentation/trace2-target-values.txt b/Documentation/trace2-target-values.txt
index 27d3c64e66..3985b6d3c2 100644
--- a/Documentation/trace2-target-values.txt
+++ b/Documentation/trace2-target-values.txt
@@ -2,7 +2,9 @@
* `0` or `false` - Disables the target.
* `1` or `true` - Writes to `STDERR`.
* `[2-9]` - Writes to the already opened file descriptor.
-* `<absolute-pathname>` - Writes to the file in append mode.
+* `<absolute-pathname>` - Writes to the file in append mode. If the target
+already exists and is a directory, the traces will be written to files (one
+per process) underneath the given directory.
* `af_unix:[<socket_type>:]<absolute-pathname>` - Write to a
Unix DomainSocket (on platforms that support them). Socket
type can be either `stream` or `dgram`; if omitted Git will
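A short illustration of the directory form of `<absolute-pathname>` (the path is arbitrary), using the environment variable rather than the config setting:

----
$ mkdir /tmp/traces
$ GIT_TRACE2_EVENT=/tmp/traces git version
$ ls /tmp/traces        # one trace file per Git process, named after the SID
----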
diff --git a/Documentation/user-manual.txt b/Documentation/user-manual.txt
index 8bce75b2cf..06bd8994ee 100644
--- a/Documentation/user-manual.txt
+++ b/Documentation/user-manual.txt
@@ -1,5 +1,4 @@
-Git User Manual
-===============
+= Git User Manual
Git is a fast distributed revision control system.
@@ -41,12 +40,10 @@ complete.
[[repositories-and-branches]]
-Repositories and Branches
-=========================
+== Repositories and Branches
[[how-to-get-a-git-repository]]
-How to get a Git repository
----------------------------
+=== How to get a Git repository
It will be useful to have a Git repository to experiment with as you
read this manual.
@@ -73,8 +70,7 @@ top-level directory named `.git`, which contains all the information
about the history of the project.
[[how-to-check-out]]
-How to check out a different version of a project
--------------------------------------------------
+=== How to check out a different version of a project
Git is best thought of as a tool for storing the history of a collection
of files. It stores the history as a compressed collection of
@@ -151,8 +147,7 @@ with no way to find the history it used to point to; so use this command
carefully.
[[understanding-commits]]
-Understanding History: Commits
-------------------------------
+=== Understanding History: Commits
Every change in the history of a project is represented by a commit.
The linkgit:git-show[1] command shows the most recent commit on the
@@ -202,8 +197,7 @@ history, including file data and directory contents, is stored in an object
with a name that is a hash of its contents.
[[understanding-reachability]]
-Understanding history: commits, parents, and reachability
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Understanding history: commits, parents, and reachability
Every commit (except the very first commit in a project) also has a
parent commit which shows what happened before this commit.
@@ -227,8 +221,7 @@ that Y is a descendant of X, or that there is a chain of parents
leading from commit Y to commit X.
[[history-diagrams]]
-Understanding history: History diagrams
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Understanding history: History diagrams
We will sometimes represent Git history using diagrams like the one
below. Commits are shown as "o", and the links between them with
@@ -247,8 +240,7 @@ If we need to talk about a particular commit, the character "o" may
be replaced with another letter or number.
[[what-is-a-branch]]
-Understanding history: What is a branch?
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Understanding history: What is a branch?
When we need to be precise, we will use the word "branch" to mean a line
of development, and "branch head" (or just "head") to mean a reference
@@ -261,8 +253,7 @@ However, when no confusion will result, we often just use the term
"branch" both for branches and for branch heads.
[[manipulating-branches]]
-Manipulating branches
----------------------
+=== Manipulating branches
Creating, deleting, and modifying branches is quick and easy; here's
a summary of the commands:
@@ -299,8 +290,7 @@ ref: refs/heads/master
------------------------------------------------
[[detached-head]]
-Examining an old version without creating a new branch
-------------------------------------------------------
+=== Examining an old version without creating a new branch
The `git switch` command normally expects a branch head, but will also
accept an arbitrary commit when invoked with --detach; for example,
@@ -340,8 +330,7 @@ make up a name for the new branch. You can still create a new branch
(or tag) for this version later if you decide to.
[[examining-remote-branches]]
-Examining branches from a remote repository
--------------------------------------------
+=== Examining branches from a remote repository
The "master" branch that was created at the time you cloned is a copy
of the HEAD in the repository that you cloned from. That repository
@@ -383,8 +372,7 @@ Note that the name "origin" is just the name that Git uses by default
to refer to the repository that you cloned from.
[[how-git-stores-references]]
-Naming branches, tags, and other references
--------------------------------------------
+=== Naming branches, tags, and other references
Branches, remote-tracking branches, and tags are all references to
commits. All references are named with a slash-separated path name
@@ -413,8 +401,7 @@ references with the same shorthand name, see the "SPECIFYING
REVISIONS" section of linkgit:gitrevisions[7].
[[Updating-a-repository-With-git-fetch]]
-Updating a repository with git fetch
-------------------------------------
+=== Updating a repository with git fetch
After you clone a repository and commit a few changes of your own, you
may wish to check the original repository for updates.
@@ -425,8 +412,7 @@ repository. It will not touch any of your own branches--not even the
"master" branch that was created for you on clone.
[[fetching-branches]]
-Fetching branches from other repositories
------------------------------------------
+=== Fetching branches from other repositories
You can also track branches from repositories other than the one you
cloned from, using linkgit:git-remote[1]:
@@ -474,8 +460,7 @@ text editor. (See the "CONFIGURATION FILE" section of
linkgit:git-config[1] for details.)
[[exploring-git-history]]
-Exploring Git history
-=====================
+== Exploring Git history
Git is best thought of as a tool for storing the history of a
collection of files. It does this by storing compressed snapshots of
@@ -489,8 +474,7 @@ We start with one specialized tool that is useful for finding the
commit that introduced a bug into a project.
[[using-bisect]]
-How to use bisect to find a regression
---------------------------------------
+=== How to use bisect to find a regression
Suppose version 2.6.18 of your project worked, but the version at
"master" crashes. Sometimes the best way to find the cause of such a
@@ -572,8 +556,7 @@ linkgit:git-bisect[1] for more information about this and other `git
bisect` features.
[[naming-commits]]
-Naming commits
---------------
+=== Naming commits
We have seen several ways of naming commits already:
@@ -637,8 +620,7 @@ e05db0fd4f31dde7005f075a84f96b360d05984b
-------------------------------------------------
[[creating-tags]]
-Creating tags
--------------
+=== Creating tags
We can also create a tag to refer to a particular commit; after
running
@@ -655,8 +637,7 @@ should create a tag object instead; see the linkgit:git-tag[1] man page
for details.
[[browsing-revisions]]
-Browsing revisions
-------------------
+=== Browsing revisions
The linkgit:git-log[1] command can show lists of commits. On its
own, it shows all commits reachable from the parent commit; but you
@@ -697,8 +678,7 @@ multiple independent lines of development, the particular order that
commits are listed in may be somewhat arbitrary.
[[generating-diffs]]
-Generating diffs
-----------------
+=== Generating diffs
You can generate diffs between any two versions using
linkgit:git-diff[1]:
@@ -726,8 +706,7 @@ will generate a file with a patch for each commit reachable from test
but not from master.
[[viewing-old-file-versions]]
-Viewing old file versions
--------------------------
+=== Viewing old file versions
You can always view an old version of a file by just checking out the
correct revision first. But sometimes it is more convenient to be
@@ -742,12 +721,10 @@ Before the colon may be anything that names a commit, and after it
may be any path to a file tracked by Git.
[[history-examples]]
-Examples
---------
+=== Examples
[[counting-commits-on-a-branch]]
-Counting the number of commits on a branch
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Counting the number of commits on a branch
Suppose you want to know how many commits you've made on `mybranch`
since it diverged from `origin`:
@@ -765,8 +742,7 @@ $ git rev-list origin..mybranch | wc -l
-------------------------------------------------
[[checking-for-equal-branches]]
-Check whether two branches point at the same history
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Check whether two branches point at the same history
Suppose you want to check whether two branches point at the same point
in history.
@@ -798,8 +774,7 @@ $ git log origin...master
will return no commits when the two branches are equal.
[[finding-tagged-descendants]]
-Find first tagged version including a given fix
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Find first tagged version including a given fix
Suppose you know that the commit e05db0fd fixed a certain problem.
You'd like to find the earliest tagged release that contains that
@@ -883,8 +858,7 @@ shows that e05db0fd is reachable from itself, from v1.5.0-rc1,
and from v1.5.0-rc2, and not from v1.5.0-rc0.
[[showing-commits-unique-to-a-branch]]
-Showing commits unique to a given branch
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Showing commits unique to a given branch
Suppose you would like to see all the commits reachable from the branch
head named `master` but not from any other head in your repository.
@@ -931,8 +905,7 @@ $ gitk $( git show-ref --heads ) --not $( git show-ref --tags )
syntax such as `--not`.)
[[making-a-release]]
-Creating a changelog and tarball for a software release
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Creating a changelog and tarball for a software release
The linkgit:git-archive[1] command can create a tar or zip archive from
any version of a project; for example:
@@ -983,8 +956,7 @@ and then he just cut-and-pastes the output commands after verifying that
they look OK.
[[Finding-commits-With-given-Content]]
-Finding commits referencing a file with given content
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Finding commits referencing a file with given content
Somebody hands you a copy of a file, and asks which commits modified a
file such that it contained the given content either before or after the
@@ -1000,12 +972,10 @@ student. The linkgit:git-log[1], linkgit:git-diff-tree[1], and
linkgit:git-hash-object[1] man pages may prove helpful.
[[Developing-With-git]]
-Developing with Git
-===================
+== Developing with Git
[[telling-git-your-name]]
-Telling Git your name
----------------------
+=== Telling Git your name
Before creating any commits, you should introduce yourself to Git.
The easiest way to do so is to use linkgit:git-config[1]:
@@ -1030,8 +1000,7 @@ also edit it with your favorite editor.
[[creating-a-new-repository]]
-Creating a new repository
--------------------------
+=== Creating a new repository
Creating a new repository from scratch is very easy:
@@ -1052,8 +1021,7 @@ $ git commit
-------------------------------------------------
[[how-to-make-a-commit]]
-How to make a commit
---------------------
+=== How to make a commit
Creating a new commit takes three steps:
@@ -1148,8 +1116,7 @@ for inclusion in the index (by right-clicking on the diff hunk and
choosing "Stage Hunk For Commit").
[[creating-good-commit-messages]]
-Creating good commit messages
------------------------------
+=== Creating good commit messages
Though not required, it's a good idea to begin the commit message
with a single short (less than 50 character) line summarizing the
@@ -1162,8 +1129,7 @@ rest of the commit in the body.
[[ignoring-files]]
-Ignoring files
---------------
+=== Ignoring files
A project will often generate files that you do 'not' want to track with Git.
This typically includes files generated by a build process or temporary
@@ -1205,8 +1171,7 @@ Some Git commands can also take exclude patterns directly on the
command line. See linkgit:gitignore[5] for the details.
[[how-to-merge]]
-How to merge
-------------
+=== How to merge
You can rejoin two diverging branches of development using
linkgit:git-merge[1]:
@@ -1254,8 +1219,7 @@ has two parents, one pointing to the top of the current branch, and
one to the top of the other branch.
[[resolving-a-merge]]
-Resolving a merge
------------------
+=== Resolving a merge
When a merge isn't resolved automatically, Git leaves the index and
the working tree in a special state that gives you all the
@@ -1297,8 +1261,7 @@ The above is all you need to know to resolve a simple merge. But Git
also provides more information to help resolve conflicts:
[[conflict-resolution]]
-Getting conflict-resolution help during a merge
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Getting conflict-resolution help during a merge
All of the changes that Git was able to merge automatically are
already added to the index file, so linkgit:git-diff[1] shows only
@@ -1401,8 +1364,7 @@ the different stages of that file will be "collapsed", after which
`git diff` will (by default) no longer show diffs for that file.
[[undoing-a-merge]]
-Undoing a merge
----------------
+=== Undoing a merge
If you get stuck and decide to just give up and throw the whole mess
away, you can always return to the pre-merge state with
@@ -1423,8 +1385,7 @@ itself have been merged into another branch, as doing so may confuse
further merges.
[[fast-forwards]]
-Fast-forward merges
--------------------
+=== Fast-forward merges
There is one special case not mentioned above, which is treated
differently. Normally, a merge results in a merge commit, with two
@@ -1438,8 +1399,7 @@ to point at the head of the merged-in branch, without any new commits being
created.
[[fixing-mistakes]]
-Fixing mistakes
----------------
+=== Fixing mistakes
If you've messed up the working tree, but haven't yet committed your
mistake, you can return the entire working tree to the last committed
@@ -1463,8 +1423,7 @@ fundamentally different ways to fix the problem:
a branch that has had its history changed.
[[reverting-a-commit]]
-Fixing a mistake with a new commit
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Fixing a mistake with a new commit
Creating a new commit that reverts an earlier change is very easy;
just pass the linkgit:git-revert[1] command a reference to the bad
@@ -1490,8 +1449,7 @@ conflicts manually, just as in the case of <<resolving-a-merge,
resolving a merge>>.
[[fixing-a-mistake-by-rewriting-history]]
-Fixing a mistake by rewriting history
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Fixing a mistake by rewriting history
If the problematic commit is the most recent commit, and you have not
yet made that commit public, then you may just
@@ -1518,8 +1476,7 @@ this is an advanced topic to be left for
<<cleaning-up-history,another chapter>>.
[[checkout-of-path]]
-Checking out an old version of a file
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Checking out an old version of a file
In the process of undoing a previous bad change, you may find it
useful to check out an older version of a particular file using
@@ -1543,8 +1500,7 @@ $ git show HEAD^:path/to/file
which will display the given version of the file.
[[interrupted-work]]
-Temporarily setting aside work in progress
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Temporarily setting aside work in progress
While you are in the middle of working on something complicated, you
find an unrelated but obvious and trivial bug. You would like to fix it
@@ -1575,8 +1531,7 @@ $ git stash pop
[[ensuring-good-performance]]
-Ensuring good performance
--------------------------
+=== Ensuring good performance
On large repositories, Git depends on compression to keep the history
information from taking up too much space on disk or in memory. Some
@@ -1587,12 +1542,10 @@ to avoid automatic compression kicking in when it is not convenient.
[[ensuring-reliability]]
-Ensuring reliability
---------------------
+=== Ensuring reliability
[[checking-for-corruption]]
-Checking the repository for corruption
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Checking the repository for corruption
The linkgit:git-fsck[1] command runs a number of self-consistency checks
on the repository, and reports on any problems. This may take some
@@ -1618,12 +1571,10 @@ You can run `git fsck --no-dangling` to suppress these messages, and still
view real errors.
[[recovering-lost-changes]]
-Recovering lost changes
-~~~~~~~~~~~~~~~~~~~~~~~
+==== Recovering lost changes
[[reflogs]]
-Reflogs
-^^^^^^^
+===== Reflogs
Say you modify a branch with <<fixing-mistakes,`git reset --hard`>>,
and then realize that the branch was the only reference you had to
@@ -1670,8 +1621,7 @@ same project, the reflog history is not shared: it tells you only about
how the branches in your local repository have changed over time.
[[dangling-object-recovery]]
-Examining dangling objects
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+===== Examining dangling objects
In some situations the reflog may not be able to save you. For example,
suppose you delete a branch, then realize you need the history it
@@ -1715,12 +1665,10 @@ dangling objects can arise in other situations.
[[sharing-development]]
-Sharing development with others
-===============================
+== Sharing development with others
[[getting-updates-With-git-pull]]
-Getting updates with git pull
------------------------------
+=== Getting updates with git pull
After you clone a repository and commit a few changes of your own, you
may wish to check the original repository for updates and merge them
@@ -1783,8 +1731,7 @@ $ git merge branch
are roughly equivalent.
[[submitting-patches]]
-Submitting patches to a project
--------------------------------
+=== Submitting patches to a project
If you just have a few changes, the simplest way to submit them may
just be to send them as patches in email:
@@ -1812,8 +1759,7 @@ Consult the mailing list for your project first to determine
their requirements for submitting patches.
[[importing-patches]]
-Importing patches to a project
-------------------------------
+=== Importing patches to a project
Git also provides a tool called linkgit:git-am[1] (am stands for
"apply mailbox"), for importing such an emailed series of patches.
@@ -1845,8 +1791,7 @@ the original mailbox, with authorship and commit log message each
taken from the message containing each patch.
[[public-repositories]]
-Public Git repositories
------------------------
+=== Public Git repositories
Another way to submit changes to a project is to tell the maintainer
of that project to pull the changes from your repository using
@@ -1886,21 +1831,22 @@ pull from that repository. So the flow of changes, in a situation
where there is one other developer with a public repository, looks
like this:
- you push
- your personal repo ------------------> your public repo
- ^ |
- | |
- | you pull | they pull
- | |
- | |
- | they push V
- their public repo <------------------- their repo
+....
+ you push
+your personal repo ------------------> your public repo
+ ^ |
+ | |
+ | you pull | they pull
+ | |
+ | |
+ | they push V
+their public repo <------------------- their repo
+....
We explain how to do this in the following sections.
[[setting-up-a-public-repository]]
-Setting up a public repository
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Setting up a public repository
Assume your personal repository is in the directory `~/proj`. We
first create a new clone of the repository and tell `git daemon` that it
@@ -1920,8 +1866,7 @@ public repository. You can use scp, rsync, or whatever is most
convenient.
[[exporting-via-git]]
-Exporting a Git repository via the Git protocol
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Exporting a Git repository via the Git protocol
This is the preferred method.
@@ -1942,8 +1887,7 @@ linkgit:git-daemon[1] man page for details. (See especially the
examples section.)
[[exporting-via-http]]
-Exporting a git repository via HTTP
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Exporting a git repository via HTTP
The Git protocol gives better performance and reliability, but on a
host with a web server set up, HTTP exports may be simpler to set up.
@@ -1975,8 +1919,7 @@ for a slightly more sophisticated setup using WebDAV which also
allows pushing over HTTP.)
[[pushing-changes-to-a-public-repository]]
-Pushing changes to a public repository
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Pushing changes to a public repository
Note that the two techniques outlined above (exporting via
<<exporting-via-http,http>> or <<exporting-via-git,git>>) allow other
@@ -2035,8 +1978,7 @@ See the explanations of the `remote.<name>.url`,
linkgit:git-config[1] for details.
[[forcing-push]]
-What to do when a push fails
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== What to do when a push fails
If a push would not result in a <<fast-forwards,fast-forward>> of the
remote branch, then it will fail with an error like:
@@ -2090,8 +2032,7 @@ pull, or by a fetch followed by a rebase; see the
linkgit:gitcvs-migration[7] for more.
[[setting-up-a-shared-repository]]
-Setting up a shared repository
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Setting up a shared repository
Another way to collaborate is by using a model similar to that
commonly used in CVS, where several developers with special rights
@@ -2121,8 +2062,7 @@ advantages over the central shared repository:
"out".
[[setting-up-gitweb]]
-Allowing web browsing of a repository
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Allowing web browsing of a repository
The gitweb cgi script provides users an easy way to browse your
project's revisions, file contents and logs without having to install
@@ -2138,8 +2078,7 @@ linkgit:gitweb[1] for instructions on details setting up a permanent
installation with a CGI or Perl capable server.
[[how-to-get-a-git-repository-with-minimal-history]]
-How to get a Git repository with minimal history
-------------------------------------------------
+=== How to get a Git repository with minimal history
A <<def_shallow_clone,shallow clone>>, with its truncated
history, is useful when one is interested only in recent history
@@ -2158,12 +2097,10 @@ have to result in huge conflicts. This limitation may make such
a repository unsuitable to be used in merge based workflows.
[[sharing-development-examples]]
-Examples
---------
+=== Examples
[[maintaining-topic-branches]]
-Maintaining topic branches for a Linux subsystem maintainer
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Maintaining topic branches for a Linux subsystem maintainer
This describes how Tony Luck uses Git in his role as maintainer of the
IA64 architecture for the Linux kernel.
@@ -2459,8 +2396,7 @@ done
[[cleaning-up-history]]
-Rewriting history and maintaining patch series
-==============================================
+== Rewriting history and maintaining patch series
Normally commits are only added to a project, never taken away or
replaced. Git is designed with this assumption, and violating it will
@@ -2470,8 +2406,7 @@ However, there is a situation in which it can be useful to violate this
assumption.
[[patch-series]]
-Creating the perfect patch series
----------------------------------
+=== Creating the perfect patch series
Suppose you are a contributor to a large project, and you want to add a
complicated feature, and to present it to the other developers in a way
@@ -2503,8 +2438,7 @@ use them, and then explain some of the problems that can arise because
you are rewriting history.
[[using-git-rebase]]
-Keeping a patch series up to date using git rebase
---------------------------------------------------
+=== Keeping a patch series up to date using git rebase
Suppose that you create a branch `mywork` on a remote-tracking branch
`origin`, and create some commits on top of it:
@@ -2591,8 +2525,7 @@ the rebase. See <<interactive-rebase>> for details, and
<<reordering-patch-series>> for alternatives.
[[rewriting-one-commit]]
-Rewriting a single commit
--------------------------
+=== Rewriting a single commit
We saw in <<fixing-a-mistake-by-rewriting-history>> that you can replace the
most recent commit using
@@ -2610,8 +2543,7 @@ If you need to amend commits from deeper in your history, you can
use <<interactive-rebase,interactive rebase's `edit` instruction>>.
[[reordering-patch-series]]
-Reordering or selecting from a patch series
--------------------------------------------
+=== Reordering or selecting from a patch series
Sometimes you want to edit a commit deeper in your history. One
approach is to use `git format-patch` to create a series of patches
@@ -2630,8 +2562,7 @@ $ git am *.patch
-------------------------------------------------
[[interactive-rebase]]
-Using interactive rebases
--------------------------
+=== Using interactive rebases
You can also edit a patch series with an interactive rebase. This is
the same as <<reordering-patch-series,reordering a patch series using
@@ -2688,16 +2619,14 @@ For a more detailed discussion of the procedure and additional tips,
see the "INTERACTIVE MODE" section of linkgit:git-rebase[1].
[[patch-series-tools]]
-Other tools
------------
+=== Other tools
There are numerous other tools, such as StGit, which exist for the
purpose of maintaining a patch series. These are outside of the scope of
this manual.
[[problems-With-rewriting-history]]
-Problems with rewriting history
--------------------------------
+=== Problems with rewriting history
The primary problem with rewriting the history of a branch has to do
with merging. Suppose somebody fetches your branch and merges it into
@@ -2745,8 +2674,7 @@ For true distributed development that supports proper merging,
published branches should never be rewritten.
[[bisect-merges]]
-Why bisecting merge commits can be harder than bisecting linear history
------------------------------------------------------------------------
+=== Why bisecting merge commits can be harder than bisecting linear history
The linkgit:git-bisect[1] command correctly handles history that
includes merge commits. However, when the commit that it finds is a
@@ -2811,12 +2739,10 @@ linear by rebasing against the latest upstream version before
publishing.
[[advanced-branch-management]]
-Advanced branch management
-==========================
+== Advanced branch management
[[fetching-individual-branches]]
-Fetching individual branches
-----------------------------
+=== Fetching individual branches
Instead of using linkgit:git-remote[1], you can also choose just
to update one branch at a time, and to store it locally under an
@@ -2844,8 +2770,7 @@ already have a branch named example-master, it will attempt to
master branch. In more detail:
[[fetch-fast-forwards]]
-git fetch and fast-forwards
----------------------------
+=== git fetch and fast-forwards
In the previous example, when updating an existing branch, `git fetch`
checks to make sure that the most recent commit on the remote
@@ -2882,8 +2807,7 @@ unless you've already created a reference of your own pointing to
them.
[[forcing-fetch]]
-Forcing git fetch to do non-fast-forward updates
-------------------------------------------------
+=== Forcing git fetch to do non-fast-forward updates
If git fetch fails because the new head of a branch is not a
descendant of the old head, you may force the update with:
@@ -2903,8 +2827,7 @@ Be aware that commits that the old version of example/master pointed at
may be lost, as we saw in the previous section.
[[remote-branch-configuration]]
-Configuring remote-tracking branches
-------------------------------------
+=== Configuring remote-tracking branches
We saw above that `origin` is just a shortcut to refer to the
repository that you originally cloned from. This information is
@@ -2955,8 +2878,7 @@ the refspec syntax.
[[git-concepts]]
-Git concepts
-============
+== Git concepts
Git is built on a small number of simple but powerful ideas. While it
is possible to get things done without understanding them, you will find
@@ -2966,8 +2888,7 @@ We start with the most important, the <<def_object_database,object
database>> and the <<def_index,index>>.
[[the-object-database]]
-The Object Database
--------------------
+=== The Object Database
We already saw in <<understanding-commits>> that all commits are stored
@@ -3011,8 +2932,7 @@ There are four different types of objects: "blob", "tree", "commit", and
The object types in some more detail:
[[commit-object]]
-Commit Object
-~~~~~~~~~~~~~
+==== Commit Object
The "commit" object links a physical state of a tree with a description
of how we got there and why. Use the `--pretty=raw` option to
@@ -3064,8 +2984,7 @@ commit whose parent is normally the current HEAD, and whose tree is
taken from the content currently stored in the index.
[[tree-object]]
-Tree Object
-~~~~~~~~~~~
+==== Tree Object
The ever-versatile linkgit:git-show[1] command can also be used to
examine tree objects, but linkgit:git-ls-tree[1] will give you more
@@ -3104,8 +3023,7 @@ Note that the files all have mode 644 or 755: Git actually only pays
attention to the executable bit.
[[blob-object]]
-Blob Object
-~~~~~~~~~~~
+==== Blob Object
You can use linkgit:git-show[1] to examine the contents of a blob; take,
for example, the blob in the entry for `COPYING` from the tree above:
@@ -3134,8 +3052,7 @@ sometimes be useful for browsing the contents of a tree that is not
currently checked out.
[[trust]]
-Trust
-~~~~~
+==== Trust
If you receive the SHA-1 name of a blob from one source, and its contents
from another (possibly untrusted) source, you can still trust that those
@@ -3164,8 +3081,7 @@ like GPG/PGP.
To assist in this, Git also provides the tag object...
[[tag-object]]
-Tag Object
-~~~~~~~~~~
+==== Tag Object
A tag object contains an object, object type, tag name, the name of the
person ("tagger") who created the tag, and a message, which may contain
@@ -3194,8 +3110,7 @@ objects. (Note that linkgit:git-tag[1] can also be used to create
references whose names begin with `refs/tags/`).
[[pack-files]]
-How Git stores objects efficiently: pack files
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== How Git stores objects efficiently: pack files
Newly created objects are initially created in a file named after the
object's SHA-1 hash (stored in `.git/objects`).
@@ -3253,8 +3168,7 @@ The linkgit:git-gc[1] command performs packing, pruning, and more for
you, so is normally the only high-level command you need.
[[dangling-objects]]
-Dangling objects
-~~~~~~~~~~~~~~~~
+==== Dangling objects
The linkgit:git-fsck[1] command will sometimes complain about dangling
objects. They are not a problem.
@@ -3334,8 +3248,7 @@ don't want to do that while the filesystem is mounted.
accesses to a repository but you might receive confusing or scary messages.)
[[recovering-from-repository-corruption]]
-Recovering from repository corruption
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== Recovering from repository corruption
By design, Git treats data trusted to it with caution. However, even in
the absence of bugs in Git itself, it is still possible that hardware or
@@ -3452,8 +3365,7 @@ whole thing. It's up to you--Git does *have* a lot of information, it is
just missing one particular blob version.
[[the-index]]
-The index
----------
+=== The index
The index is a binary file (generally kept in `.git/index`) containing a
sorted list of path names, each with permissions and the SHA-1 of a blob
@@ -3511,8 +3423,7 @@ If you blow the index away entirely, you generally haven't lost any
information as long as you have the name of the tree that it described.
[[submodules]]
-Submodules
-==========
+== Submodules
Large projects are often composed of smaller, self-contained modules. For
example, an embedded Linux distribution's source tree would include every
@@ -3698,8 +3609,8 @@ $ git push
You have to run `git submodule update` after `git pull` if you want to update
submodules, too.
-Pitfalls with submodules
-------------------------
+[[pitfalls-with-submodules]]
+=== Pitfalls with submodules
Always publish the submodule change before publishing the change to the
superproject that references it. If you forget to publish the submodule change,
@@ -3768,8 +3679,7 @@ submodule update` will not overwrite them. Instead, you get the usual
warning about not being able switch from a dirty branch.
[[low-level-operations]]
-Low-level Git operations
-========================
+== Low-level Git operations
Many of the higher-level commands were originally implemented as shell
scripts using a smaller core of low-level Git commands. These can still
@@ -3777,8 +3687,7 @@ be useful when doing unusual things with Git, or just as a way to
understand its inner workings.
[[object-manipulation]]
-Object access and manipulation
-------------------------------
+=== Object access and manipulation
The linkgit:git-cat-file[1] command can show the contents of any object,
though the higher-level linkgit:git-show[1] is usually more useful.
@@ -3795,8 +3704,7 @@ verified by linkgit:git-verify-tag[1], though it is normally simpler to
use linkgit:git-tag[1] for both.
[[the-workflow]]
-The Workflow
-------------
+=== The Workflow
High-level operations such as linkgit:git-commit[1] and
linkgit:git-restore[1] work by moving data
@@ -3811,8 +3719,7 @@ the database or the working directory. Thus there are four main
combinations:
[[working-directory-to-index]]
-working directory -> index
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== working directory -> index
The linkgit:git-update-index[1] command updates the index with
information from the working directory. You generally update the
@@ -3848,8 +3755,7 @@ The previously introduced linkgit:git-add[1] is just a wrapper for
linkgit:git-update-index[1].
[[index-to-object-database]]
-index -> object database
-~~~~~~~~~~~~~~~~~~~~~~~~
+==== index -> object database
You write your current index file to a "tree" object with the program
@@ -3864,8 +3770,7 @@ use that tree to re-generate the index at any time by going in the
other direction:
[[object-database-to-index]]
-object database -> index
-~~~~~~~~~~~~~~~~~~~~~~~~
+==== object database -> index
You read a "tree" file from the object database, and use that to
populate (and overwrite--don't do this if your index contains any
@@ -3881,8 +3786,7 @@ earlier. However, that is only your 'index' file: your working
directory contents have not been modified.
[[index-to-working-directory]]
-index -> working directory
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+==== index -> working directory
You update your working directory from the index by "checking out"
files. This is not a very common operation, since normally you'd just
@@ -3911,8 +3815,7 @@ Finally, there are a few odds and ends which are not purely moving
from one representation to the other:
[[tying-it-all-together]]
-Tying it all together
-~~~~~~~~~~~~~~~~~~~~~
+==== Tying it all together
To commit a tree you have instantiated with `git write-tree`, you'd
create a "commit" object that refers to that tree and the history
@@ -3986,8 +3889,7 @@ Here is a picture that illustrates how various pieces fit together:
[[examining-the-data]]
-Examining the data
-------------------
+=== Examining the data
You can examine the data represented in the object database and the
index with various helper tools. For every object, you can use
@@ -4022,8 +3924,7 @@ $ git cat-file commit HEAD
to see what the top commit was.
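The index side can be inspected just as directly (a brief, illustrative sketch):

-----------------------------------------------
$ git ls-files --stage          # mode, object name, stage and path of every index entry
$ git cat-file -t HEAD          # ask for an object's type instead of its contents
-----------------------------------------------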
[[merging-multiple-trees]]
-Merging multiple trees
-----------------------
+=== Merging multiple trees
Git can help you perform a three-way merge, which can in turn be
used for a many-way merge by repeating the merge procedure several
@@ -4073,8 +3974,7 @@ index file, and you can just write the result out with
[[merging-multiple-trees-2]]
-Merging multiple trees, continued
----------------------------------
+=== Merging multiple trees, continued
Sadly, many merges aren't trivial. If there are files that have
been added, moved or removed, or if both branches have modified the
@@ -4144,15 +4044,13 @@ $ git merge-index git-merge-one-file hello.c
and that is what higher level `git merge -s resolve` is implemented with.
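Spelled out as a sketch, the plumbing sequence for one conflicted path might look
like this (the commit names in angle brackets are placeholders):

-----------------------------------------------
$ git read-tree -m -u <base> <ours> <theirs>   # three-way read into the index
$ git merge-index git-merge-one-file hello.c   # collapse the unmerged stages for one path
$ git write-tree                               # record the merged result as a tree
-----------------------------------------------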
[[hacking-git]]
-Hacking Git
-===========
+== Hacking Git
This chapter covers internal details of the Git implementation which
probably only Git developers need to understand.
[[object-details]]
-Object storage format
----------------------
+=== Object storage format
All objects have a statically determined "type" which identifies the
format of the object (i.e. how it is used, and how it can refer to other
@@ -4182,8 +4080,7 @@ of all objects, and verifies their internal consistency (in addition
to just verifying their superficial consistency through the hash).
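For example (illustrative only):

-----------------------------------------------
$ git cat-file -t HEAD^{tree}   # report the type of the current commit's tree
$ git cat-file -s HEAD          # report the size in bytes of the commit object
$ git fsck --full               # walk the object database and verify it
-----------------------------------------------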
[[birdview-on-the-source-code]]
-A birds-eye view of Git's source code
--------------------------------------
+=== A birds-eye view of Git's source code
It is not always easy for new developers to find their way through Git's
source code. This section gives you a little guidance to show where to
@@ -4392,25 +4289,22 @@ You see, Git is actually the best tool to find out about the source of Git
itself!
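For instance, from a clone of git.git (the search terms are arbitrary examples):

-----------------------------------------------
$ git grep -n read_index -- '*.c'            # find uses of a function
$ git log -S'hashmap_entry' -- '*.h'         # when was a symbol introduced or changed?
$ git log --oneline -- builtin/fetch.c       # the history of a single file
-----------------------------------------------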
[[glossary]]
-Git Glossary
-============
+== Git Glossary
[[git-explained]]
-Git explained
--------------
+=== Git explained
include::glossary-content.txt[]
[[git-quick-start]]
-Appendix A: Git Quick Reference
-===============================
+[appendix]
+== Git Quick Reference
This is a quick summary of the major commands; the previous chapters
explain how these work in more detail.
[[quick-creating-a-new-repository]]
-Creating a new repository
--------------------------
+=== Creating a new repository
From a tarball:
@@ -4431,8 +4325,7 @@ $ cd project
-----------------------------------------------
[[managing-branches]]
-Managing branches
------------------
+=== Managing branches
-----------------------------------------------
$ git branch # list all local branches in this repo
@@ -4496,8 +4389,7 @@ $ git branch -r # list all remote branches
[[exploring-history]]
-Exploring history
------------------
+=== Exploring history
-----------------------------------------------
$ gitk # visualize and browse history
@@ -4532,8 +4424,7 @@ $ git bisect bad # if this revision is bad.
-----------------------------------------------
[[making-changes]]
-Making changes
---------------
+=== Making changes
Make sure Git knows who to blame:
@@ -4563,8 +4454,7 @@ $ git commit -a # use latest content of all tracked files
-----------------------------------------------
[[merging]]
-Merging
--------
+=== Merging
-----------------------------------------------
$ git merge test # merge branch "test" into the current branch
@@ -4574,8 +4464,7 @@ $ git pull . test # equivalent to git merge test
-----------------------------------------------
[[sharing-your-changes]]
-Sharing your changes
---------------------
+=== Sharing your changes
Importing or exporting patches:
@@ -4620,8 +4509,7 @@ $ git push example test
-----------------------------------------------
[[repository-maintenance]]
-Repository maintenance
-----------------------
+=== Repository maintenance
Check for corruption:
@@ -4637,12 +4525,11 @@ $ git gc
[[todo]]
-Appendix B: Notes and todo list for this manual
-===============================================
+[appendix]
+== Notes and todo list for this manual
[[todo-list]]
-Todo list
----------
+=== Todo list
This is a work in progress.
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index a1539a7ce6..98f88a28d3 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -1,7 +1,7 @@
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v2.23.0
+DEF_VER=v2.23.GIT
LF='
'
diff --git a/Makefile b/Makefile
index f9255344ae..de60c8e7aa 100644
--- a/Makefile
+++ b/Makefile
@@ -34,13 +34,8 @@ all::
# library. Support for version 1 will likely be removed in some future
# release of Git, as upstream has all but abandoned it.
#
-# When using USE_LIBPCRE1, define NO_LIBPCRE1_JIT if the PCRE v1
-# library is compiled without --enable-jit. We will auto-detect
-# whether the version of the PCRE v1 library in use has JIT support at
-# all, but we unfortunately can't auto-detect whether JIT support
-# hasn't been compiled in in an otherwise JIT-supporting version. If
-# you have link-time errors about a missing `pcre_jit_exec` define
-# this, or recompile PCRE v1 with --enable-jit.
+# When using USE_LIBPCRE1, define NO_LIBPCRE1_JIT if you want to
+# disable JIT even if supported by your library.
#
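In practice that means a build like the following would keep PCRE v1 but skip JIT
(a hypothetical invocation using only the knobs documented in this comment):

-----------------------------------------------
$ make USE_LIBPCRE1=YesPlease NO_LIBPCRE1_JIT=YesPlease
-----------------------------------------------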
# Define LIBPCREDIR=/foo/bar if your PCRE header and library files are
# in /foo/bar/include and /foo/bar/lib directories. Which version of
@@ -598,6 +593,7 @@ SCRIPT_SH =
SCRIPT_LIB =
TEST_BUILTINS_OBJS =
TEST_PROGRAMS_NEED_X =
+THIRD_PARTY_SOURCES =
# Having this variable in your environment would break pipelines because
# you cause "cd" to echo its destination to stdout. It can also take
@@ -728,6 +724,7 @@ TEST_BUILTINS_OBJS += test-parse-options.o
TEST_BUILTINS_OBJS += test-path-utils.o
TEST_BUILTINS_OBJS += test-pkt-line.o
TEST_BUILTINS_OBJS += test-prio-queue.o
+TEST_BUILTINS_OBJS += test-progress.o
TEST_BUILTINS_OBJS += test-reach.o
TEST_BUILTINS_OBJS += test-read-cache.o
TEST_BUILTINS_OBJS += test-read-midx.o
@@ -818,12 +815,12 @@ VCSSVN_LIB = vcs-svn/lib.a
GENERATED_H += command-list.h
-LIB_H := $(sort $(shell git ls-files '*.h' ':!t/' ':!Documentation/' 2>/dev/null || \
+LIB_H := $(sort $(patsubst ./%,%,$(shell git ls-files '*.h' ':!t/' ':!Documentation/' 2>/dev/null || \
$(FIND) . \
-name .git -prune -o \
-name t -prune -o \
-name Documentation -prune -o \
- -name '*.h' -print))
+ -name '*.h' -print)))
LIB_OBJS += abspath.o
LIB_OBJS += advice.o
@@ -884,7 +881,6 @@ LIB_OBJS += ewah/ewah_io.o
LIB_OBJS += ewah/ewah_rlw.o
LIB_OBJS += exec-cmd.o
LIB_OBJS += fetch-negotiator.o
-LIB_OBJS += fetch-object.o
LIB_OBJS += fetch-pack.o
LIB_OBJS += fsck.o
LIB_OBJS += fsmonitor.o
@@ -948,6 +944,7 @@ LIB_OBJS += preload-index.o
LIB_OBJS += pretty.o
LIB_OBJS += prio-queue.o
LIB_OBJS += progress.o
+LIB_OBJS += promisor-remote.o
LIB_OBJS += prompt.o
LIB_OBJS += protocol.o
LIB_OBJS += quote.o
@@ -965,6 +962,7 @@ LIB_OBJS += refspec.o
LIB_OBJS += ref-filter.o
LIB_OBJS += remote.o
LIB_OBJS += replace-object.o
+LIB_OBJS += repo-settings.o
LIB_OBJS += repository.o
LIB_OBJS += rerere.o
LIB_OBJS += resolve-undo.o
@@ -983,6 +981,7 @@ LIB_OBJS += shallow.o
LIB_OBJS += sideband.o
LIB_OBJS += sigchain.o
LIB_OBJS += split-index.o
+LIB_OBJS += stable-qsort.o
LIB_OBJS += strbuf.o
LIB_OBJS += streaming.o
LIB_OBJS += string-list.o
@@ -1145,6 +1144,20 @@ BUILTIN_OBJS += builtin/verify-tag.o
BUILTIN_OBJS += builtin/worktree.o
BUILTIN_OBJS += builtin/write-tree.o
+# THIRD_PARTY_SOURCES is a list of patterns compatible with the
+# $(filter) and $(filter-out) family of functions. They specify source
+# files which are taken from some third-party source where we want to be
+# less strict about issues such as coding style so we don't diverge from
+# upstream unnecessarily (making merging in future changes easier).
+THIRD_PARTY_SOURCES += compat/inet_ntop.c
+THIRD_PARTY_SOURCES += compat/inet_pton.c
+THIRD_PARTY_SOURCES += compat/nedmalloc/%
+THIRD_PARTY_SOURCES += compat/obstack.%
+THIRD_PARTY_SOURCES += compat/poll/%
+THIRD_PARTY_SOURCES += compat/regex/%
+THIRD_PARTY_SOURCES += sha1collisiondetection/%
+THIRD_PARTY_SOURCES += sha1dc/%
+
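To illustrate how such `%` patterns behave with `$(filter-out)` (the same mechanism
used to compute `COCCI_SOURCES` further down), here is a standalone toy makefile,
not part of the patch:

-----------------------------------------------
$ cat >demo.mk <<'EOF'
SOURCES := attr.c compat/regex/regcomp.c sha1dc/sha1.c builtin/fetch.c
$(info $(filter-out compat/regex/% sha1dc/%,$(SOURCES)))
all: ;
EOF
$ make -f demo.mk
attr.c builtin/fetch.c
-----------------------------------------------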
GITLIBS = common-main.o $(LIB_FILE) $(XDIFF_LIB)
EXTLIBS =
@@ -1714,7 +1727,6 @@ ifdef NO_GETPAGESIZE
endif
ifdef INTERNAL_QSORT
COMPAT_CFLAGS += -DINTERNAL_QSORT
- COMPAT_OBJS += compat/qsort.o
endif
ifdef HAVE_ISO_QSORT_S
COMPAT_CFLAGS += -DHAVE_ISO_QSORT_S
@@ -1871,7 +1883,7 @@ ifndef V
QUIET_MSGFMT = @echo ' ' MSGFMT $@;
QUIET_GCOV = @echo ' ' GCOV $@;
QUIET_SP = @echo ' ' SP $<;
- QUIET_HDR = @echo ' ' HDR $<;
+ QUIET_HDR = @echo ' ' HDR $(<:hcc=h);
QUIET_RC = @echo ' ' RC $@;
QUIET_SUBDIR0 = +@subdir=
QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
@@ -2599,6 +2611,7 @@ FIND_SOURCE_FILES = ( \
-o \( -name 'trash*' -type d -prune \) \
-o \( -name '*.[hcS]' -type f -print \) \
-o \( -name '*.sh' -type f -print \) \
+ | sed -e 's|^\./||' \
)
$(ETAGS_TARGET): FORCE
@@ -2768,11 +2781,16 @@ EXCEPT_HDRS := $(GEN_HDRS) compat/% xdiff/%
ifndef GCRYPT_SHA256
EXCEPT_HDRS += sha256/gcrypt.h
endif
-CHK_HDRS = $(filter-out $(EXCEPT_HDRS),$(patsubst ./%,%,$(LIB_H)))
+CHK_HDRS = $(filter-out $(EXCEPT_HDRS),$(LIB_H))
HCO = $(patsubst %.h,%.hco,$(CHK_HDRS))
+HCC = $(HCO:hco=hcc)
+
+%.hcc: %.h
+ @echo '#include "git-compat-util.h"' >$@
+ @echo '#include "$<"' >>$@
-$(HCO): %.hco: %.h FORCE
- $(QUIET_HDR)$(CC) -include git-compat-util.h -I. -o /dev/null -c -xc $<
+$(HCO): %.hco: %.hcc FORCE
+ $(QUIET_HDR)$(CC) $(ALL_CFLAGS) -o /dev/null -c -xc $<
.PHONY: hdr-check $(HCO)
hdr-check: $(HCO)
@@ -2791,12 +2809,8 @@ check: command-list.h
exit 1; \
fi
-C_SOURCES = $(patsubst %.o,%.c,$(C_OBJ))
-ifdef DC_SHA1_SUBMODULE
-COCCI_SOURCES = $(filter-out sha1collisiondetection/%,$(C_SOURCES))
-else
-COCCI_SOURCES = $(filter-out sha1dc/%,$(C_SOURCES))
-endif
+FOUND_C_SOURCES = $(filter %.c,$(shell $(FIND_SOURCE_FILES)))
+COCCI_SOURCES = $(filter-out $(THIRD_PARTY_SOURCES),$(FOUND_C_SOURCES))
%.cocci.patch: %.cocci $(COCCI_SOURCES)
@echo ' ' SPATCH $<; \
@@ -3028,6 +3042,10 @@ rpm::
@false
.PHONY: rpm
+ifneq ($(INCLUDE_DLLS_IN_ARTIFACTS),)
+OTHER_PROGRAMS += $(shell echo *.dll t/helper/*.dll)
+endif
+
artifacts-tar:: $(ALL_PROGRAMS) $(SCRIPT_LIB) $(BUILT_INS) $(OTHER_PROGRAMS) \
GIT-BUILD-OPTIONS $(TEST_PROGRAMS) $(test_bindir_programs) \
$(MOFILES)
@@ -3081,6 +3099,7 @@ clean: profile-clean coverage-clean cocciclean
$(RM) $(ALL_PROGRAMS) $(SCRIPT_LIB) $(BUILT_INS) git$X
$(RM) $(TEST_PROGRAMS)
$(RM) $(FUZZ_PROGRAMS)
+ $(RM) $(HCC)
$(RM) -r bin-wrappers $(dep_dirs)
$(RM) -r po/build/
$(RM) *.pyc *.pyo */*.pyc */*.pyo command-list.h $(ETAGS_TARGET) tags cscope*
diff --git a/RelNotes b/RelNotes
index 248d137c43..fc657e7d2f 120000
--- a/RelNotes
+++ b/RelNotes
@@ -1 +1 @@
-Documentation/RelNotes/2.23.0.txt
\ No newline at end of file
+Documentation/RelNotes/2.24.0.txt
\ No newline at end of file
diff --git a/apply.c b/apply.c
index cde95369bb..f8a046a6a5 100644
--- a/apply.c
+++ b/apply.c
@@ -1361,11 +1361,32 @@ int parse_git_diff_header(struct strbuf *root,
if (check_header_line(*linenr, patch))
return -1;
if (res > 0)
- return offset;
+ goto done;
break;
}
}
+done:
+ if (!patch->old_name && !patch->new_name) {
+ if (!patch->def_name) {
+ error(Q_("git diff header lacks filename information when removing "
+ "%d leading pathname component (line %d)",
+ "git diff header lacks filename information when removing "
+ "%d leading pathname components (line %d)",
+ parse_hdr_state.p_value),
+ parse_hdr_state.p_value, *linenr);
+ return -128;
+ }
+ patch->old_name = xstrdup(patch->def_name);
+ patch->new_name = xstrdup(patch->def_name);
+ }
+ if ((!patch->new_name && !patch->is_delete) ||
+ (!patch->old_name && !patch->is_new)) {
+ error(_("git diff header lacks filename information "
+ "(line %d)"), *linenr);
+ return -128;
+ }
+ patch->is_toplevel_relative = 1;
return offset;
}
@@ -1546,26 +1567,6 @@ static int find_header(struct apply_state *state,
return -128;
if (git_hdr_len <= len)
continue;
- if (!patch->old_name && !patch->new_name) {
- if (!patch->def_name) {
- error(Q_("git diff header lacks filename information when removing "
- "%d leading pathname component (line %d)",
- "git diff header lacks filename information when removing "
- "%d leading pathname components (line %d)",
- state->p_value),
- state->p_value, state->linenr);
- return -128;
- }
- patch->old_name = xstrdup(patch->def_name);
- patch->new_name = xstrdup(patch->def_name);
- }
- if ((!patch->new_name && !patch->is_delete) ||
- (!patch->old_name && !patch->is_new)) {
- error(_("git diff header lacks filename information "
- "(line %d)"), state->linenr);
- return -128;
- }
- patch->is_toplevel_relative = 1;
*hdrsize = git_hdr_len;
return offset;
}
@@ -4643,6 +4644,7 @@ static int apply_patch(struct apply_state *state,
struct patch *list = NULL, **listp = &list;
int skipped_patch = 0;
int res = 0;
+ int flush_attributes = 0;
state->patch_input_file = filename;
if (read_patch_file(&buf, fd) < 0)
@@ -4670,6 +4672,14 @@ static int apply_patch(struct apply_state *state,
patch_stats(state, patch);
*listp = patch;
listp = &patch->next;
+
+ if ((patch->new_name &&
+ ends_with_path_components(patch->new_name,
+ GITATTRIBUTES_FILE)) ||
+ (patch->old_name &&
+ ends_with_path_components(patch->old_name,
+ GITATTRIBUTES_FILE)))
+ flush_attributes = 1;
}
else {
if (state->apply_verbosity > verbosity_normal)
@@ -4746,6 +4756,8 @@ static int apply_patch(struct apply_state *state,
if (state->summary && state->apply_verbosity > verbosity_silent)
summary_patch_list(list);
+ if (flush_attributes)
+ reset_parsed_attributes();
end:
free_patch_list(list);
strbuf_release(&buf);
diff --git a/apply.h b/apply.h
index a795193435..da3d95fa50 100644
--- a/apply.h
+++ b/apply.h
@@ -1,6 +1,7 @@
#ifndef APPLY_H
#define APPLY_H
+#include "hash.h"
#include "lockfile.h"
#include "string-list.h"
diff --git a/archive-tar.c b/archive-tar.c
index 3e53aac1e6..e16d3f756d 100644
--- a/archive-tar.c
+++ b/archive-tar.c
@@ -142,19 +142,25 @@ static int stream_blocked(const struct object_id *oid)
* string and appends it to a struct strbuf.
*/
static void strbuf_append_ext_header(struct strbuf *sb, const char *keyword,
- const char *value, unsigned int valuelen)
+ const char *value, size_t valuelen)
{
- int len, tmp;
+ size_t orig_len = sb->len;
+ size_t len, tmp;
/* "%u %s=%s\n" */
len = 1 + 1 + strlen(keyword) + 1 + valuelen + 1;
- for (tmp = len; tmp > 9; tmp /= 10)
+ for (tmp = 1; len / 10 >= tmp; tmp *= 10)
len++;
strbuf_grow(sb, len);
- strbuf_addf(sb, "%u %s=", len, keyword);
+ strbuf_addf(sb, "%"PRIuMAX" %s=", (uintmax_t)len, keyword);
strbuf_add(sb, value, valuelen);
strbuf_addch(sb, '\n');
+
+ if (len != sb->len - orig_len)
+ BUG("pax extended header length miscalculated as %"PRIuMAX
+ ", should be %"PRIuMAX,
+ (uintmax_t)len, (uintmax_t)(sb->len - orig_len));
}
/*
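As a worked example of the length fixpoint above (keyword and value length are made
up): each pax record has the form "<len> <keyword>=<value>\n" and <len> must count
its own decimal digits, so the code starts by assuming a one-digit length and then
adds one byte per extra digit. The same computation in shell:

-----------------------------------------------
$ keyword=path valuelen=100
$ len=$((1 + 1 + ${#keyword} + 1 + valuelen + 1))   # assume a 1-digit length field: 108
$ tmp=1
$ while [ $((len / 10)) -ge $tmp ]; do len=$((len + 1)); tmp=$((tmp * 10)); done
$ echo $len     # 3 digits + ' path=' + 100 value bytes + newline
110
-----------------------------------------------

The BUG() check added in the hunk then verifies that this predicted length matches
what was actually appended to the strbuf.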
diff --git a/attr.c b/attr.c
index 93dc16b59c..11f19b541c 100644
--- a/attr.c
+++ b/attr.c
@@ -62,7 +62,7 @@ static struct attr_hashmap g_attr_hashmap;
/* The container for objects stored in "struct attr_hashmap" */
struct attr_hash_entry {
- struct hashmap_entry ent; /* must be the first member! */
+ struct hashmap_entry ent;
const char *key; /* the key; memory should be owned by value */
size_t keylen; /* length of the key */
void *value; /* the stored value */
@@ -70,12 +70,14 @@ struct attr_hash_entry {
/* attr_hashmap comparison function */
static int attr_hash_entry_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct attr_hash_entry *a = entry;
- const struct attr_hash_entry *b = entry_or_key;
+ const struct attr_hash_entry *a, *b;
+
+ a = container_of(eptr, const struct attr_hash_entry, ent);
+ b = container_of(entry_or_key, const struct attr_hash_entry, ent);
return (a->keylen != b->keylen) || strncmp(a->key, b->key, a->keylen);
}
@@ -98,10 +100,10 @@ static void *attr_hashmap_get(struct attr_hashmap *map,
if (!map->map.tablesize)
attr_hashmap_init(map);
- hashmap_entry_init(&k, memhash(key, keylen));
+ hashmap_entry_init(&k.ent, memhash(key, keylen));
k.key = key;
k.keylen = keylen;
- e = hashmap_get(&map->map, &k, NULL);
+ e = hashmap_get_entry(&map->map, &k, ent, NULL);
return e ? e->value : NULL;
}
@@ -117,12 +119,12 @@ static void attr_hashmap_add(struct attr_hashmap *map,
attr_hashmap_init(map);
e = xmalloc(sizeof(struct attr_hash_entry));
- hashmap_entry_init(e, memhash(key, keylen));
+ hashmap_entry_init(&e->ent, memhash(key, keylen));
e->key = key;
e->keylen = keylen;
e->value = value;
- hashmap_add(&map->map, e);
+ hashmap_add(&map->map, &e->ent);
}
struct all_attrs_item {
@@ -161,12 +163,12 @@ static void all_attrs_init(struct attr_hashmap *map, struct attr_check *check)
if (size != check->all_attrs_nr) {
struct attr_hash_entry *e;
struct hashmap_iter iter;
- hashmap_iter_init(&map->map, &iter);
REALLOC_ARRAY(check->all_attrs, size);
check->all_attrs_nr = size;
- while ((e = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(&map->map, &iter, e,
+ ent /* member name */) {
const struct git_attr *a = e->value;
check->all_attrs[a->attr_nr].attr = a;
}
@@ -259,7 +261,7 @@ struct pattern {
const char *pattern;
int patternlen;
int nowildcardlen;
- unsigned flags; /* EXC_FLAG_* */
+ unsigned flags; /* PATTERN_FLAG_* */
};
/*
@@ -400,11 +402,11 @@ static struct match_attr *parse_attr_line(const char *line, const char *src,
char *p = (char *)&(res->state[num_attr]);
memcpy(p, name, namelen);
res->u.pat.pattern = p;
- parse_exclude_pattern(&res->u.pat.pattern,
+ parse_path_pattern(&res->u.pat.pattern,
&res->u.pat.patternlen,
&res->u.pat.flags,
&res->u.pat.nowildcardlen);
- if (res->u.pat.flags & EXC_FLAG_NEGATIVE) {
+ if (res->u.pat.flags & PATTERN_FLAG_NEGATIVE) {
warning(_("Negative patterns are ignored in git attributes\n"
"Use '\\!' for literal leading exclamation."));
goto fail_return;
@@ -991,10 +993,10 @@ static int path_matches(const char *pathname, int pathlen,
int prefix = pat->nowildcardlen;
int isdir = (pathlen && pathname[pathlen - 1] == '/');
- if ((pat->flags & EXC_FLAG_MUSTBEDIR) && !isdir)
+ if ((pat->flags & PATTERN_FLAG_MUSTBEDIR) && !isdir)
return 0;
- if (pat->flags & EXC_FLAG_NODIR) {
+ if (pat->flags & PATTERN_FLAG_NODIR) {
return match_basename(pathname + basename_offset,
pathlen - basename_offset - isdir,
pattern, prefix,
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index c329b7218b..9f099b9529 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -1,6 +1,5 @@
-resources:
-- repo: self
- fetchDepth: 1
+variables:
+ Agent.Source.Git.ShallowFetchDepth: 1
jobs:
- job: windows_build
@@ -131,6 +130,165 @@ jobs:
PathtoPublish: t/failed-test-artifacts
ArtifactName: failed-test-artifacts
+- job: vs_build
+ displayName: Visual Studio Build
+ condition: succeeded()
+ pool: Hosted VS2017
+ timeoutInMinutes: 240
+ steps:
+ - powershell: |
+ if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") {
+ net use s: \\gitfileshare.file.core.windows.net\test-cache "$GITFILESHAREPWD" /user:AZURE\gitfileshare /persistent:no
+ cmd /c mklink /d "$(Build.SourcesDirectory)\test-cache" S:\
+ }
+ displayName: 'Mount test-cache'
+ env:
+ GITFILESHAREPWD: $(gitfileshare.pwd)
+ - powershell: |
+ $urlbase = "https://dev.azure.com/git-for-windows/git/_apis/build/builds"
+ $id = ((Invoke-WebRequest -UseBasicParsing "${urlbase}?definitions=22&statusFilter=completed&resultFilter=succeeded&`$top=1").content | ConvertFrom-JSON).value[0].id
+ $downloadUrl = ((Invoke-WebRequest -UseBasicParsing "${urlbase}/$id/artifacts").content | ConvertFrom-JSON).value[1].resource.downloadUrl
+ (New-Object Net.WebClient).DownloadFile($downloadUrl,"git-sdk-64-minimal.zip")
+ Expand-Archive git-sdk-64-minimal.zip -DestinationPath . -Force
+ Remove-Item git-sdk-64-minimal.zip
+
+ # Let Git ignore the SDK and the test-cache
+ "/git-sdk-64-minimal/`n/test-cache/`n" | Out-File -NoNewLine -Encoding ascii -Append "$(Build.SourcesDirectory)\.git\info\exclude"
+ displayName: 'Download git-sdk-64-minimal'
+ - powershell: |
+ & git-sdk-64-minimal\usr\bin\bash.exe -lc @"
+ make vcxproj
+ "@
+ if (!$?) { exit(1) }
+ displayName: Generate Visual Studio Solution
+ env:
+ HOME: $(Build.SourcesDirectory)
+ MSYSTEM: MINGW64
+ DEVELOPER: 1
+ NO_PERL: 1
+ GIT_CONFIG_PARAMETERS: "'user.name=CI' 'user.email=ci@git'"
+ - powershell: |
+ $urlbase = "https://dev.azure.com/git/git/_apis/build/builds"
+ $id = ((Invoke-WebRequest -UseBasicParsing "${urlbase}?definitions=9&statusFilter=completed&resultFilter=succeeded&`$top=1").content | ConvertFrom-JSON).value[0].id
+ $downloadUrl = ((Invoke-WebRequest -UseBasicParsing "${urlbase}/$id/artifacts").content | ConvertFrom-JSON).value[0].resource.downloadUrl
+ (New-Object Net.WebClient).DownloadFile($downloadUrl, "compat.zip")
+ Expand-Archive compat.zip -DestinationPath . -Force
+ Remove-Item compat.zip
+ displayName: 'Download vcpkg artifacts'
+ - task: MSBuild@1
+ inputs:
+ solution: git.sln
+ platform: x64
+ configuration: Release
+ maximumCpuCount: 4
+ - powershell: |
+ & compat\vcbuild\vcpkg_copy_dlls.bat release
+ if (!$?) { exit(1) }
+ & git-sdk-64-minimal\usr\bin\bash.exe -lc @"
+ mkdir -p artifacts &&
+ eval \"`$(make -n artifacts-tar INCLUDE_DLLS_IN_ARTIFACTS=YesPlease ARTIFACTS_DIRECTORY=artifacts | grep ^tar)\"
+ "@
+ if (!$?) { exit(1) }
+ displayName: Bundle artifact tar
+ env:
+ HOME: $(Build.SourcesDirectory)
+ MSYSTEM: MINGW64
+ DEVELOPER: 1
+ NO_PERL: 1
+ MSVC: 1
+ VCPKG_ROOT: $(Build.SourcesDirectory)\compat\vcbuild\vcpkg
+ - powershell: |
+ $tag = (Invoke-WebRequest -UseBasicParsing "https://gitforwindows.org/latest-tag.txt").content
+ $version = (Invoke-WebRequest -UseBasicParsing "https://gitforwindows.org/latest-version.txt").content
+ $url = "https://github.com/git-for-windows/git/releases/download/${tag}/PortableGit-${version}-64-bit.7z.exe"
+ (New-Object Net.WebClient).DownloadFile($url,"PortableGit.exe")
+ & .\PortableGit.exe -y -oartifacts\PortableGit
+ # Wait until it is unpacked
+ while (-not @(Remove-Item -ErrorAction SilentlyContinue PortableGit.exe; $?)) { sleep 1 }
+ displayName: Download & extract portable Git
+ - task: PublishPipelineArtifact@0
+ displayName: 'Publish Pipeline Artifact: MSVC test artifacts'
+ inputs:
+ artifactName: 'vs-artifacts'
+ targetPath: '$(Build.SourcesDirectory)\artifacts'
+ - powershell: |
+ if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") {
+ cmd /c rmdir "$(Build.SourcesDirectory)\test-cache"
+ }
+ displayName: 'Unmount test-cache'
+ condition: true
+ env:
+ GITFILESHAREPWD: $(gitfileshare.pwd)
+
+- job: vs_test
+ displayName: Visual Studio Test
+ dependsOn: vs_build
+ condition: succeeded()
+ pool: Hosted
+ timeoutInMinutes: 240
+ strategy:
+ parallel: 10
+ steps:
+ - powershell: |
+ if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") {
+ net use s: \\gitfileshare.file.core.windows.net\test-cache "$GITFILESHAREPWD" /user:AZURE\gitfileshare /persistent:no
+ cmd /c mklink /d "$(Build.SourcesDirectory)\test-cache" S:\
+ }
+ displayName: 'Mount test-cache'
+ env:
+ GITFILESHAREPWD: $(gitfileshare.pwd)
+ - task: DownloadPipelineArtifact@0
+ displayName: 'Download Pipeline Artifact: VS test artifacts'
+ inputs:
+ artifactName: 'vs-artifacts'
+ targetPath: '$(Build.SourcesDirectory)'
+ - powershell: |
+ & PortableGit\git-cmd.exe --command=usr\bin\bash.exe -lc @"
+ test -f artifacts.tar.gz || {
+ echo No test artifacts found\; skipping >&2
+ exit 0
+ }
+ tar xf artifacts.tar.gz || exit 1
+
+ # Let Git ignore the SDK and the test-cache
+ printf '%s\n' /PortableGit/ /test-cache/ >>.git/info/exclude
+
+ cd t &&
+ PATH=\"`$PWD/helper:`$PATH\" &&
+ test-tool.exe run-command testsuite -V -x --write-junit-xml \
+ `$(test-tool.exe path-utils slice-tests \
+ `$SYSTEM_JOBPOSITIONINPHASE `$SYSTEM_TOTALJOBSINPHASE t[0-9]*.sh)
+ "@
+ if (!$?) { exit(1) }
+ displayName: 'Test (parallel)'
+ env:
+ HOME: $(Build.SourcesDirectory)
+ MSYSTEM: MINGW64
+ NO_SVN_TESTS: 1
+ GIT_TEST_SKIP_REBASE_P: 1
+ - powershell: |
+ if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") {
+ cmd /c rmdir "$(Build.SourcesDirectory)\test-cache"
+ }
+ displayName: 'Unmount test-cache'
+ condition: true
+ env:
+ GITFILESHAREPWD: $(gitfileshare.pwd)
+ - task: PublishTestResults@2
+ displayName: 'Publish Test Results **/TEST-*.xml'
+ inputs:
+ mergeTestResults: true
+ testRunTitle: 'vs'
+ platform: Windows
+ publishRunAttachments: false
+ condition: succeededOrFailed()
+ - task: PublishBuildArtifacts@1
+ displayName: 'Publish trash directories of failed tests'
+ condition: failed()
+ inputs:
+ PathtoPublish: t/failed-test-artifacts
+ ArtifactName: failed-vs-test-artifacts
+
- job: linux_clang
displayName: linux-clang
condition: succeeded()
@@ -354,7 +512,7 @@ jobs:
test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1
sudo apt-get update &&
- sudo apt-get install -y coccinelle &&
+ sudo apt-get install -y coccinelle libcurl4-openssl-dev libssl-dev libexpat-dev gettext &&
export jobname=StaticAnalysis &&
@@ -374,7 +532,7 @@ jobs:
test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1
sudo apt-get update &&
- sudo apt-get install -y asciidoc xmlto asciidoctor &&
+ sudo apt-get install -y asciidoc xmlto asciidoctor docbook-xsl-ns &&
export ALREADY_HAVE_ASCIIDOCTOR=yes. &&
export jobname=Documentation &&
diff --git a/banned.h b/banned.h
index 447af24807..60a18d4403 100644
--- a/banned.h
+++ b/banned.h
@@ -26,7 +26,7 @@
#define vsprintf(...) BANNED(vsprintf)
#else
#define sprintf(buf,fmt,arg) BANNED(sprintf)
-#define vsprintf(buf,fmt,arg) BANNED(sprintf)
+#define vsprintf(buf,fmt,arg) BANNED(vsprintf)
#endif
#endif /* BANNED_H */
diff --git a/bisect.c b/bisect.c
index e87ac29a51..e81c91d02c 100644
--- a/bisect.c
+++ b/bisect.c
@@ -707,7 +707,7 @@ static int bisect_checkout(const struct object_id *bisect_rev, int no_checkout)
{
char bisect_rev_hex[GIT_MAX_HEXSZ + 1];
- memcpy(bisect_rev_hex, oid_to_hex(bisect_rev), GIT_SHA1_HEXSZ + 1);
+ memcpy(bisect_rev_hex, oid_to_hex(bisect_rev), the_hash_algo->hexsz + 1);
update_ref(NULL, "BISECT_EXPECTED_REV", bisect_rev, NULL, 0, UPDATE_REFS_DIE_ON_ERR);
argv_checkout[2] = bisect_rev_hex;
diff --git a/blame.c b/blame.c
index 36a2e7ef11..29770e5c81 100644
--- a/blame.c
+++ b/blame.c
@@ -144,7 +144,7 @@ static void append_merge_parents(struct repository *r,
while (!strbuf_getwholeline_fd(&line, merge_head, '\n')) {
struct object_id oid;
- if (line.len < GIT_SHA1_HEXSZ || get_oid_hex(line.buf, &oid))
+ if (get_oid_hex(line.buf, &oid))
die("unknown line in '%s': %s",
git_path_merge_head(r), line.buf);
tail = append_parent(r, tail, &oid);
@@ -417,14 +417,15 @@ static void get_fingerprint(struct fingerprint *result,
/* Ignore whitespace pairs */
if (hash == 0)
continue;
- hashmap_entry_init(entry, hash);
+ hashmap_entry_init(&entry->entry, hash);
- found_entry = hashmap_get(&result->map, entry, NULL);
+ found_entry = hashmap_get_entry(&result->map, entry,
+ /* member name */ entry, NULL);
if (found_entry) {
found_entry->count += 1;
} else {
entry->count = 1;
- hashmap_add(&result->map, entry);
+ hashmap_add(&result->map, &entry->entry);
++entry;
}
}
@@ -432,7 +433,7 @@ static void get_fingerprint(struct fingerprint *result,
static void free_fingerprint(struct fingerprint *f)
{
- hashmap_free(&f->map, 0);
+ hashmap_free(&f->map);
free(f->entries);
}
@@ -449,10 +450,10 @@ static int fingerprint_similarity(struct fingerprint *a, struct fingerprint *b)
struct hashmap_iter iter;
const struct fingerprint_entry *entry_a, *entry_b;
- hashmap_iter_init(&b->map, &iter);
-
- while ((entry_b = hashmap_iter_next(&iter))) {
- if ((entry_a = hashmap_get(&a->map, entry_b, NULL))) {
+ hashmap_for_each_entry(&b->map, &iter, entry_b,
+ entry /* member name */) {
+ entry_a = hashmap_get_entry(&a->map, entry_b, entry, NULL);
+ if (entry_a) {
intersection += entry_a->count < entry_b->count ?
entry_a->count : entry_b->count;
}
@@ -470,10 +471,12 @@ static void fingerprint_subtract(struct fingerprint *a, struct fingerprint *b)
hashmap_iter_init(&b->map, &iter);
- while ((entry_b = hashmap_iter_next(&iter))) {
- if ((entry_a = hashmap_get(&a->map, entry_b, NULL))) {
+ hashmap_for_each_entry(&b->map, &iter, entry_b,
+ entry /* member name */) {
+ entry_a = hashmap_get_entry(&a->map, entry_b, entry, NULL);
+ if (entry_a) {
if (entry_a->count <= entry_b->count)
- hashmap_remove(&a->map, entry_b, NULL);
+ hashmap_remove(&a->map, &entry_b->entry, NULL);
else
entry_a->count -= entry_b->count;
}
diff --git a/builtin/am.c b/builtin/am.c
index 1aea657a7f..8181c2aef3 100644
--- a/builtin/am.c
+++ b/builtin/am.c
@@ -24,7 +24,6 @@
#include "sequencer.h"
#include "revision.h"
#include "merge-recursive.h"
-#include "revision.h"
#include "log-tree.h"
#include "notes-utils.h"
#include "rerere.h"
@@ -1072,19 +1071,6 @@ static const char *msgnum(const struct am_state *state)
}
/**
- * Refresh and write index.
- */
-static void refresh_and_write_cache(void)
-{
- struct lock_file lock_file = LOCK_INIT;
-
- hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
- refresh_cache(REFRESH_QUIET);
- if (write_locked_index(&the_index, &lock_file, COMMIT_LOCK))
- die(_("unable to write index file"));
-}
-
-/**
* Dies with a user-friendly message on how to proceed after resolving the
* problem. This message can be overridden with state->resolvemsg.
*/
@@ -1272,7 +1258,9 @@ static void get_commit_info(struct am_state *state, struct commit *commit)
buffer = logmsg_reencode(commit, NULL, get_commit_output_encoding());
ident_line = find_commit_header(buffer, "author", &ident_len);
-
+ if (!ident_line)
+ die(_("missing author line in commit %s"),
+ oid_to_hex(&commit->object.oid));
if (split_ident_line(&id, ident_line, ident_len) < 0)
die(_("invalid ident line: %.*s"), (int)ident_len, ident_line);
@@ -1538,7 +1526,7 @@ static int fall_back_threeway(const struct am_state *state, const char *index_pa
o.branch1 = "HEAD";
their_tree_name = xstrfmt("%.*s", linelen(state->msg), state->msg);
o.branch2 = their_tree_name;
- o.detect_directory_renames = 0;
+ o.detect_directory_renames = MERGE_DIRECTORY_RENAMES_NONE;
if (state->quiet)
o.verbosity = 0;
@@ -1703,7 +1691,8 @@ static void am_run(struct am_state *state, int resume)
unlink(am_path(state, "dirtyindex"));
- refresh_and_write_cache();
+ if (refresh_and_write_cache(REFRESH_QUIET, 0, 0) < 0)
+ die(_("unable to write index file"));
if (repo_index_has_changes(the_repository, NULL, &sb)) {
write_state_bool(state, "dirtyindex", 1);
diff --git a/builtin/blame.c b/builtin/blame.c
index b6534d4dea..e946ba6cd9 100644
--- a/builtin/blame.c
+++ b/builtin/blame.c
@@ -26,7 +26,6 @@
#include "progress.h"
#include "object-store.h"
#include "blame.h"
-#include "string-list.h"
#include "refs.h"
static char blame_usage[] = N_("git blame [<options>] [<rev-opts>] [<rev>] [--] <file>");
@@ -460,7 +459,7 @@ static void emit_other(struct blame_scoreboard *sb, struct blame_entry *ent, int
for (cnt = 0; cnt < ent->num_lines; cnt++) {
char ch;
- int length = (opt & OUTPUT_LONG_OBJECT_NAME) ? GIT_SHA1_HEXSZ : abbrev;
+ int length = (opt & OUTPUT_LONG_OBJECT_NAME) ? the_hash_algo->hexsz : abbrev;
if (opt & OUTPUT_COLOR_LINE) {
if (cnt > 0) {
@@ -885,6 +884,7 @@ int cmd_blame(int argc, const char **argv, const char *prefix)
struct range_set ranges;
unsigned int range_i;
long anchor;
+ const int hexsz = the_hash_algo->hexsz;
setup_default_color_by_age();
git_config(git_blame_config, &output_option);
@@ -931,11 +931,11 @@ parse_done:
} else if (show_progress < 0)
show_progress = isatty(2);
- if (0 < abbrev && abbrev < GIT_SHA1_HEXSZ)
+ if (0 < abbrev && abbrev < hexsz)
/* one more abbrev length is needed for the boundary commit */
abbrev++;
else if (!abbrev)
- abbrev = GIT_SHA1_HEXSZ;
+ abbrev = hexsz;
if (revs_file && read_ancestry(revs_file))
die_errno("reading graft file '%s' failed", revs_file);
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index 995d47c85a..d6a1aa74cd 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -15,6 +15,7 @@
#include "sha1-array.h"
#include "packfile.h"
#include "object-store.h"
+#include "promisor-remote.h"
struct batch_options {
int enabled;
@@ -524,8 +525,8 @@ static int batch_objects(struct batch_options *opt)
if (opt->all_objects) {
struct object_cb_data cb;
- if (repository_format_partial_clone)
- warning("This repository has extensions.partialClone set. Some objects may not be loaded.");
+ if (has_promisor_remote())
+ warning("This repository uses promisor remotes. Some objects may not be loaded.");
cb.opt = opt;
cb.expand = &data;
diff --git a/builtin/check-ignore.c b/builtin/check-ignore.c
index 599097304b..5a4f92395f 100644
--- a/builtin/check-ignore.c
+++ b/builtin/check-ignore.c
@@ -32,19 +32,19 @@ static const struct option check_ignore_options[] = {
OPT_END()
};
-static void output_exclude(const char *path, struct exclude *exclude)
+static void output_pattern(const char *path, struct path_pattern *pattern)
{
- char *bang = (exclude && exclude->flags & EXC_FLAG_NEGATIVE) ? "!" : "";
- char *slash = (exclude && exclude->flags & EXC_FLAG_MUSTBEDIR) ? "/" : "";
+ char *bang = (pattern && pattern->flags & PATTERN_FLAG_NEGATIVE) ? "!" : "";
+ char *slash = (pattern && pattern->flags & PATTERN_FLAG_MUSTBEDIR) ? "/" : "";
if (!nul_term_line) {
if (!verbose) {
write_name_quoted(path, stdout, '\n');
} else {
- if (exclude) {
- quote_c_style(exclude->el->src, NULL, stdout, 0);
+ if (pattern) {
+ quote_c_style(pattern->pl->src, NULL, stdout, 0);
printf(":%d:%s%s%s\t",
- exclude->srcpos,
- bang, exclude->pattern, slash);
+ pattern->srcpos,
+ bang, pattern->pattern, slash);
}
else {
printf("::\t");
@@ -56,11 +56,11 @@ static void output_exclude(const char *path, struct exclude *exclude)
if (!verbose) {
printf("%s%c", path, '\0');
} else {
- if (exclude)
+ if (pattern)
printf("%s%c%d%c%s%s%s%c%s%c",
- exclude->el->src, '\0',
- exclude->srcpos, '\0',
- bang, exclude->pattern, slash, '\0',
+ pattern->pl->src, '\0',
+ pattern->srcpos, '\0',
+ bang, pattern->pattern, slash, '\0',
path, '\0');
else
printf("%c%c%c%s%c", '\0', '\0', '\0', path, '\0');
@@ -74,7 +74,7 @@ static int check_ignore(struct dir_struct *dir,
const char *full_path;
char *seen;
int num_ignored = 0, i;
- struct exclude *exclude;
+ struct path_pattern *pattern;
struct pathspec pathspec;
if (!argc) {
@@ -103,15 +103,15 @@ static int check_ignore(struct dir_struct *dir,
seen = find_pathspecs_matching_against_index(&pathspec, &the_index);
for (i = 0; i < pathspec.nr; i++) {
full_path = pathspec.items[i].match;
- exclude = NULL;
+ pattern = NULL;
if (!seen[i]) {
int dtype = DT_UNKNOWN;
- exclude = last_exclude_matching(dir, &the_index,
+ pattern = last_matching_pattern(dir, &the_index,
full_path, &dtype);
}
- if (!quiet && (exclude || show_non_matching))
- output_exclude(pathspec.items[i].original, exclude);
- if (exclude)
+ if (!quiet && (pattern || show_non_matching))
+ output_pattern(pathspec.items[i].original, pattern);
+ if (pattern)
num_ignored++;
}
free(seen);
diff --git a/builtin/checkout.c b/builtin/checkout.c
index 6123f732a2..3634a3dac1 100644
--- a/builtin/checkout.c
+++ b/builtin/checkout.c
@@ -126,6 +126,7 @@ static int update_some(const struct object_id *oid, struct strbuf *base,
if (pos >= 0) {
struct cache_entry *old = active_cache[pos];
if (ce->ce_mode == old->ce_mode &&
+ !ce_intent_to_add(old) &&
oideq(&ce->oid, &old->oid)) {
old->ce_flags |= CE_UPDATE;
discard_cache_entry(ce);
@@ -708,11 +709,11 @@ static int merge_working_tree(const struct checkout_opts *opts,
* give up or do a real merge, depending on
* whether the merge flag was used.
*/
- struct tree *result;
struct tree *work;
struct tree *old_tree;
struct merge_options o;
struct strbuf sb = STRBUF_INIT;
+ struct strbuf old_commit_shortname = STRBUF_INIT;
if (!opts->merge)
return 1;
@@ -730,13 +731,6 @@ static int merge_working_tree(const struct checkout_opts *opts,
"the following files:\n%s"), sb.buf);
strbuf_release(&sb);
- if (repo_index_has_changes(the_repository,
- get_commit_tree(old_branch_info->commit),
- &sb))
- warning(_("staged changes in the following files may be lost: %s"),
- sb.buf);
- strbuf_release(&sb);
-
/* Do more real merge */
/*
@@ -760,7 +754,7 @@ static int merge_working_tree(const struct checkout_opts *opts,
*/
init_merge_options(&o, the_repository);
o.verbosity = 0;
- work = write_tree_from_memory(&o);
+ work = write_in_core_index_as_tree(the_repository);
ret = reset_tree(new_tree,
opts, 1,
@@ -768,19 +762,25 @@ static int merge_working_tree(const struct checkout_opts *opts,
if (ret)
return ret;
o.ancestor = old_branch_info->name;
+ if (old_branch_info->name == NULL) {
+ strbuf_add_unique_abbrev(&old_commit_shortname,
+ &old_branch_info->commit->object.oid,
+ DEFAULT_ABBREV);
+ o.ancestor = old_commit_shortname.buf;
+ }
o.branch1 = new_branch_info->name;
o.branch2 = "local";
ret = merge_trees(&o,
new_tree,
work,
- old_tree,
- &result);
+ old_tree);
if (ret < 0)
exit(128);
ret = reset_tree(new_tree,
opts, 0,
writeout_error);
strbuf_release(&o.obuf);
+ strbuf_release(&old_commit_shortname);
if (ret)
return ret;
}
@@ -1714,6 +1714,15 @@ int cmd_checkout(int argc, const char **argv, const char *prefix)
opts.checkout_index = -2; /* default on */
opts.checkout_worktree = -2; /* default on */
+ if (argc == 3 && !strcmp(argv[1], "-b")) {
+ /*
+ * User ran 'git checkout -b <branch>' and expects
+ * the same behavior as 'git switch -c <branch>'.
+ */
+ opts.switch_branch_doing_nothing_is_ok = 0;
+ opts.only_merge_on_switching_branches = 1;
+ }
+
options = parse_options_dup(checkout_options);
options = add_common_options(&opts, options);
options = add_common_switch_branch_options(&opts, options);
diff --git a/builtin/clean.c b/builtin/clean.c
index d5579da716..5abf087e7c 100644
--- a/builtin/clean.c
+++ b/builtin/clean.c
@@ -158,7 +158,8 @@ static int remove_dirs(struct strbuf *path, const char *prefix, int force_flag,
*dir_gone = 1;
- if ((force_flag & REMOVE_DIR_KEEP_NESTED_GIT) && is_nonbare_repository_dir(path)) {
+ if ((force_flag & REMOVE_DIR_KEEP_NESTED_GIT) &&
+ is_nonbare_repository_dir(path)) {
if (!quiet) {
quote_path_relative(path->buf, prefix, &quoted);
printf(dry_run ? _(msg_would_skip_git_dir) : _(msg_skip_git_dir),
@@ -648,7 +649,7 @@ static int filter_by_patterns_cmd(void)
struct strbuf confirm = STRBUF_INIT;
struct strbuf **ignore_list;
struct string_list_item *item;
- struct exclude_list *el;
+ struct pattern_list *pl;
int changed = -1, i;
for (;;) {
@@ -671,7 +672,7 @@ static int filter_by_patterns_cmd(void)
break;
memset(&dir, 0, sizeof(dir));
- el = add_exclude_list(&dir, EXC_CMDL, "manual exclude");
+ pl = add_pattern_list(&dir, EXC_CMDL, "manual exclude");
ignore_list = strbuf_split_max(&confirm, ' ', 0);
for (i = 0; ignore_list[i]; i++) {
@@ -679,7 +680,7 @@ static int filter_by_patterns_cmd(void)
if (!ignore_list[i]->len)
continue;
- add_exclude(ignore_list[i]->buf, "", 0, el, -(i+1));
+ add_pattern(ignore_list[i]->buf, "", 0, pl, -(i+1));
}
changed = 0;
@@ -901,7 +902,7 @@ int cmd_clean(int argc, const char **argv, const char *prefix)
struct pathspec pathspec;
struct strbuf buf = STRBUF_INIT;
struct string_list exclude_list = STRING_LIST_INIT_NODUP;
- struct exclude_list *el;
+ struct pattern_list *pl;
struct string_list_item *item;
const char *qname;
struct option options[] = {
@@ -946,9 +947,19 @@ int cmd_clean(int argc, const char **argv, const char *prefix)
if (force > 1)
rm_flags = 0;
+ else
+ dir.flags |= DIR_SKIP_NESTED_GIT;
dir.flags |= DIR_SHOW_OTHER_DIRECTORIES;
+ if (argc) {
+ /*
+ * Remaining args implies pathspecs specified, and we should
+ * recurse within those.
+ */
+ remove_directories = 1;
+ }
+
if (remove_directories)
dir.flags |= DIR_SHOW_IGNORED_TOO | DIR_KEEP_UNTRACKED_CONTENTS;
@@ -958,9 +969,9 @@ int cmd_clean(int argc, const char **argv, const char *prefix)
if (!ignored)
setup_standard_excludes(&dir);
- el = add_exclude_list(&dir, EXC_CMDL, "--exclude option");
+ pl = add_pattern_list(&dir, EXC_CMDL, "--exclude option");
for (i = 0; i < exclude_list.nr; i++)
- add_exclude(exclude_list.items[i].string, "", 0, el, -(i+1));
+ add_pattern(exclude_list.items[i].string, "", 0, pl, -(i+1));
parse_pathspec(&pathspec, 0,
PATHSPEC_PREFER_CWD,
@@ -1007,6 +1018,7 @@ int cmd_clean(int argc, const char **argv, const char *prefix)
for_each_string_list_item(item, &del_list) {
struct stat st;
+ strbuf_reset(&abs_path);
if (prefix)
strbuf_addstr(&abs_path, prefix);
@@ -1040,7 +1052,6 @@ int cmd_clean(int argc, const char **argv, const char *prefix)
printf(dry_run ? _(msg_would_remove) : _(msg_remove), qname);
}
}
- strbuf_reset(&abs_path);
}
strbuf_release(&abs_path);
diff --git a/builtin/clone.c b/builtin/clone.c
index f665b28ccc..c46ee29f0a 100644
--- a/builtin/clone.c
+++ b/builtin/clone.c
@@ -32,7 +32,6 @@
#include "connected.h"
#include "packfile.h"
#include "list-objects-filter-options.h"
-#include "object-store.h"
/*
* Overall FIXMEs:
@@ -785,7 +784,7 @@ static int checkout(int submodule_progress)
if (write_locked_index(&the_index, &lock_file, COMMIT_LOCK))
die(_("unable to write new index file"));
- err |= run_hook_le(NULL, "post-checkout", sha1_to_hex(null_sha1),
+ err |= run_hook_le(NULL, "post-checkout", oid_to_hex(&null_oid),
oid_to_hex(&oid), "1", NULL);
if (!err && (option_recurse_submodules.nr > 0)) {
@@ -1160,13 +1159,11 @@ int cmd_clone(int argc, const char **argv, const char *prefix)
transport->server_options = &server_options;
if (filter_options.choice) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
- expand_list_objects_filter_spec(&filter_options,
- &expanded_filter_spec);
+ const char *spec =
+ expand_list_objects_filter_spec(&filter_options);
transport_set_option(transport, TRANS_OPT_LIST_OBJECTS_FILTER,
- expanded_filter_spec.buf);
+ spec);
transport_set_option(transport, TRANS_OPT_FROM_PROMISOR, "1");
- strbuf_release(&expanded_filter_spec);
}
if (transport->smart_options && !deepen && !filter_options.choice)
diff --git a/builtin/commit-graph.c b/builtin/commit-graph.c
index 38027b83d9..addc8d4cc0 100644
--- a/builtin/commit-graph.c
+++ b/builtin/commit-graph.c
@@ -10,13 +10,13 @@
static char const * const builtin_commit_graph_usage[] = {
N_("git commit-graph [--object-dir <objdir>]"),
N_("git commit-graph read [--object-dir <objdir>]"),
- N_("git commit-graph verify [--object-dir <objdir>] [--shallow]"),
- N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] <split options>"),
+ N_("git commit-graph verify [--object-dir <objdir>] [--shallow] [--[no-]progress]"),
+ N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] [--[no-]progress] <split options>"),
NULL
};
static const char * const builtin_commit_graph_verify_usage[] = {
- N_("git commit-graph verify [--object-dir <objdir>] [--shallow]"),
+ N_("git commit-graph verify [--object-dir <objdir>] [--shallow] [--[no-]progress]"),
NULL
};
@@ -26,7 +26,7 @@ static const char * const builtin_commit_graph_read_usage[] = {
};
static const char * const builtin_commit_graph_write_usage[] = {
- N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] <split options>"),
+ N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] [--[no-]progress] <split options>"),
NULL
};
@@ -38,6 +38,7 @@ static struct opts_commit_graph {
int append;
int split;
int shallow;
+ int progress;
} opts;
static int graph_verify(int argc, const char **argv)
@@ -55,9 +56,13 @@ static int graph_verify(int argc, const char **argv)
N_("The object directory to store the graph")),
OPT_BOOL(0, "shallow", &opts.shallow,
N_("if the commit-graph is split, only verify the tip file")),
+ OPT_BOOL(0, "progress", &opts.progress, N_("force progress reporting")),
OPT_END(),
};
+ trace2_cmd_mode("verify");
+
+ opts.progress = isatty(2);
argc = parse_options(argc, argv, NULL,
builtin_commit_graph_verify_options,
builtin_commit_graph_verify_usage, 0);
@@ -66,6 +71,8 @@ static int graph_verify(int argc, const char **argv)
opts.obj_dir = get_object_directory();
if (opts.shallow)
flags |= COMMIT_GRAPH_VERIFY_SHALLOW;
+ if (opts.progress)
+ flags |= COMMIT_GRAPH_WRITE_PROGRESS;
graph_name = get_commit_graph_filename(opts.obj_dir);
open_ok = open_commit_graph(graph_name, &fd, &st);
@@ -102,6 +109,8 @@ static int graph_read(int argc, const char **argv)
OPT_END(),
};
+ trace2_cmd_mode("read");
+
argc = parse_options(argc, argv, NULL,
builtin_commit_graph_read_options,
builtin_commit_graph_read_usage, 0);
@@ -154,7 +163,7 @@ static int graph_write(int argc, const char **argv)
struct string_list *commit_hex = NULL;
struct string_list lines;
int result = 0;
- unsigned int flags = COMMIT_GRAPH_PROGRESS;
+ enum commit_graph_write_flags flags = 0;
static struct option builtin_commit_graph_write_options[] = {
OPT_STRING(0, "object-dir", &opts.obj_dir,
@@ -168,6 +177,7 @@ static int graph_write(int argc, const char **argv)
N_("start walk at commits listed by stdin")),
OPT_BOOL(0, "append", &opts.append,
N_("include all commits already in the commit-graph file")),
+ OPT_BOOL(0, "progress", &opts.progress, N_("force progress reporting")),
OPT_BOOL(0, "split", &opts.split,
N_("allow writing an incremental commit-graph file")),
OPT_INTEGER(0, "max-commits", &split_opts.max_commits,
@@ -179,10 +189,13 @@ static int graph_write(int argc, const char **argv)
OPT_END(),
};
+ opts.progress = isatty(2);
split_opts.size_multiple = 2;
split_opts.max_commits = 0;
split_opts.expire_time = 0;
+ trace2_cmd_mode("write");
+
argc = parse_options(argc, argv, NULL,
builtin_commit_graph_write_options,
builtin_commit_graph_write_usage, 0);
@@ -192,9 +205,11 @@ static int graph_write(int argc, const char **argv)
if (!opts.obj_dir)
opts.obj_dir = get_object_directory();
if (opts.append)
- flags |= COMMIT_GRAPH_APPEND;
+ flags |= COMMIT_GRAPH_WRITE_APPEND;
if (opts.split)
- flags |= COMMIT_GRAPH_SPLIT;
+ flags |= COMMIT_GRAPH_WRITE_SPLIT;
+ if (opts.progress)
+ flags |= COMMIT_GRAPH_WRITE_PROGRESS;
read_replace_refs = 0;
@@ -213,8 +228,10 @@ static int graph_write(int argc, const char **argv)
if (opts.stdin_packs)
pack_indexes = &lines;
- if (opts.stdin_commits)
+ if (opts.stdin_commits) {
commit_hex = &lines;
+ flags |= COMMIT_GRAPH_WRITE_CHECK_OIDS;
+ }
UNLEAK(buf);
}
@@ -249,6 +266,8 @@ int cmd_commit_graph(int argc, const char **argv, const char *prefix)
builtin_commit_graph_usage,
PARSE_OPT_STOP_AT_NON_OPTION);
+ save_commit_buffer = 0;
+
if (argc > 0) {
if (!strcmp(argv[0], "read"))
return graph_read(argc, argv);
diff --git a/builtin/commit.c b/builtin/commit.c
index ae7aaf6dc6..e588bc6ad3 100644
--- a/builtin/commit.c
+++ b/builtin/commit.c
@@ -510,7 +510,7 @@ static int run_status(FILE *fp, const char *index_file, const char *prefix, int
s->nowarn = nowarn;
s->is_initial = get_oid(s->reference, &oid) ? 1 : 0;
if (!s->is_initial)
- hashcpy(s->sha1_commit, oid.hash);
+ oidcpy(&s->oid_commit, &oid);
s->status_format = status_format;
s->ignore_submodule_arg = ignore_submodule_arg;
@@ -1406,7 +1406,7 @@ int cmd_status(int argc, const char **argv, const char *prefix)
s.is_initial = get_oid(s.reference, &oid) ? 1 : 0;
if (!s.is_initial)
- hashcpy(s.sha1_commit, oid.hash);
+ oidcpy(&s.oid_commit, &oid);
s.ignore_submodule_arg = ignore_submodule_arg;
s.status_format = status_format;
diff --git a/builtin/describe.c b/builtin/describe.c
index 200154297d..b6df81d8d0 100644
--- a/builtin/describe.c
+++ b/builtin/describe.c
@@ -15,7 +15,6 @@
#include "argv-array.h"
#include "run-command.h"
#include "object-store.h"
-#include "revision.h"
#include "list-objects.h"
#include "commit-slab.h"
@@ -64,19 +63,22 @@ static const char *prio_names[] = {
};
static int commit_name_neq(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *peeled)
{
- const struct commit_name *cn1 = entry;
- const struct commit_name *cn2 = entry_or_key;
+ const struct commit_name *cn1, *cn2;
+
+ cn1 = container_of(eptr, const struct commit_name, entry);
+ cn2 = container_of(entry_or_key, const struct commit_name, entry);
return !oideq(&cn1->peeled, peeled ? peeled : &cn2->peeled);
}
static inline struct commit_name *find_commit_name(const struct object_id *peeled)
{
- return hashmap_get_from_hash(&names, oidhash(peeled), peeled);
+ return hashmap_get_entry_from_hash(&names, oidhash(peeled), peeled,
+ struct commit_name, entry);
}
static int replace_name(struct commit_name *e,
@@ -123,8 +125,8 @@ static void add_to_known_names(const char *path,
if (!e) {
e = xmalloc(sizeof(struct commit_name));
oidcpy(&e->peeled, peeled);
- hashmap_entry_init(e, oidhash(peeled));
- hashmap_add(&names, e);
+ hashmap_entry_init(&e->entry, oidhash(peeled));
+ hashmap_add(&names, &e->entry);
e->path = NULL;
}
e->tag = tag;
@@ -313,7 +315,7 @@ static void describe_commit(struct object_id *oid, struct strbuf *dst)
*/
append_name(n, dst);
if (longformat)
- append_suffix(0, n->tag ? &n->tag->tagged->oid : oid, dst);
+ append_suffix(0, n->tag ? get_tagged_oid(n->tag) : oid, dst);
if (suffix)
strbuf_addstr(dst, suffix);
return;
@@ -330,8 +332,8 @@ static void describe_commit(struct object_id *oid, struct strbuf *dst)
struct commit_name *n;
init_commit_names(&commit_names);
- n = hashmap_iter_first(&names, &iter);
- for (; n; n = hashmap_iter_next(&iter)) {
+ hashmap_for_each_entry(&names, &iter, n,
+ entry /* member name */) {
c = lookup_commit_reference_gently(the_repository,
&n->peeled, 1);
if (c)
diff --git a/builtin/difftool.c b/builtin/difftool.c
index 16eb8b70ea..c280e682b2 100644
--- a/builtin/difftool.c
+++ b/builtin/difftool.c
@@ -125,12 +125,15 @@ struct working_tree_entry {
};
static int working_tree_entry_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct working_tree_entry *a = entry;
- const struct working_tree_entry *b = entry_or_key;
+ const struct working_tree_entry *a, *b;
+
+ a = container_of(eptr, const struct working_tree_entry, entry);
+ b = container_of(entry_or_key, const struct working_tree_entry, entry);
+
return strcmp(a->path, b->path);
}
@@ -145,12 +148,14 @@ struct pair_entry {
};
static int pair_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct pair_entry *a = entry;
- const struct pair_entry *b = entry_or_key;
+ const struct pair_entry *a, *b;
+
+ a = container_of(eptr, const struct pair_entry, entry);
+ b = container_of(entry_or_key, const struct pair_entry, entry);
return strcmp(a->path, b->path);
}
@@ -161,14 +166,14 @@ static void add_left_or_right(struct hashmap *map, const char *path,
struct pair_entry *e, *existing;
FLEX_ALLOC_STR(e, path, path);
- hashmap_entry_init(e, strhash(path));
- existing = hashmap_get(map, e, NULL);
+ hashmap_entry_init(&e->entry, strhash(path));
+ existing = hashmap_get_entry(map, e, entry, NULL);
if (existing) {
free(e);
e = existing;
} else {
e->left[0] = e->right[0] = '\0';
- hashmap_add(map, e);
+ hashmap_add(map, &e->entry);
}
strlcpy(is_right ? e->right : e->left, content, PATH_MAX);
}
@@ -179,12 +184,14 @@ struct path_entry {
};
static int path_entry_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *key)
{
- const struct path_entry *a = entry;
- const struct path_entry *b = entry_or_key;
+ const struct path_entry *a, *b;
+
+ a = container_of(eptr, const struct path_entry, entry);
+ b = container_of(entry_or_key, const struct path_entry, entry);
return strcmp(a->path, key ? key : b->path);
}
@@ -234,8 +241,8 @@ static void changed_files(struct hashmap *result, const char *index_path,
while (!strbuf_getline_nul(&buf, fp)) {
struct path_entry *entry;
FLEX_ALLOC_STR(entry, path, buf.buf);
- hashmap_entry_init(entry, strhash(buf.buf));
- hashmap_add(result, entry);
+ hashmap_entry_init(&entry->entry, strhash(buf.buf));
+ hashmap_add(result, &entry->entry);
}
fclose(fp);
if (finish_command(&diff_files))
@@ -461,12 +468,13 @@ static int run_dir_diff(const char *extcmd, int symlinks, const char *prefix,
/* Avoid duplicate working_tree entries */
FLEX_ALLOC_STR(entry, path, dst_path);
- hashmap_entry_init(entry, strhash(dst_path));
- if (hashmap_get(&working_tree_dups, entry, NULL)) {
+ hashmap_entry_init(&entry->entry, strhash(dst_path));
+ if (hashmap_get(&working_tree_dups, &entry->entry,
+ NULL)) {
free(entry);
continue;
}
- hashmap_add(&working_tree_dups, entry);
+ hashmap_add(&working_tree_dups, &entry->entry);
if (!use_wt_file(workdir, dst_path, &roid)) {
if (checkout_path(rmode, &roid, dst_path,
@@ -530,8 +538,8 @@ static int run_dir_diff(const char *extcmd, int symlinks, const char *prefix,
* temporary file to both the left and right directories to show the
* change in the recorded SHA1 for the submodule.
*/
- hashmap_iter_init(&submodules, &iter);
- while ((entry = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(&submodules, &iter, entry,
+ entry /* member name */) {
if (*entry->left) {
add_path(&ldir, ldir_len, entry->path);
ensure_leading_directories(ldir.buf);
@@ -549,8 +557,8 @@ static int run_dir_diff(const char *extcmd, int symlinks, const char *prefix,
* shows only the link itself, not the contents of the link target.
* This loop replicates that behavior.
*/
- hashmap_iter_init(&symlinks2, &iter);
- while ((entry = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(&symlinks2, &iter, entry,
+ entry /* member name */) {
if (*entry->left) {
add_path(&ldir, ldir_len, entry->path);
ensure_leading_directories(ldir.buf);
diff --git a/builtin/fast-export.c b/builtin/fast-export.c
index f541f55d33..dbec4df92b 100644
--- a/builtin/fast-export.c
+++ b/builtin/fast-export.c
@@ -40,6 +40,7 @@ static int no_data;
static int full_tree;
static int reference_excluded_commits;
static int show_original_ids;
+static int mark_tags;
static struct string_list extra_refs = STRING_LIST_INIT_NODUP;
static struct string_list tag_refs = STRING_LIST_INIT_NODUP;
static struct refspec refspecs = REFSPEC_INIT_FETCH;
@@ -126,10 +127,15 @@ struct anonymized_entry {
};
static int anonymized_entry_cmp(const void *unused_cmp_data,
- const void *va, const void *vb,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct anonymized_entry *a = va, *b = vb;
+ const struct anonymized_entry *a, *b;
+
+ a = container_of(eptr, const struct anonymized_entry, hash);
+ b = container_of(entry_or_key, const struct anonymized_entry, hash);
+
return a->orig_len != b->orig_len ||
memcmp(a->orig, b->orig, a->orig_len);
}
@@ -148,10 +154,10 @@ static const void *anonymize_mem(struct hashmap *map,
if (!map->cmpfn)
hashmap_init(map, anonymized_entry_cmp, NULL, 0);
- hashmap_entry_init(&key, memhash(orig, *len));
+ hashmap_entry_init(&key.hash, memhash(orig, *len));
key.orig = orig;
key.orig_len = *len;
- ret = hashmap_get(map, &key, NULL);
+ ret = hashmap_get_entry(map, &key, hash, NULL);
if (!ret) {
ret = xmalloc(sizeof(*ret));
@@ -160,7 +166,7 @@ static const void *anonymize_mem(struct hashmap *map,
ret->orig_len = *len;
ret->anon = generate(orig, len);
ret->anon_len = *len;
- hashmap_put(map, ret);
+ hashmap_put(map, &ret->hash);
}
*len = ret->anon_len;
@@ -842,25 +848,40 @@ static void handle_tag(const char *name, struct tag *tag)
free(buf);
return;
case REWRITE:
- if (tagged->type != OBJ_COMMIT) {
- die("tag %s tags unexported %s!",
- oid_to_hex(&tag->object.oid),
- type_name(tagged->type));
- }
- p = rewrite_commit((struct commit *)tagged);
- if (!p) {
- printf("reset %s\nfrom %s\n\n",
- name, oid_to_hex(&null_oid));
- free(buf);
- return;
+ if (tagged->type == OBJ_TAG && !mark_tags) {
+ die(_("Error: Cannot export nested tags unless --mark-tags is specified."));
+ } else if (tagged->type == OBJ_COMMIT) {
+ p = rewrite_commit((struct commit *)tagged);
+ if (!p) {
+ printf("reset %s\nfrom %s\n\n",
+ name, oid_to_hex(&null_oid));
+ free(buf);
+ return;
+ }
+ tagged_mark = get_object_mark(&p->object);
+ } else {
+ /* tagged->type is either OBJ_BLOB or OBJ_TAG */
+ tagged_mark = get_object_mark(tagged);
}
- tagged_mark = get_object_mark(&p->object);
}
}
+ if (tagged->type == OBJ_TAG) {
+ printf("reset %s\nfrom %s\n\n",
+ name, oid_to_hex(&null_oid));
+ }
if (starts_with(name, "refs/tags/"))
name += 10;
- printf("tag %s\nfrom :%d\n", name, tagged_mark);
+ printf("tag %s\n", name);
+ if (mark_tags) {
+ mark_next_object(&tag->object);
+ printf("mark :%"PRIu32"\n", last_idnum);
+ }
+ if (tagged_mark)
+ printf("from :%d\n", tagged_mark);
+ else
+ printf("from %s\n", oid_to_hex(&tagged->oid));
+
if (show_original_ids)
printf("original-oid %s\n", oid_to_hex(&tag->object.oid));
printf("%.*s%sdata %d\n%.*s\n",
@@ -1047,11 +1068,16 @@ static void export_marks(char *file)
error("Unable to write marks file %s.", file);
}
-static void import_marks(char *input_file)
+static void import_marks(char *input_file, int check_exists)
{
char line[512];
- FILE *f = xfopen(input_file, "r");
+ FILE *f;
+ struct stat sb;
+
+ if (check_exists && stat(input_file, &sb))
+ return;
+ f = xfopen(input_file, "r");
while (fgets(line, sizeof(line), f)) {
uint32_t mark;
char *line_end, *mark_end;
@@ -1115,7 +1141,9 @@ int cmd_fast_export(int argc, const char **argv, const char *prefix)
struct rev_info revs;
struct object_array commits = OBJECT_ARRAY_INIT;
struct commit *commit;
- char *export_filename = NULL, *import_filename = NULL;
+ char *export_filename = NULL,
+ *import_filename = NULL,
+ *import_filename_if_exists = NULL;
uint32_t lastimportid;
struct string_list refspecs_list = STRING_LIST_INIT_NODUP;
struct string_list paths_of_changed_objects = STRING_LIST_INIT_DUP;
@@ -1135,6 +1163,10 @@ int cmd_fast_export(int argc, const char **argv, const char *prefix)
N_("Dump marks to this file")),
OPT_STRING(0, "import-marks", &import_filename, N_("file"),
N_("Import marks from this file")),
+ OPT_STRING(0, "import-marks-if-exists",
+ &import_filename_if_exists,
+ N_("file"),
+ N_("Import marks from this file if it exists")),
OPT_BOOL(0, "fake-missing-tagger", &fake_missing_tagger,
N_("Fake a tagger when tags lack one")),
OPT_BOOL(0, "full-tree", &full_tree,
@@ -1149,6 +1181,8 @@ int cmd_fast_export(int argc, const char **argv, const char *prefix)
&reference_excluded_commits, N_("Reference parents which are not in fast-export stream by object id")),
OPT_BOOL(0, "show-original-ids", &show_original_ids,
N_("Show original object ids of blobs/commits")),
+ OPT_BOOL(0, "mark-tags", &mark_tags,
+ N_("Label tags with mark ids")),
OPT_END()
};
@@ -1182,8 +1216,12 @@ int cmd_fast_export(int argc, const char **argv, const char *prefix)
if (use_done_feature)
printf("feature done\n");
+ if (import_filename && import_filename_if_exists)
+ die(_("Cannot pass both --import-marks and --import-marks-if-exists"));
if (import_filename)
- import_marks(import_filename);
+ import_marks(import_filename, 0);
+ else if (import_filename_if_exists)
+ import_marks(import_filename_if_exists, 1);
lastimportid = last_idnum;
if (import_filename && revs.prune_data.nr)
diff --git a/builtin/fetch.c b/builtin/fetch.c
index 717dd14e89..0c345b5dfe 100644
--- a/builtin/fetch.c
+++ b/builtin/fetch.c
@@ -7,6 +7,7 @@
#include "refs.h"
#include "refspec.h"
#include "object-store.h"
+#include "oidset.h"
#include "commit.h"
#include "builtin.h"
#include "string-list.h"
@@ -23,6 +24,9 @@
#include "packfile.h"
#include "list-objects-filter-options.h"
#include "commit-reach.h"
+#include "branch.h"
+#include "promisor-remote.h"
+#include "commit-graph.h"
#define FORCED_UPDATES_DELAY_WARNING_IN_MS (10 * 1000)
@@ -50,11 +54,13 @@ static int fetch_prune_tags_config = -1; /* unspecified */
static int prune_tags = -1; /* unspecified */
#define PRUNE_TAGS_BY_DEFAULT 0 /* do we prune tags by default? */
-static int all, append, dry_run, force, keep, multiple, update_head_ok, verbosity, deepen_relative;
+static int all, append, dry_run, force, keep, multiple, update_head_ok;
+static int verbosity, deepen_relative, set_upstream;
static int progress = -1;
static int enable_auto_gc = 1;
static int tags = TAGS_DEFAULT, unshallow, update_shallow, deepen;
-static int max_children = 1;
+static int max_jobs = -1, submodule_fetch_jobs_config = -1;
+static int fetch_parallel_config = 1;
static enum transport_family family;
static const char *depth;
static const char *deepen_since;
@@ -96,13 +102,20 @@ static int git_fetch_config(const char *k, const char *v, void *cb)
}
if (!strcmp(k, "submodule.fetchjobs")) {
- max_children = parse_submodule_fetchjobs(k, v);
+ submodule_fetch_jobs_config = parse_submodule_fetchjobs(k, v);
return 0;
} else if (!strcmp(k, "fetch.recursesubmodules")) {
recurse_submodules = parse_fetch_recurse_submodules_arg(k, v);
return 0;
}
+ if (!strcmp(k, "fetch.parallel")) {
+ fetch_parallel_config = git_config_int(k, v);
+ if (fetch_parallel_config < 0)
+ die(_("fetch.parallel cannot be negative"));
+ return 0;
+ }
+
return git_default_config(k, v, cb);
}
@@ -123,6 +136,8 @@ static struct option builtin_fetch_options[] = {
OPT__VERBOSITY(&verbosity),
OPT_BOOL(0, "all", &all,
N_("fetch from all remotes")),
+ OPT_BOOL(0, "set-upstream", &set_upstream,
+ N_("set upstream for git pull/fetch")),
OPT_BOOL('a', "append", &append,
N_("append to .git/FETCH_HEAD instead of overwriting")),
OPT_STRING(0, "upload-pack", &upload_pack, N_("path"),
@@ -134,7 +149,7 @@ static struct option builtin_fetch_options[] = {
N_("fetch all tags and associated objects"), TAGS_SET),
OPT_SET_INT('n', NULL, &tags,
N_("do not fetch all tags (--no-tags)"), TAGS_UNSET),
- OPT_INTEGER('j', "jobs", &max_children,
+ OPT_INTEGER('j', "jobs", &max_jobs,
N_("number of submodules fetched in parallel")),
OPT_BOOL('p', "prune", &prune,
N_("prune remote-tracking branches no longer on remote")),
@@ -239,32 +254,31 @@ static void add_merge_config(struct ref **head,
}
}
-static int will_fetch(struct ref **head, const unsigned char *sha1)
+static void create_fetch_oidset(struct ref **head, struct oidset *out)
{
struct ref *rm = *head;
while (rm) {
- if (hasheq(rm->old_oid.hash, sha1))
- return 1;
+ oidset_insert(out, &rm->old_oid);
rm = rm->next;
}
- return 0;
}
struct refname_hash_entry {
- struct hashmap_entry ent; /* must be the first member */
+ struct hashmap_entry ent;
struct object_id oid;
int ignore;
char refname[FLEX_ARRAY];
};
static int refname_hash_entry_cmp(const void *hashmap_cmp_fn_data,
- const void *e1_,
- const void *e2_,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
- const struct refname_hash_entry *e1 = e1_;
- const struct refname_hash_entry *e2 = e2_;
+ const struct refname_hash_entry *e1, *e2;
+ e1 = container_of(eptr, const struct refname_hash_entry, ent);
+ e2 = container_of(entry_or_key, const struct refname_hash_entry, ent);
return strcmp(e1->refname, keydata ? keydata : e2->refname);
}
@@ -276,9 +290,9 @@ static struct refname_hash_entry *refname_hash_add(struct hashmap *map,
size_t len = strlen(refname);
FLEX_ALLOC_MEM(ent, refname, refname, len);
- hashmap_entry_init(ent, strhash(refname));
+ hashmap_entry_init(&ent->ent, strhash(refname));
oidcpy(&ent->oid, oid);
- hashmap_add(map, ent);
+ hashmap_add(map, &ent->ent);
return ent;
}
@@ -313,6 +327,7 @@ static void find_non_local_tags(const struct ref *refs,
{
struct hashmap existing_refs;
struct hashmap remote_refs;
+ struct oidset fetch_oids = OIDSET_INIT;
struct string_list remote_refs_list = STRING_LIST_INIT_NODUP;
struct string_list_item *remote_ref_item;
const struct ref *ref;
@@ -320,6 +335,7 @@ static void find_non_local_tags(const struct ref *refs,
refname_hash_init(&existing_refs);
refname_hash_init(&remote_refs);
+ create_fetch_oidset(head, &fetch_oids);
for_each_ref(add_one_refname, &existing_refs);
for (ref = refs; ref; ref = ref->next) {
@@ -336,9 +352,9 @@ static void find_non_local_tags(const struct ref *refs,
if (item &&
!has_object_file_with_flags(&ref->old_oid,
OBJECT_INFO_QUICK) &&
- !will_fetch(head, ref->old_oid.hash) &&
+ !oidset_contains(&fetch_oids, &ref->old_oid) &&
!has_object_file_with_flags(&item->oid, OBJECT_INFO_QUICK) &&
- !will_fetch(head, item->oid.hash))
+ !oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
item = NULL;
continue;
@@ -352,7 +368,7 @@ static void find_non_local_tags(const struct ref *refs,
*/
if (item &&
!has_object_file_with_flags(&item->oid, OBJECT_INFO_QUICK) &&
- !will_fetch(head, item->oid.hash))
+ !oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
item = NULL;
@@ -365,7 +381,7 @@ static void find_non_local_tags(const struct ref *refs,
item = refname_hash_add(&remote_refs, ref->name, &ref->old_oid);
string_list_insert(&remote_refs_list, ref->name);
}
- hashmap_free(&existing_refs, 1);
+ hashmap_free_entries(&existing_refs, struct refname_hash_entry, ent);
/*
* We may have a final lightweight tag that needs to be
@@ -373,7 +389,7 @@ static void find_non_local_tags(const struct ref *refs,
*/
if (item &&
!has_object_file_with_flags(&item->oid, OBJECT_INFO_QUICK) &&
- !will_fetch(head, item->oid.hash))
+ !oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
/*
@@ -383,8 +399,10 @@ static void find_non_local_tags(const struct ref *refs,
for_each_string_list_item(remote_ref_item, &remote_refs_list) {
const char *refname = remote_ref_item->string;
struct ref *rm;
+ unsigned int hash = strhash(refname);
- item = hashmap_get_from_hash(&remote_refs, strhash(refname), refname);
+ item = hashmap_get_entry_from_hash(&remote_refs, hash, refname,
+ struct refname_hash_entry, ent);
if (!item)
BUG("unseen remote ref?");
@@ -398,8 +416,9 @@ static void find_non_local_tags(const struct ref *refs,
**tail = rm;
*tail = &rm->next;
}
- hashmap_free(&remote_refs, 1);
+ hashmap_free_entries(&remote_refs, struct refname_hash_entry, ent);
string_list_clear(&remote_refs_list, 0);
+ oidset_clear(&fetch_oids);
}
static struct ref *get_ref_map(struct remote *remote,
@@ -516,17 +535,18 @@ static struct ref *get_ref_map(struct remote *remote,
if (rm->peer_ref) {
const char *refname = rm->peer_ref->name;
struct refname_hash_entry *peer_item;
+ unsigned int hash = strhash(refname);
- peer_item = hashmap_get_from_hash(&existing_refs,
- strhash(refname),
- refname);
+ peer_item = hashmap_get_entry_from_hash(&existing_refs,
+ hash, refname,
+ struct refname_hash_entry, ent);
if (peer_item) {
struct object_id *old_oid = &peer_item->oid;
oidcpy(&rm->peer_ref->old_oid, old_oid);
}
}
}
- hashmap_free(&existing_refs, 1);
+ hashmap_free_entries(&existing_refs, struct refname_hash_entry, ent);
return ref_map;
}
@@ -1065,8 +1085,11 @@ static int check_exist_and_connected(struct ref *ref_map)
static int fetch_refs(struct transport *transport, struct ref *ref_map)
{
int ret = check_exist_and_connected(ref_map);
- if (ret)
+ if (ret) {
+ trace2_region_enter("fetch", "fetch_refs", the_repository);
ret = transport_fetch_refs(transport, ref_map);
+ trace2_region_leave("fetch", "fetch_refs", the_repository);
+ }
if (!ret)
/*
* Keep the new pack's ".keep" file around to allow the caller
@@ -1082,11 +1105,14 @@ static int consume_refs(struct transport *transport, struct ref *ref_map)
{
int connectivity_checked = transport->smart_options
? transport->smart_options->connectivity_checked : 0;
- int ret = store_updated_refs(transport->url,
- transport->remote->name,
- connectivity_checked,
- ref_map);
+ int ret;
+ trace2_region_enter("fetch", "consume_refs", the_repository);
+ ret = store_updated_refs(transport->url,
+ transport->remote->name,
+ connectivity_checked,
+ ref_map);
transport_unlock_pack(transport);
+ trace2_region_leave("fetch", "consume_refs", the_repository);
return ret;
}
@@ -1238,13 +1264,10 @@ static struct transport *prepare_transport(struct remote *remote, int deepen)
if (update_shallow)
set_option(transport, TRANS_OPT_UPDATE_SHALLOW, "yes");
if (filter_options.choice) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
- expand_list_objects_filter_spec(&filter_options,
- &expanded_filter_spec);
- set_option(transport, TRANS_OPT_LIST_OBJECTS_FILTER,
- expanded_filter_spec.buf);
+ const char *spec =
+ expand_list_objects_filter_spec(&filter_options);
+ set_option(transport, TRANS_OPT_LIST_OBJECTS_FILTER, spec);
set_option(transport, TRANS_OPT_FROM_PROMISOR, "1");
- strbuf_release(&expanded_filter_spec);
}
if (negotiation_tip.nr) {
if (transport->smart_options)
@@ -1334,9 +1357,11 @@ static int do_fetch(struct transport *transport,
argv_array_push(&ref_prefixes, "refs/tags/");
}
- if (must_list_refs)
+ if (must_list_refs) {
+ trace2_region_enter("fetch", "remote_refs", the_repository);
remote_refs = transport_get_remote_refs(transport, &ref_prefixes);
- else
+ trace2_region_leave("fetch", "remote_refs", the_repository);
+ } else
remote_refs = NULL;
argv_array_clear(&ref_prefixes);
@@ -1367,6 +1392,51 @@ static int do_fetch(struct transport *transport,
retcode = 1;
goto cleanup;
}
+
+ if (set_upstream) {
+ struct branch *branch = branch_get("HEAD");
+ struct ref *rm;
+ struct ref *source_ref = NULL;
+
+ /*
+ * We're setting the upstream configuration for the
+ * current branch. The relevant upstream is the
+ * fetched branch that is meant to be merged with the
+ * current one, i.e. the one fetched to FETCH_HEAD.
+ *
+ * When there are several such branches, consider the
+ * request ambiguous and err on the safe side by doing
+ * nothing and just emitting a warning.
+ */
+ for (rm = ref_map; rm; rm = rm->next) {
+ if (!rm->peer_ref) {
+ if (source_ref) {
+ warning(_("multiple branch detected, incompatible with --set-upstream"));
+ goto skip;
+ } else {
+ source_ref = rm;
+ }
+ }
+ }
+ if (source_ref) {
+ if (!strcmp(source_ref->name, "HEAD") ||
+ starts_with(source_ref->name, "refs/heads/"))
+ install_branch_config(0,
+ branch->name,
+ transport->remote->name,
+ source_ref->name);
+ else if (starts_with(source_ref->name, "refs/remotes/"))
+ warning(_("not setting upstream for a remote remote-tracking branch"));
+ else if (starts_with(source_ref->name, "refs/tags/"))
+ warning(_("not setting upstream for a remote tag"));
+ else
+ warning(_("unknown branch type"));
+ } else {
+ warning(_("no source branch found.\n"
+ "you need to specify exactly one branch with the --set-upstream option."));
+ }
+ }
+ skip:
free_refs(ref_map);
/* if neither --no-tags nor --tags was specified, do automated tag
@@ -1463,7 +1533,62 @@ static void add_options_to_argv(struct argv_array *argv)
}
-static int fetch_multiple(struct string_list *list)
+/* Fetch multiple remotes in parallel */
+
+struct parallel_fetch_state {
+ const char **argv;
+ struct string_list *remotes;
+ int next, result;
+};
+
+static int fetch_next_remote(struct child_process *cp, struct strbuf *out,
+ void *cb, void **task_cb)
+{
+ struct parallel_fetch_state *state = cb;
+ char *remote;
+
+ if (state->next < 0 || state->next >= state->remotes->nr)
+ return 0;
+
+ remote = state->remotes->items[state->next++].string;
+ *task_cb = remote;
+
+ argv_array_pushv(&cp->args, state->argv);
+ argv_array_push(&cp->args, remote);
+ cp->git_cmd = 1;
+
+ if (verbosity >= 0)
+ printf(_("Fetching %s\n"), remote);
+
+ return 1;
+}
+
+static int fetch_failed_to_start(struct strbuf *out, void *cb, void *task_cb)
+{
+ struct parallel_fetch_state *state = cb;
+ const char *remote = task_cb;
+
+ state->result = error(_("Could not fetch %s"), remote);
+
+ return 0;
+}
+
+static int fetch_finished(int result, struct strbuf *out,
+ void *cb, void *task_cb)
+{
+ struct parallel_fetch_state *state = cb;
+ const char *remote = task_cb;
+
+ if (result) {
+ strbuf_addf(out, _("could not fetch '%s' (exit code: %d)\n"),
+ remote, result);
+ state->result = -1;
+ }
+
+ return 0;
+}
+
+static int fetch_multiple(struct string_list *list, int max_children)
{
int i, result = 0;
struct argv_array argv = ARGV_ARRAY_INIT;
@@ -1477,20 +1602,34 @@ static int fetch_multiple(struct string_list *list)
argv_array_pushl(&argv, "fetch", "--append", "--no-auto-gc", NULL);
add_options_to_argv(&argv);
- for (i = 0; i < list->nr; i++) {
- const char *name = list->items[i].string;
- argv_array_push(&argv, name);
- if (verbosity >= 0)
- printf(_("Fetching %s\n"), name);
- if (run_command_v_opt(argv.argv, RUN_GIT_CMD)) {
- error(_("Could not fetch %s"), name);
- result = 1;
+ if (max_children != 1 && list->nr != 1) {
+ struct parallel_fetch_state state = { argv.argv, list, 0, 0 };
+
+ argv_array_push(&argv, "--end-of-options");
+ result = run_processes_parallel_tr2(max_children,
+ &fetch_next_remote,
+ &fetch_failed_to_start,
+ &fetch_finished,
+ &state,
+ "fetch", "parallel/fetch");
+
+ if (!result)
+ result = state.result;
+ } else
+ for (i = 0; i < list->nr; i++) {
+ const char *name = list->items[i].string;
+ argv_array_push(&argv, name);
+ if (verbosity >= 0)
+ printf(_("Fetching %s\n"), name);
+ if (run_command_v_opt(argv.argv, RUN_GIT_CMD)) {
+ error(_("Could not fetch %s"), name);
+ result = 1;
+ }
+ argv_array_pop(&argv);
}
- argv_array_pop(&argv);
- }
argv_array_clear(&argv);
- return result;
+ return !!result;
}
/*
@@ -1510,37 +1649,27 @@ static inline void fetch_one_setup_partial(struct remote *remote)
* If no prior partial clone/fetch and the current fetch DID NOT
* request a partial-fetch, do a normal fetch.
*/
- if (!repository_format_partial_clone && !filter_options.choice)
+ if (!has_promisor_remote() && !filter_options.choice)
return;
/*
- * If this is the FIRST partial-fetch request, we enable partial
- * on this repo and remember the given filter-spec as the default
- * for subsequent fetches to this remote.
+ * If this is a partial-fetch request, we enable partial on
+ * this repo if not already enabled and remember the given
+ * filter-spec as the default for subsequent fetches to this
+ * remote.
*/
- if (!repository_format_partial_clone && filter_options.choice) {
+ if (filter_options.choice) {
partial_clone_register(remote->name, &filter_options);
return;
}
/*
- * We are currently limited to only ONE promisor remote and only
- * allow partial-fetches from the promisor remote.
- */
- if (strcmp(remote->name, repository_format_partial_clone)) {
- if (filter_options.choice)
- die(_("--filter can only be used with the remote "
- "configured in extensions.partialClone"));
- return;
- }
-
- /*
* Do a partial-fetch from the promisor remote using either the
* explicitly given filter-spec or inherit the filter-spec from
* the config.
*/
if (!filter_options.choice)
- partial_clone_get_default_filter_spec(&filter_options);
+ partial_clone_get_default_filter_spec(&filter_options, remote->name);
return;
}
@@ -1633,7 +1762,8 @@ int cmd_fetch(int argc, const char **argv, const char *prefix)
for (i = 1; i < argc; i++)
strbuf_addf(&default_rla, " %s", argv[i]);
- fetch_config_from_gitmodules(&max_children, &recurse_submodules);
+ fetch_config_from_gitmodules(&submodule_fetch_jobs_config,
+ &recurse_submodules);
git_config(git_fetch_config, NULL);
argc = parse_options(argc, argv, prefix,
@@ -1661,7 +1791,7 @@ int cmd_fetch(int argc, const char **argv, const char *prefix)
if (depth || deepen_since || deepen_not.nr)
deepen = 1;
- if (filter_options.choice && !repository_format_partial_clone)
+ if (filter_options.choice && !has_promisor_remote())
die("--filter can only be used when extensions.partialClone is set");
if (all) {
@@ -1695,19 +1825,31 @@ int cmd_fetch(int argc, const char **argv, const char *prefix)
}
if (remote) {
- if (filter_options.choice || repository_format_partial_clone)
+ if (filter_options.choice || has_promisor_remote())
fetch_one_setup_partial(remote);
result = fetch_one(remote, argc, argv, prune_tags_ok);
} else {
+ int max_children = max_jobs;
+
if (filter_options.choice)
die(_("--filter can only be used with the remote "
"configured in extensions.partialclone"));
+
+ if (max_children < 0)
+ max_children = fetch_parallel_config;
+
/* TODO should this also die if we have a previous partial-clone? */
- result = fetch_multiple(&list);
+ result = fetch_multiple(&list, max_children);
}
if (!result && (recurse_submodules != RECURSE_SUBMODULES_OFF)) {
struct argv_array options = ARGV_ARRAY_INIT;
+ int max_children = max_jobs;
+
+ if (max_children < 0)
+ max_children = submodule_fetch_jobs_config;
+ if (max_children < 0)
+ max_children = fetch_parallel_config;
add_options_to_argv(&options);
result = fetch_populated_submodules(the_repository,
@@ -1722,6 +1864,20 @@ int cmd_fetch(int argc, const char **argv, const char *prefix)
string_list_clear(&list, 0);
+ prepare_repo_settings(the_repository);
+ if (the_repository->settings.fetch_write_commit_graph) {
+ int commit_graph_flags = COMMIT_GRAPH_WRITE_SPLIT;
+ struct split_commit_graph_opts split_opts;
+ memset(&split_opts, 0, sizeof(struct split_commit_graph_opts));
+
+ if (progress)
+ commit_graph_flags |= COMMIT_GRAPH_WRITE_PROGRESS;
+
+ write_commit_graph_reachable(get_object_directory(),
+ commit_graph_flags,
+ &split_opts);
+ }
+
close_object_store(the_repository->objects);
if (enable_auto_gc) {
diff --git a/builtin/gc.c b/builtin/gc.c
index c18efadda5..fadb45489f 100644
--- a/builtin/gc.c
+++ b/builtin/gc.c
@@ -27,6 +27,7 @@
#include "pack-objects.h"
#include "blob.h"
#include "tree.h"
+#include "promisor-remote.h"
#define FAILED_RUN "failed to run %s"
@@ -41,7 +42,6 @@ static int aggressive_depth = 50;
static int aggressive_window = 250;
static int gc_auto_threshold = 6700;
static int gc_auto_pack_limit = 50;
-static int gc_write_commit_graph;
static int detach_auto = 1;
static timestamp_t gc_log_expire_time;
static const char *gc_log_expire = "1.day.ago";
@@ -148,7 +148,6 @@ static void gc_config(void)
git_config_get_int("gc.aggressivedepth", &aggressive_depth);
git_config_get_int("gc.auto", &gc_auto_threshold);
git_config_get_int("gc.autopacklimit", &gc_auto_pack_limit);
- git_config_get_bool("gc.writecommitgraph", &gc_write_commit_graph);
git_config_get_bool("gc.autodetach", &detach_auto);
git_config_get_expiry("gc.pruneexpire", &prune_expire);
git_config_get_expiry("gc.worktreepruneexpire", &prune_worktrees_expire);
@@ -661,7 +660,7 @@ int cmd_gc(int argc, const char **argv, const char *prefix)
argv_array_push(&prune, prune_expire);
if (quiet)
argv_array_push(&prune, "--no-progress");
- if (repository_format_partial_clone)
+ if (has_promisor_remote())
argv_array_push(&prune,
"--exclude-promisor-objects");
if (run_command_v_opt(prune.argv, RUN_GIT_CMD))
@@ -685,11 +684,11 @@ int cmd_gc(int argc, const char **argv, const char *prefix)
clean_pack_garbage();
}
- if (gc_write_commit_graph &&
- write_commit_graph_reachable(get_object_directory(),
- !quiet && !daemonized ? COMMIT_GRAPH_PROGRESS : 0,
- NULL))
- return 1;
+ prepare_repo_settings(the_repository);
+ if (the_repository->settings.gc_write_commit_graph == 1)
+ write_commit_graph_reachable(get_object_directory(),
+ !quiet && !daemonized ? COMMIT_GRAPH_WRITE_PROGRESS : 0,
+ NULL);
if (auto_gc && too_many_loose_objects())
warning(_("There are too many unreachable loose objects; "
diff --git a/builtin/grep.c b/builtin/grep.c
index 560051784e..69ac053acc 100644
--- a/builtin/grep.c
+++ b/builtin/grep.c
@@ -403,7 +403,7 @@ static int grep_tree(struct grep_opt *opt, const struct pathspec *pathspec,
static int grep_submodule(struct grep_opt *opt,
const struct pathspec *pathspec,
const struct object_id *oid,
- const char *filename, const char *path)
+ const char *filename, const char *path, int cached)
{
struct repository subrepo;
struct repository *superproject = opt->repo;
@@ -475,7 +475,7 @@ static int grep_submodule(struct grep_opt *opt,
strbuf_release(&base);
free(data);
} else {
- hit = grep_cache(&subopt, pathspec, 1);
+ hit = grep_cache(&subopt, pathspec, cached);
}
repo_clear(&subrepo);
@@ -523,7 +523,8 @@ static int grep_cache(struct grep_opt *opt,
}
} else if (recurse_submodules && S_ISGITLINK(ce->ce_mode) &&
submodule_path_match(repo->index, pathspec, name.buf, NULL)) {
- hit |= grep_submodule(opt, pathspec, NULL, ce->name, ce->name);
+ hit |= grep_submodule(opt, pathspec, NULL, ce->name,
+ ce->name, cached);
} else {
continue;
}
@@ -598,7 +599,8 @@ static int grep_tree(struct grep_opt *opt, const struct pathspec *pathspec,
free(data);
} else if (recurse_submodules && S_ISGITLINK(entry.mode)) {
hit |= grep_submodule(opt, pathspec, &entry.oid,
- base->buf, base->buf + tn_len);
+ base->buf, base->buf + tn_len,
+ 1); /* ignored */
}
strbuf_setlen(base, old_baselen);
@@ -1108,8 +1110,8 @@ int cmd_grep(int argc, const char **argv, const char *prefix)
strbuf_addf(&buf, "+/%s%s",
strcmp("less", pager) ? "" : "*",
opt.pattern_list->pattern);
- string_list_append(&path_list, buf.buf);
- strbuf_detach(&buf, NULL);
+ string_list_append(&path_list,
+ strbuf_detach(&buf, NULL));
}
}
diff --git a/builtin/index-pack.c b/builtin/index-pack.c
index 0d55f73b0b..60a5591039 100644
--- a/builtin/index-pack.c
+++ b/builtin/index-pack.c
@@ -14,7 +14,7 @@
#include "thread-utils.h"
#include "packfile.h"
#include "object-store.h"
-#include "fetch-object.h"
+#include "promisor-remote.h"
static const char index_pack_usage[] =
"git index-pack [-v] [-o <index-file>] [--keep | --keep=<msg>] [--verify] [--strict] (<pack-file> | --stdin [--fix-thin] [<pack-file>])";
@@ -1352,7 +1352,7 @@ static void fix_unresolved_deltas(struct hashfile *f)
sorted_by_pos[i] = &ref_deltas[i];
QSORT(sorted_by_pos, nr_ref_deltas, delta_pos_compare);
- if (repository_format_partial_clone) {
+ if (has_promisor_remote()) {
/*
* Prefetch the delta bases.
*/
@@ -1366,8 +1366,8 @@ static void fix_unresolved_deltas(struct hashfile *f)
oid_array_append(&to_fetch, &d->oid);
}
if (to_fetch.nr)
- fetch_objects(repository_format_partial_clone,
- to_fetch.oid, to_fetch.nr);
+ promisor_remote_get_direct(the_repository,
+ to_fetch.oid, to_fetch.nr);
oid_array_clear(&to_fetch);
}
@@ -1490,11 +1490,11 @@ static void final(const char *final_pack_name, const char *curr_pack_name,
}
if (!from_stdin) {
- printf("%s\n", sha1_to_hex(hash));
+ printf("%s\n", hash_to_hex(hash));
} else {
struct strbuf buf = STRBUF_INIT;
- strbuf_addf(&buf, "%s\t%s\n", report, sha1_to_hex(hash));
+ strbuf_addf(&buf, "%s\t%s\n", report, hash_to_hex(hash));
write_or_die(1, buf.buf, buf.len);
strbuf_release(&buf);
diff --git a/builtin/log.c b/builtin/log.c
index 44b10b3415..c4b35fdaf9 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -627,6 +627,7 @@ int cmd_show(int argc, const char **argv, const char *prefix)
break;
case OBJ_TAG: {
struct tag *t = (struct tag *)o;
+ struct object_id *oid = get_tagged_oid(t);
if (rev.shown_one)
putchar('\n');
@@ -638,10 +639,10 @@ int cmd_show(int argc, const char **argv, const char *prefix)
rev.shown_one = 1;
if (ret)
break;
- o = parse_object(the_repository, &t->tagged->oid);
+ o = parse_object(the_repository, oid);
if (!o)
ret = error(_("could not read object %s"),
- oid_to_hex(&t->tagged->oid));
+ oid_to_hex(oid));
objects[i].item = o;
i--;
break;
diff --git a/builtin/ls-files.c b/builtin/ls-files.c
index 670e8fb93c..f069a028ce 100644
--- a/builtin/ls-files.c
+++ b/builtin/ls-files.c
@@ -492,7 +492,7 @@ static int option_parse_exclude_from(const struct option *opt,
BUG_ON_OPT_NEG(unset);
exc_given = 1;
- add_excludes_from_file(dir, arg);
+ add_patterns_from_file(dir, arg);
return 0;
}
@@ -516,7 +516,7 @@ int cmd_ls_files(int argc, const char **argv, const char *cmd_prefix)
int require_work_tree = 0, show_tag = 0, i;
const char *max_prefix;
struct dir_struct dir;
- struct exclude_list *el;
+ struct pattern_list *pl;
struct string_list exclude_list = STRING_LIST_INIT_NODUP;
struct option builtin_ls_files_options[] = {
/* Think twice before adding "--nul" synonym to this */
@@ -594,9 +594,9 @@ int cmd_ls_files(int argc, const char **argv, const char *cmd_prefix)
argc = parse_options(argc, argv, prefix, builtin_ls_files_options,
ls_files_usage, 0);
- el = add_exclude_list(&dir, EXC_CMDL, "--exclude option");
+ pl = add_pattern_list(&dir, EXC_CMDL, "--exclude option");
for (i = 0; i < exclude_list.nr; i++) {
- add_exclude(exclude_list.items[i].string, "", 0, el, --exclude_args);
+ add_pattern(exclude_list.items[i].string, "", 0, pl, --exclude_args);
}
if (show_tag || show_valid_bit || show_fsmonitor_bit) {
tag_cached = "H ";
diff --git a/builtin/merge-recursive.c b/builtin/merge-recursive.c
index 5b910e351e..a4bfd8fc51 100644
--- a/builtin/merge-recursive.c
+++ b/builtin/merge-recursive.c
@@ -1,3 +1,4 @@
+#include "cache.h"
#include "builtin.h"
#include "commit.h"
#include "tag.h"
@@ -63,6 +64,9 @@ int cmd_merge_recursive(int argc, const char **argv, const char *prefix)
if (argc - i != 3) /* "--" "<head>" "<remote>" */
die(_("not handling anything other than two heads merge."));
+ if (repo_read_index_unmerged(the_repository))
+ die_resolve_conflict("merge");
+
o.branch1 = argv[++i];
o.branch2 = argv[++i];
diff --git a/builtin/merge-tree.c b/builtin/merge-tree.c
index 97b54caeb9..e72714a5a8 100644
--- a/builtin/merge-tree.c
+++ b/builtin/merge-tree.c
@@ -180,8 +180,9 @@ static struct merge_list *create_entry(unsigned stage, unsigned mode, const stru
static char *traverse_path(const struct traverse_info *info, const struct name_entry *n)
{
- char *path = xmallocz(traverse_path_len(info, n) + the_hash_algo->rawsz);
- return make_traverse_path(path, info, n);
+ struct strbuf buf = STRBUF_INIT;
+ strbuf_make_traverse_path(&buf, info, n->path, n->pathlen);
+ return strbuf_detach(&buf, NULL);
}
static void resolve(const struct traverse_info *info, struct name_entry *ours, struct name_entry *result)
diff --git a/builtin/merge.c b/builtin/merge.c
index e2ccbc44e2..062e911441 100644
--- a/builtin/merge.c
+++ b/builtin/merge.c
@@ -81,7 +81,7 @@ static int show_progress = -1;
static int default_to_upstream = 1;
static int signoff;
static const char *sign_commit;
-static int verify_msg = 1;
+static int no_verify;
static struct strategy all_strategy[] = {
{ "recursive", DEFAULT_TWOHEAD | NO_TRIVIAL },
@@ -287,7 +287,7 @@ static struct option builtin_merge_options[] = {
N_("GPG sign commit"), PARSE_OPT_OPTARG, NULL, (intptr_t) "" },
OPT_BOOL(0, "overwrite-ignore", &overwrite_ignore, N_("update ignored files (default)")),
OPT_BOOL(0, "signoff", &signoff, N_("add Signed-off-by:")),
- OPT_BOOL(0, "verify", &verify_msg, N_("verify commit-msg hook")),
+ OPT_BOOL(0, "no-verify", &no_verify, N_("bypass pre-merge-commit and commit-msg hooks")),
OPT_END()
};
@@ -688,16 +688,13 @@ static int try_merge_strategy(const char *strategy, struct commit_list *common,
struct commit_list *remoteheads,
struct commit *head)
{
- struct lock_file lock = LOCK_INIT;
const char *head_arg = "HEAD";
- hold_locked_index(&lock, LOCK_DIE_ON_ERROR);
- refresh_cache(REFRESH_QUIET);
- if (write_locked_index(&the_index, &lock,
- COMMIT_LOCK | SKIP_IF_UNCHANGED))
+ if (refresh_and_write_cache(REFRESH_QUIET, SKIP_IF_UNCHANGED, 0) < 0)
return error(_("Unable to write index."));
if (!strcmp(strategy, "recursive") || !strcmp(strategy, "subtree")) {
+ struct lock_file lock = LOCK_INIT;
int clean, x;
struct commit *result;
struct commit_list *reversed = NULL;
@@ -816,6 +813,18 @@ static void write_merge_heads(struct commit_list *);
static void prepare_to_commit(struct commit_list *remoteheads)
{
struct strbuf msg = STRBUF_INIT;
+ const char *index_file = get_index_file();
+
+ if (!no_verify && run_commit_hook(0 < option_edit, index_file, "pre-merge-commit", NULL))
+ abort_commit(remoteheads, NULL);
+ /*
+ * Re-read the index as pre-merge-commit hook could have updated it,
+ * and write it out as a tree. We must do this before we invoke
+ * the editor and after we invoke run_status above.
+ */
+ if (find_hook("pre-merge-commit"))
+ discard_cache();
+ read_cache_from(index_file);
strbuf_addbuf(&msg, &merge_msg);
if (squash)
BUG("the control must not reach here under --squash");
@@ -842,7 +851,7 @@ static void prepare_to_commit(struct commit_list *remoteheads)
abort_commit(remoteheads, NULL);
}
- if (verify_msg && run_commit_hook(0 < option_edit, get_index_file(),
+ if (!no_verify && run_commit_hook(0 < option_edit, get_index_file(),
"commit-msg",
git_path_merge_msg(the_repository), NULL))
abort_commit(remoteheads, NULL);
@@ -860,12 +869,8 @@ static int merge_trivial(struct commit *head, struct commit_list *remoteheads)
{
struct object_id result_tree, result_commit;
struct commit_list *parents, **pptr = &parents;
- struct lock_file lock = LOCK_INIT;
- hold_locked_index(&lock, LOCK_DIE_ON_ERROR);
- refresh_cache(REFRESH_QUIET);
- if (write_locked_index(&the_index, &lock,
- COMMIT_LOCK | SKIP_IF_UNCHANGED))
+ if (refresh_and_write_cache(REFRESH_QUIET, SKIP_IF_UNCHANGED, 0) < 0)
return error(_("Unable to write index."));
write_tree_trivial(&result_tree);
diff --git a/builtin/name-rev.c b/builtin/name-rev.c
index c785fe16ba..b0f0776947 100644
--- a/builtin/name-rev.c
+++ b/builtin/name-rev.c
@@ -9,7 +9,11 @@
#include "sha1-lookup.h"
#include "commit-slab.h"
-#define CUTOFF_DATE_SLOP 86400 /* one day */
+/*
+ * One day. See the 'name a rev shortly after epoch' test in t6120 when
+ * changing this value
+ */
+#define CUTOFF_DATE_SLOP 86400
typedef struct rev_name {
const char *tip_name;
@@ -481,8 +485,13 @@ int cmd_name_rev(int argc, const char **argv, const char *prefix)
add_object_array(object, *argv, &revs);
}
- if (cutoff)
- cutoff = cutoff - CUTOFF_DATE_SLOP;
+ if (cutoff) {
+ /* check for underflow */
+ if (cutoff > TIME_MIN + CUTOFF_DATE_SLOP)
+ cutoff = cutoff - CUTOFF_DATE_SLOP;
+ else
+ cutoff = TIME_MIN;
+ }
for_each_ref(name_ref, &data);
if (transform_stdin) {
diff --git a/builtin/pack-objects.c b/builtin/pack-objects.c
index 76ce906946..5876583220 100644
--- a/builtin/pack-objects.c
+++ b/builtin/pack-objects.c
@@ -610,12 +610,12 @@ static int mark_tagged(const char *path, const struct object_id *oid, int flag,
void *cb_data)
{
struct object_id peeled;
- struct object_entry *entry = packlist_find(&to_pack, oid, NULL);
+ struct object_entry *entry = packlist_find(&to_pack, oid);
if (entry)
entry->tagged = 1;
if (!peel_ref(path, &peeled)) {
- entry = packlist_find(&to_pack, &peeled, NULL);
+ entry = packlist_find(&to_pack, &peeled);
if (entry)
entry->tagged = 1;
}
@@ -996,12 +996,11 @@ static int no_try_delta(const char *path)
* few lines later when we want to add the new entry.
*/
static int have_duplicate_entry(const struct object_id *oid,
- int exclude,
- uint32_t *index_pos)
+ int exclude)
{
struct object_entry *entry;
- entry = packlist_find(&to_pack, oid, index_pos);
+ entry = packlist_find(&to_pack, oid);
if (!entry)
return 0;
@@ -1141,13 +1140,12 @@ static void create_object_entry(const struct object_id *oid,
uint32_t hash,
int exclude,
int no_try_delta,
- uint32_t index_pos,
struct packed_git *found_pack,
off_t found_offset)
{
struct object_entry *entry;
- entry = packlist_alloc(&to_pack, oid->hash, index_pos);
+ entry = packlist_alloc(&to_pack, oid);
entry->hash = hash;
oe_set_type(entry, type);
if (exclude)
@@ -1171,11 +1169,10 @@ static int add_object_entry(const struct object_id *oid, enum object_type type,
{
struct packed_git *found_pack = NULL;
off_t found_offset = 0;
- uint32_t index_pos;
display_progress(progress_state, ++nr_seen);
- if (have_duplicate_entry(oid, exclude, &index_pos))
+ if (have_duplicate_entry(oid, exclude))
return 0;
if (!want_object_in_pack(oid, exclude, &found_pack, &found_offset)) {
@@ -1190,7 +1187,7 @@ static int add_object_entry(const struct object_id *oid, enum object_type type,
create_object_entry(oid, type, pack_name_hash(name),
exclude, name && no_try_delta(name),
- index_pos, found_pack, found_offset);
+ found_pack, found_offset);
return 1;
}
@@ -1199,17 +1196,15 @@ static int add_object_entry_from_bitmap(const struct object_id *oid,
int flags, uint32_t name_hash,
struct packed_git *pack, off_t offset)
{
- uint32_t index_pos;
-
display_progress(progress_state, ++nr_seen);
- if (have_duplicate_entry(oid, 0, &index_pos))
+ if (have_duplicate_entry(oid, 0))
return 0;
if (!want_object_in_pack(oid, 0, &pack, &offset))
return 0;
- create_object_entry(oid, type, name_hash, 0, 0, index_pos, pack, offset);
+ create_object_entry(oid, type, name_hash, 0, 0, pack, offset);
return 1;
}
@@ -1507,7 +1502,7 @@ static int can_reuse_delta(const unsigned char *base_sha1,
* First see if we're already sending the base (or it's explicitly in
* our "excluded" list).
*/
- base = packlist_find(&to_pack, &base_oid, NULL);
+ base = packlist_find(&to_pack, &base_oid);
if (base) {
if (!in_same_island(&delta->idx.oid, &base->idx.oid))
return 0;
@@ -2342,15 +2337,6 @@ static void find_deltas(struct object_entry **list, unsigned *list_size,
free(array);
}
-static void try_to_free_from_threads(size_t size)
-{
- packing_data_lock(&to_pack);
- release_pack_memory(size);
- packing_data_unlock(&to_pack);
-}
-
-static try_to_free_t old_try_to_free_routine;
-
/*
* The main object list is split into smaller lists, each is handed to
* one worker.
@@ -2391,12 +2377,10 @@ static void init_threaded_search(void)
pthread_mutex_init(&cache_mutex, NULL);
pthread_mutex_init(&progress_mutex, NULL);
pthread_cond_init(&progress_cond, NULL);
- old_try_to_free_routine = set_try_to_free_routine(try_to_free_from_threads);
}
static void cleanup_threaded_search(void)
{
- set_try_to_free_routine(old_try_to_free_routine);
pthread_cond_destroy(&progress_cond);
pthread_mutex_destroy(&cache_mutex);
pthread_mutex_destroy(&progress_mutex);
@@ -2579,7 +2563,7 @@ static void add_tag_chain(const struct object_id *oid)
* it was included via bitmaps, we would not have parsed it
* previously).
*/
- if (packlist_find(&to_pack, oid, NULL))
+ if (packlist_find(&to_pack, oid))
return;
tag = lookup_tag(the_repository, oid);
@@ -2603,7 +2587,7 @@ static int add_ref_tag(const char *path, const struct object_id *oid, int flag,
if (starts_with(path, "refs/tags/") && /* is a tag? */
!peel_ref(path, &peeled) && /* peelable? */
- packlist_find(&to_pack, &peeled, NULL)) /* object packed? */
+ packlist_find(&to_pack, &peeled)) /* object packed? */
add_tag_chain(oid);
return 0;
}
@@ -2715,10 +2699,6 @@ static int git_pack_config(const char *k, const char *v, void *cb)
use_bitmap_index_default = git_config_bool(k, v);
return 0;
}
- if (!strcmp(k, "pack.usesparse")) {
- sparse = git_config_bool(k, v);
- return 0;
- }
if (!strcmp(k, "pack.threads")) {
delta_search_threads = git_config_int(k, v);
if (delta_search_threads < 0)
@@ -2803,7 +2783,7 @@ static void show_object(struct object *obj, const char *name, void *data)
for (p = strchr(name, '/'); p; p = strchr(p + 1, '/'))
depth++;
- ent = packlist_find(&to_pack, &obj->oid, NULL);
+ ent = packlist_find(&to_pack, &obj->oid);
if (ent && depth > oe_tree_depth(&to_pack, ent))
oe_set_tree_depth(&to_pack, ent, depth);
}
@@ -3034,7 +3014,7 @@ static void loosen_unused_packed_objects(void)
for (i = 0; i < p->num_objects; i++) {
nth_packed_object_oid(&oid, p, i);
- if (!packlist_find(&to_pack, &oid, NULL) &&
+ if (!packlist_find(&to_pack, &oid) &&
!has_sha1_pack_kept_or_nonlocal(&oid) &&
!loosened_object_can_be_discarded(&oid, p->mtime))
if (force_object_loose(&oid, p->mtime))
@@ -3343,6 +3323,10 @@ int cmd_pack_objects(int argc, const char **argv, const char *prefix)
read_replace_refs = 0;
sparse = git_env_bool("GIT_TEST_PACK_SPARSE", 0);
+ prepare_repo_settings(the_repository);
+ if (!sparse && the_repository->settings.pack_use_sparse != -1)
+ sparse = the_repository->settings.pack_use_sparse;
+
reset_pack_idx_option(&pack_idx_opts);
git_config(git_pack_config, NULL);
diff --git a/builtin/patch-id.c b/builtin/patch-id.c
index bd28b80b2d..3059e525b8 100644
--- a/builtin/patch-id.c
+++ b/builtin/patch-id.c
@@ -1,15 +1,16 @@
+#include "cache.h"
#include "builtin.h"
#include "config.h"
#include "diff.h"
static void flush_current_id(int patchlen, struct object_id *id, struct object_id *result)
{
- char name[50];
+ char name[GIT_MAX_HEXSZ + 1];
if (!patchlen)
return;
- memcpy(name, oid_to_hex(id), GIT_SHA1_HEXSZ + 1);
+ memcpy(name, oid_to_hex(id), the_hash_algo->hexsz + 1);
printf("%s %s\n", oid_to_hex(result), name);
}
@@ -60,9 +61,9 @@ static int get_one_patchid(struct object_id *next_oid, struct object_id *result,
{
int patchlen = 0, found_next = 0;
int before = -1, after = -1;
- git_SHA_CTX ctx;
+ git_hash_ctx ctx;
- git_SHA1_Init(&ctx);
+ the_hash_algo->init_fn(&ctx);
oidclr(result);
while (strbuf_getwholeline(line_buf, stdin, '\n') != EOF) {
@@ -122,7 +123,7 @@ static int get_one_patchid(struct object_id *next_oid, struct object_id *result,
/* Compute the sha without whitespace */
len = remove_space(line);
patchlen += len;
- git_SHA1_Update(&ctx, line, len);
+ the_hash_algo->update_fn(&ctx, line, len);
}
if (!found_next)
diff --git a/builtin/pull.c b/builtin/pull.c
index f1eaf6e6ed..d25ff13a60 100644
--- a/builtin/pull.c
+++ b/builtin/pull.c
@@ -129,6 +129,7 @@ static char *opt_refmap;
static char *opt_ipv4;
static char *opt_ipv6;
static int opt_show_forced_updates = -1;
+static char *set_upstream;
static struct option pull_options[] = {
/* Shared options */
@@ -243,6 +244,9 @@ static struct option pull_options[] = {
PARSE_OPT_NOARG),
OPT_BOOL(0, "show-forced-updates", &opt_show_forced_updates,
N_("check for forced-updates on all updated branches")),
+ OPT_PASSTHRU(0, "set-upstream", &set_upstream, NULL,
+ N_("set upstream for git pull/fetch"),
+ PARSE_OPT_NOARG),
OPT_END()
};
@@ -556,6 +560,8 @@ static int run_fetch(const char *repo, const char **refspecs)
argv_array_push(&args, "--show-forced-updates");
else if (opt_show_forced_updates == 0)
argv_array_push(&args, "--no-show-forced-updates");
+ if (set_upstream)
+ argv_array_push(&args, set_upstream);
if (repo) {
argv_array_push(&args, repo);
diff --git a/builtin/push.c b/builtin/push.c
index 021dd3b1e4..843f5b22a2 100644
--- a/builtin/push.c
+++ b/builtin/push.c
@@ -143,8 +143,8 @@ static int push_url_of_remote(struct remote *remote, const char ***url_p)
return remote->url_nr;
}
-static NORETURN int die_push_simple(struct branch *branch,
- struct remote *remote)
+static NORETURN void die_push_simple(struct branch *branch,
+ struct remote *remote)
{
/*
* There's no point in using shorten_unambiguous_ref here,
@@ -357,8 +357,10 @@ static int push_with_options(struct transport *transport, struct refspec *rs,
if (verbosity > 0)
fprintf(stderr, _("Pushing to %s\n"), transport->url);
+ trace2_region_enter("push", "transport_push", the_repository);
err = transport_push(the_repository, transport,
rs, flags, &reject_reasons);
+ trace2_region_leave("push", "transport_push", the_repository);
if (err != 0) {
fprintf(stderr, "%s", push_get_color(PUSH_COLOR_ERROR));
error(_("failed to push some refs to '%s'"), transport->url);
@@ -385,30 +387,14 @@ static int push_with_options(struct transport *transport, struct refspec *rs,
}
static int do_push(const char *repo, int flags,
- const struct string_list *push_options)
+ const struct string_list *push_options,
+ struct remote *remote)
{
int i, errs;
- struct remote *remote = pushremote_get(repo);
const char **url;
int url_nr;
struct refspec *push_refspec = &rs;
- if (!remote) {
- if (repo)
- die(_("bad repository '%s'"), repo);
- die(_("No configured push destination.\n"
- "Either specify the URL from the command-line or configure a remote repository using\n"
- "\n"
- " git remote add <name> <url>\n"
- "\n"
- "and then push using the remote name\n"
- "\n"
- " git push <name>\n"));
- }
-
- if (remote->mirror)
- flags |= (TRANSPORT_PUSH_MIRROR|TRANSPORT_PUSH_FORCE);
-
if (push_options->nr)
flags |= TRANSPORT_PUSH_OPTIONS;
@@ -548,6 +534,7 @@ int cmd_push(int argc, const char **argv, const char *prefix)
struct string_list push_options_cmdline = STRING_LIST_INIT_DUP;
struct string_list *push_options;
const struct string_list_item *item;
+ struct remote *remote;
struct option options[] = {
OPT__VERBOSITY(&verbosity),
@@ -602,20 +589,6 @@ int cmd_push(int argc, const char **argv, const char *prefix)
die(_("--delete is incompatible with --all, --mirror and --tags"));
if (deleterefs && argc < 2)
die(_("--delete doesn't make sense without any refs"));
- if (flags & TRANSPORT_PUSH_ALL) {
- if (tags)
- die(_("--all and --tags are incompatible"));
- if (argc >= 2)
- die(_("--all can't be combined with refspecs"));
- }
- if (flags & TRANSPORT_PUSH_MIRROR) {
- if (tags)
- die(_("--mirror and --tags are incompatible"));
- if (argc >= 2)
- die(_("--mirror can't be combined with refspecs"));
- }
- if ((flags & TRANSPORT_PUSH_ALL) && (flags & TRANSPORT_PUSH_MIRROR))
- die(_("--all and --mirror are incompatible"));
if (recurse_submodules == RECURSE_SUBMODULES_CHECK)
flags |= TRANSPORT_RECURSE_SUBMODULES_CHECK;
@@ -632,11 +605,43 @@ int cmd_push(int argc, const char **argv, const char *prefix)
set_refspecs(argv + 1, argc - 1, repo);
}
+ remote = pushremote_get(repo);
+ if (!remote) {
+ if (repo)
+ die(_("bad repository '%s'"), repo);
+ die(_("No configured push destination.\n"
+ "Either specify the URL from the command-line or configure a remote repository using\n"
+ "\n"
+ " git remote add <name> <url>\n"
+ "\n"
+ "and then push using the remote name\n"
+ "\n"
+ " git push <name>\n"));
+ }
+
+ if (remote->mirror)
+ flags |= (TRANSPORT_PUSH_MIRROR|TRANSPORT_PUSH_FORCE);
+
+ if (flags & TRANSPORT_PUSH_ALL) {
+ if (tags)
+ die(_("--all and --tags are incompatible"));
+ if (argc >= 2)
+ die(_("--all can't be combined with refspecs"));
+ }
+ if (flags & TRANSPORT_PUSH_MIRROR) {
+ if (tags)
+ die(_("--mirror and --tags are incompatible"));
+ if (argc >= 2)
+ die(_("--mirror can't be combined with refspecs"));
+ }
+ if ((flags & TRANSPORT_PUSH_ALL) && (flags & TRANSPORT_PUSH_MIRROR))
+ die(_("--all and --mirror are incompatible"));
+
for_each_string_list_item(item, push_options)
if (strchr(item->string, '\n'))
die(_("push options must not have new line characters"));
- rc = do_push(repo, flags, push_options);
+ rc = do_push(repo, flags, push_options, remote);
string_list_clear(&push_options_cmdline, 0);
string_list_clear(&push_options_config, 0);
if (rc == -1)
diff --git a/builtin/rebase.c b/builtin/rebase.c
index 670096c065..4a20582e72 100644
--- a/builtin/rebase.c
+++ b/builtin/rebase.c
@@ -29,8 +29,8 @@
#include "rebase-interactive.h"
static char const * const builtin_rebase_usage[] = {
- N_("git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] "
- "[<upstream>] [<branch>]"),
+ N_("git rebase [-i] [options] [--exec <cmd>] "
+ "[--onto <newbase> | --keep-base] [<upstream> [<branch>]]"),
N_("git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] "
"--root [<branch>]"),
N_("git rebase --continue | --abort | --skip | --edit-todo"),
@@ -62,7 +62,7 @@ struct rebase_options {
const char *onto_name;
const char *revisions;
const char *switch_to;
- int root;
+ int root, root_with_onto;
struct object_id *squash_onto;
struct commit *restrict_revision;
int dont_finish_rebase;
@@ -374,6 +374,7 @@ static int run_rebase_interactive(struct rebase_options *opts,
flags |= abbreviate_commands ? TODO_LIST_ABBREVIATE_CMDS : 0;
flags |= opts->rebase_merges ? TODO_LIST_REBASE_MERGES : 0;
flags |= opts->rebase_cousins > 0 ? TODO_LIST_REBASE_COUSINS : 0;
+ flags |= opts->root_with_onto ? TODO_LIST_ROOT_WITH_ONTO : 0;
flags |= command == ACTION_SHORTEN_OIDS ? TODO_LIST_SHORTEN_IDS : 0;
switch (command) {
@@ -1260,24 +1261,44 @@ static int is_linear_history(struct commit *from, struct commit *to)
return 1;
}
-static int can_fast_forward(struct commit *onto, struct object_id *head_oid,
- struct object_id *merge_base)
+static int can_fast_forward(struct commit *onto, struct commit *upstream,
+ struct commit *restrict_revision,
+ struct object_id *head_oid, struct object_id *merge_base)
{
struct commit *head = lookup_commit(the_repository, head_oid);
- struct commit_list *merge_bases;
- int res;
+ struct commit_list *merge_bases = NULL;
+ int res = 0;
if (!head)
- return 0;
+ goto done;
merge_bases = get_merge_bases(onto, head);
- if (merge_bases && !merge_bases->next) {
- oidcpy(merge_base, &merge_bases->item->object.oid);
- res = oideq(merge_base, &onto->object.oid);
- } else {
+ if (!merge_bases || merge_bases->next) {
oidcpy(merge_base, &null_oid);
- res = 0;
+ goto done;
}
+
+ oidcpy(merge_base, &merge_bases->item->object.oid);
+ if (!oideq(merge_base, &onto->object.oid))
+ goto done;
+
+ if (restrict_revision && !oideq(&restrict_revision->object.oid, merge_base))
+ goto done;
+
+ if (!upstream)
+ goto done;
+
+ free_commit_list(merge_bases);
+ merge_bases = get_merge_bases(upstream, head);
+ if (!merge_bases || merge_bases->next)
+ goto done;
+
+ if (!oideq(&onto->object.oid, &merge_bases->item->object.oid))
+ goto done;
+
+ res = 1;
+
+done:
free_commit_list(merge_bases);
return res && is_linear_history(onto, head);
}
@@ -1376,6 +1397,7 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
struct rebase_options options = REBASE_OPTIONS_INIT;
const char *branch_name;
int ret, flags, total_argc, in_progress = 0;
+ int keep_base = 0;
int ok_to_skip_pre_rebase = 0;
struct strbuf msg = STRBUF_INIT;
struct strbuf revisions = STRBUF_INIT;
@@ -1394,6 +1416,8 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
OPT_STRING(0, "onto", &options.onto_name,
N_("revision"),
N_("rebase onto given branch instead of upstream")),
+ OPT_BOOL(0, "keep-base", &keep_base,
+ N_("use the merge-base of upstream and branch as the current base")),
OPT_BOOL(0, "no-verify", &ok_to_skip_pre_rebase,
N_("allow pre-rebase hook to run")),
OPT_NEGBIT('q', "quiet", &options.flags,
@@ -1547,6 +1571,13 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
warning(_("git rebase --preserve-merges is deprecated. "
"Use --rebase-merges instead."));
+ if (keep_base) {
+ if (options.onto_name)
+ die(_("cannot combine '--keep-base' with '--onto'"));
+ if (options.root)
+ die(_("cannot combine '--keep-base' with '--root'"));
+ }
+
if (action != ACTION_NONE && !in_progress)
die(_("No rebase in progress?"));
setenv(GIT_REFLOG_ACTION_ENVIRONMENT, "rebase", 0);
@@ -1833,15 +1864,6 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
"'--reschedule-failed-exec'"));
}
- if (options.rebase_merges) {
- if (strategy_options.nr)
- die(_("cannot combine '--rebase-merges' with "
- "'--strategy-option'"));
- if (options.strategy)
- die(_("cannot combine '--rebase-merges' with "
- "'--strategy'"));
- }
-
if (!options.root) {
if (argc < 1) {
struct branch *branch;
@@ -1872,7 +1894,9 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
options.squash_onto = &squash_onto;
options.onto_name = squash_onto_name =
xstrdup(oid_to_hex(&squash_onto));
- }
+ } else
+ options.root_with_onto = 1;
+
options.upstream_name = NULL;
options.upstream = NULL;
if (argc > 1)
@@ -1882,12 +1906,22 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
}
/* Make sure the branch to rebase onto is valid. */
- if (!options.onto_name)
+ if (keep_base) {
+ strbuf_reset(&buf);
+ strbuf_addstr(&buf, options.upstream_name);
+ strbuf_addstr(&buf, "...");
+ options.onto_name = xstrdup(buf.buf);
+ } else if (!options.onto_name)
options.onto_name = options.upstream_name;
if (strstr(options.onto_name, "...")) {
- if (get_oid_mb(options.onto_name, &merge_base) < 0)
- die(_("'%s': need exactly one merge base"),
- options.onto_name);
+ if (get_oid_mb(options.onto_name, &merge_base) < 0) {
+ if (keep_base)
+ die(_("'%s': need exactly one merge base with branch"),
+ options.upstream_name);
+ else
+ die(_("'%s': need exactly one merge base"),
+ options.onto_name);
+ }
options.onto = lookup_commit_or_die(&merge_base,
options.onto_name);
} else {
@@ -1968,9 +2002,6 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
state_dir_path("autostash", &options);
struct child_process stash = CHILD_PROCESS_INIT;
struct object_id oid;
- struct commit *head =
- lookup_commit_reference(the_repository,
- &options.orig_head);
argv_array_pushl(&stash.args,
"stash", "create", "autostash", NULL);
@@ -1991,17 +2022,9 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
options.state_dir);
write_file(autostash, "%s", oid_to_hex(&oid));
printf(_("Created autostash: %s\n"), buf.buf);
- if (reset_head(&head->object.oid, "reset --hard",
+ if (reset_head(NULL, "reset --hard",
NULL, RESET_HEAD_HARD, NULL, NULL) < 0)
die(_("could not reset --hard"));
- printf(_("HEAD is now at %s"),
- find_unique_abbrev(&head->object.oid,
- DEFAULT_ABBREV));
- strbuf_reset(&buf);
- pp_commit_easy(CMIT_FMT_ONELINE, head, &buf);
- if (buf.len > 0)
- printf(" %s", buf.buf);
- putchar('\n');
if (discard_index(the_repository->index) < 0 ||
repo_read_index(the_repository) < 0)
@@ -2022,13 +2045,13 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
/*
* Check if we are already based on onto with linear history,
- * but this should be done only when upstream and onto are the same
- * and if this is not an interactive rebase.
+ * in which case we could fast-forward without replacing the commits
+ * with new commits recreated by replaying their changes. This
+ * optimization must not be done if this is an interactive rebase.
*/
- if (can_fast_forward(options.onto, &options.orig_head, &merge_base) &&
- !is_interactive(&options) && !options.restrict_revision &&
- options.upstream &&
- !oidcmp(&options.upstream->object.oid, &options.onto->object.oid)) {
+ if (can_fast_forward(options.onto, options.upstream, options.restrict_revision,
+ &options.orig_head, &merge_base) &&
+ !is_interactive(&options)) {
int flag;
if (!(options.flags & REBASE_FORCE)) {
diff --git a/builtin/receive-pack.c b/builtin/receive-pack.c
index dcf385511f..411e0b4d99 100644
--- a/builtin/receive-pack.c
+++ b/builtin/receive-pack.c
@@ -417,24 +417,22 @@ static int copy_to_sideband(int in, int out, void *arg)
return 0;
}
-#define HMAC_BLOCK_SIZE 64
-
-static void hmac_sha1(unsigned char *out,
+static void hmac(unsigned char *out,
const char *key_in, size_t key_len,
const char *text, size_t text_len)
{
- unsigned char key[HMAC_BLOCK_SIZE];
- unsigned char k_ipad[HMAC_BLOCK_SIZE];
- unsigned char k_opad[HMAC_BLOCK_SIZE];
+ unsigned char key[GIT_MAX_BLKSZ];
+ unsigned char k_ipad[GIT_MAX_BLKSZ];
+ unsigned char k_opad[GIT_MAX_BLKSZ];
int i;
- git_SHA_CTX ctx;
+ git_hash_ctx ctx;
/* RFC 2104 2. (1) */
- memset(key, '\0', HMAC_BLOCK_SIZE);
- if (HMAC_BLOCK_SIZE < key_len) {
- git_SHA1_Init(&ctx);
- git_SHA1_Update(&ctx, key_in, key_len);
- git_SHA1_Final(key, &ctx);
+ memset(key, '\0', GIT_MAX_BLKSZ);
+ if (the_hash_algo->blksz < key_len) {
+ the_hash_algo->init_fn(&ctx);
+ the_hash_algo->update_fn(&ctx, key_in, key_len);
+ the_hash_algo->final_fn(key, &ctx);
} else {
memcpy(key, key_in, key_len);
}
@@ -446,29 +444,29 @@ static void hmac_sha1(unsigned char *out,
}
/* RFC 2104 2. (3) & (4) */
- git_SHA1_Init(&ctx);
- git_SHA1_Update(&ctx, k_ipad, sizeof(k_ipad));
- git_SHA1_Update(&ctx, text, text_len);
- git_SHA1_Final(out, &ctx);
+ the_hash_algo->init_fn(&ctx);
+ the_hash_algo->update_fn(&ctx, k_ipad, sizeof(k_ipad));
+ the_hash_algo->update_fn(&ctx, text, text_len);
+ the_hash_algo->final_fn(out, &ctx);
/* RFC 2104 2. (6) & (7) */
- git_SHA1_Init(&ctx);
- git_SHA1_Update(&ctx, k_opad, sizeof(k_opad));
- git_SHA1_Update(&ctx, out, GIT_SHA1_RAWSZ);
- git_SHA1_Final(out, &ctx);
+ the_hash_algo->init_fn(&ctx);
+ the_hash_algo->update_fn(&ctx, k_opad, sizeof(k_opad));
+ the_hash_algo->update_fn(&ctx, out, the_hash_algo->rawsz);
+ the_hash_algo->final_fn(out, &ctx);
}
static char *prepare_push_cert_nonce(const char *path, timestamp_t stamp)
{
struct strbuf buf = STRBUF_INIT;
- unsigned char sha1[GIT_SHA1_RAWSZ];
+ unsigned char hash[GIT_MAX_RAWSZ];
strbuf_addf(&buf, "%s:%"PRItime, path, stamp);
- hmac_sha1(sha1, buf.buf, buf.len, cert_nonce_seed, strlen(cert_nonce_seed));
+ hmac(hash, buf.buf, buf.len, cert_nonce_seed, strlen(cert_nonce_seed));
strbuf_release(&buf);
/* RFC 2104 5. HMAC-SHA1-80 */
- strbuf_addf(&buf, "%"PRItime"-%.*s", stamp, GIT_SHA1_HEXSZ, sha1_to_hex(sha1));
+ strbuf_addf(&buf, "%"PRItime"-%.*s", stamp, (int)the_hash_algo->hexsz, hash_to_hex(hash));
return strbuf_detach(&buf, NULL);
}
@@ -970,7 +968,7 @@ static const char *push_to_deploy(unsigned char *sha1,
if (run_command(&child))
return "Working directory has staged changes";
- read_tree[3] = sha1_to_hex(sha1);
+ read_tree[3] = hash_to_hex(sha1);
child_process_init(&child);
child.argv = read_tree;
child.env = env->argv;
@@ -987,13 +985,13 @@ static const char *push_to_deploy(unsigned char *sha1,
static const char *push_to_checkout_hook = "push-to-checkout";
-static const char *push_to_checkout(unsigned char *sha1,
+static const char *push_to_checkout(unsigned char *hash,
struct argv_array *env,
const char *work_tree)
{
argv_array_pushf(env, "GIT_WORK_TREE=%s", absolute_path(work_tree));
if (run_hook_le(env->argv, push_to_checkout_hook,
- sha1_to_hex(sha1), NULL))
+ hash_to_hex(hash), NULL))
return "push-to-checkout hook declined";
else
return NULL;
diff --git a/builtin/repack.c b/builtin/repack.c
index 632c0c0a79..094c2f8ea4 100644
--- a/builtin/repack.c
+++ b/builtin/repack.c
@@ -11,6 +11,7 @@
#include "midx.h"
#include "packfile.h"
#include "object-store.h"
+#include "promisor-remote.h"
static int delta_base_offset = 1;
static int pack_kept_objects = -1;
@@ -190,7 +191,7 @@ static int write_oid(const struct object_id *oid, struct packed_git *pack,
die(_("could not start pack-objects to repack promisor objects"));
}
- xwrite(cmd->in, oid_to_hex(oid), GIT_SHA1_HEXSZ);
+ xwrite(cmd->in, oid_to_hex(oid), the_hash_algo->hexsz);
xwrite(cmd->in, "\n", 1);
return 0;
}
@@ -361,7 +362,7 @@ int cmd_repack(int argc, const char **argv, const char *prefix)
argv_array_push(&cmd.args, "--all");
argv_array_push(&cmd.args, "--reflog");
argv_array_push(&cmd.args, "--indexed-objects");
- if (repository_format_partial_clone)
+ if (has_promisor_remote())
argv_array_push(&cmd.args, "--exclude-promisor-objects");
if (write_bitmaps > 0)
argv_array_push(&cmd.args, "--write-bitmap-index");
diff --git a/builtin/replace.c b/builtin/replace.c
index 644b21ca8d..bd92dc63b9 100644
--- a/builtin/replace.c
+++ b/builtin/replace.c
@@ -272,7 +272,7 @@ static int import_object(struct object_id *oid, enum object_type type,
return error(_("unable to spawn mktree"));
}
- if (strbuf_read(&result, cmd.out, 41) < 0) {
+ if (strbuf_read(&result, cmd.out, the_hash_algo->hexsz + 1) < 0) {
error_errno(_("unable to read from mktree"));
close(fd);
close(cmd.out);
@@ -358,14 +358,15 @@ static int replace_parents(struct strbuf *buf, int argc, const char **argv)
struct strbuf new_parents = STRBUF_INIT;
const char *parent_start, *parent_end;
int i;
+ const unsigned hexsz = the_hash_algo->hexsz;
/* find existing parents */
parent_start = buf->buf;
- parent_start += GIT_SHA1_HEXSZ + 6; /* "tree " + "hex sha1" + "\n" */
+ parent_start += hexsz + 6; /* "tree " + "hex sha1" + "\n" */
parent_end = parent_start;
while (starts_with(parent_end, "parent "))
- parent_end += 48; /* "parent " + "hex sha1" + "\n" */
+ parent_end += hexsz + 8; /* "parent " + "hex sha1" + "\n" */
/* prepare new parents */
for (i = 0; i < argc; i++) {
@@ -421,7 +422,7 @@ static int check_one_mergetag(struct commit *commit,
if (get_oid(mergetag_data->argv[i], &oid) < 0)
return error(_("not a valid object name: '%s'"),
mergetag_data->argv[i]);
- if (oideq(&tag->tagged->oid, &oid))
+ if (oideq(get_tagged_oid(tag), &oid))
return 0; /* found */
}
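
The replace.c hunk above switches the hard-coded 40-character offsets to the_hash_algo->hexsz when stepping over the "tree " and "parent " lines of a raw commit buffer. A small self-contained illustration of that offset arithmetic; the commit buffer and the local starts_with() helper are fabricated for the sketch.

    /* "tree <hex>\n" is hexsz + 6 bytes, "parent <hex>\n" is hexsz + 8 bytes */
    #include <stdio.h>
    #include <string.h>

    static int starts_with(const char *s, const char *prefix)
    {
        return !strncmp(s, prefix, strlen(prefix));
    }

    int main(void)
    {
        /* fabricated commit header with a 40-hex (SHA-1 sized) object name */
        const char *buf =
            "tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n"
            "parent 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n"
            "author A U Thor <author@example.com> 1234567890 +0000\n";
        const unsigned hexsz = 40;                   /* would be 64 for SHA-256 */
        const char *parent_start = buf + hexsz + 6;  /* skip "tree <hex>\n" */
        const char *parent_end = parent_start;
        int nr = 0;

        while (starts_with(parent_end, "parent ")) {
            parent_end += hexsz + 8;                 /* skip "parent <hex>\n" */
            nr++;
        }
        printf("%d parent line(s), %td header bytes\n", nr, parent_end - buf);
        return 0;
    }
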
diff --git a/builtin/rev-list.c b/builtin/rev-list.c
index 301ccb970b..e28d62ec64 100644
--- a/builtin/rev-list.c
+++ b/builtin/rev-list.c
@@ -18,7 +18,6 @@
#include "reflog-walk.h"
#include "oidset.h"
#include "packfile.h"
-#include "object-store.h"
static const char rev_list_usage[] =
"git rev-list [OPTION] <commit-id>... [ -- paths... ]\n"
@@ -471,10 +470,6 @@ int cmd_rev_list(int argc, const char **argv, const char *prefix)
parse_list_objects_filter(&filter_options, arg);
if (filter_options.choice && !revs.blob_objects)
die(_("object filtering requires --objects"));
- if (filter_options.choice == LOFC_SPARSE_OID &&
- !filter_options.sparse_oid_value)
- die(_("invalid sparse value '%s'"),
- filter_options.filter_spec);
continue;
}
if (!strcmp(arg, ("--no-" CL_ARG__FILTER))) {
diff --git a/builtin/rev-parse.c b/builtin/rev-parse.c
index f8bbe6d47e..308c67e4fc 100644
--- a/builtin/rev-parse.c
+++ b/builtin/rev-parse.c
@@ -593,6 +593,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
const char *name = NULL;
struct object_context unused;
struct strbuf buf = STRBUF_INIT;
+ const int hexsz = the_hash_algo->hexsz;
if (argc > 1 && !strcmp("--parseopt", argv[1]))
return cmd_parseopt(argc - 1, argv + 1, prefix);
@@ -730,8 +731,8 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
abbrev = strtoul(arg, NULL, 10);
if (abbrev < MINIMUM_ABBREV)
abbrev = MINIMUM_ABBREV;
- else if (40 <= abbrev)
- abbrev = 40;
+ else if (hexsz <= abbrev)
+ abbrev = hexsz;
continue;
}
if (!strcmp(arg, "--sq")) {
diff --git a/builtin/show-index.c b/builtin/show-index.c
index a6e678809e..0826f6a5a2 100644
--- a/builtin/show-index.c
+++ b/builtin/show-index.c
@@ -11,6 +11,7 @@ int cmd_show_index(int argc, const char **argv, const char *prefix)
unsigned nr;
unsigned int version;
static unsigned int top_index[256];
+ const unsigned hashsz = the_hash_algo->rawsz;
if (argc != 1)
usage(show_index_usage);
@@ -36,23 +37,23 @@ int cmd_show_index(int argc, const char **argv, const char *prefix)
}
if (version == 1) {
for (i = 0; i < nr; i++) {
- unsigned int offset, entry[6];
+ unsigned int offset, entry[(GIT_MAX_RAWSZ + 4) / sizeof(unsigned int)];
- if (fread(entry, 4 + 20, 1, stdin) != 1)
+ if (fread(entry, 4 + hashsz, 1, stdin) != 1)
die("unable to read entry %u/%u", i, nr);
offset = ntohl(entry[0]);
- printf("%u %s\n", offset, sha1_to_hex((void *)(entry+1)));
+ printf("%u %s\n", offset, hash_to_hex((void *)(entry+1)));
}
} else {
unsigned off64_nr = 0;
struct {
- unsigned char sha1[20];
+ struct object_id oid;
uint32_t crc;
uint32_t off;
} *entries;
ALLOC_ARRAY(entries, nr);
for (i = 0; i < nr; i++)
- if (fread(entries[i].sha1, 20, 1, stdin) != 1)
+ if (fread(entries[i].oid.hash, hashsz, 1, stdin) != 1)
die("unable to read sha1 %u/%u", i, nr);
for (i = 0; i < nr; i++)
if (fread(&entries[i].crc, 4, 1, stdin) != 1)
@@ -77,7 +78,7 @@ int cmd_show_index(int argc, const char **argv, const char *prefix)
}
printf("%" PRIuMAX " %s (%08"PRIx32")\n",
(uintmax_t) offset,
- sha1_to_hex(entries[i].sha1),
+ oid_to_hex(&entries[i].oid),
ntohl(entries[i].crc));
}
free(entries);
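
show-index now sizes a version-1 index entry as 4 bytes of offset plus the_hash_algo->rawsz bytes of object name rather than a fixed 4 + 20. A standalone sketch of decoding one such entry; the entry bytes, the get_be32() helper, and the MAX_RAWSZ bound are fabricated stand-ins (a real reader would fread() the bytes exactly as the hunk does).

    #include <stdio.h>
    #include <string.h>

    #define MAX_RAWSZ 32

    static unsigned int get_be32(const unsigned char *p)
    {
        return ((unsigned int)p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3];
    }

    int main(void)
    {
        const unsigned hashsz = 20;          /* 32 in a SHA-256 repository */
        unsigned char entry[4 + MAX_RAWSZ];
        unsigned int offset;
        unsigned i;

        memset(entry, 0, sizeof(entry));
        entry[2] = 0x30; entry[3] = 0x39;    /* offset 12345, big-endian */
        for (i = 0; i < hashsz; i++)
            entry[4 + i] = (unsigned char)i; /* placeholder object name */

        offset = get_be32(entry);
        printf("%u ", offset);
        for (i = 0; i < hashsz; i++)
            printf("%02x", entry[4 + i]);
        putchar('\n');
        return 0;
    }
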
diff --git a/builtin/stash.c b/builtin/stash.c
index b5a301f24d..bb4f6d8d76 100644
--- a/builtin/stash.c
+++ b/builtin/stash.c
@@ -396,7 +396,7 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
const struct object_id *bases[1];
read_cache_preload(NULL);
- if (refresh_cache(REFRESH_QUIET))
+ if (refresh_and_write_cache(REFRESH_QUIET, 0, 0))
return -1;
if (write_cache_as_tree(&c_tree, 0, NULL))
@@ -427,6 +427,8 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
return error(_("could not save index tree"));
reset_head();
+ discard_cache();
+ read_cache();
}
}
@@ -485,7 +487,7 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
}
if (quiet) {
- if (refresh_cache(REFRESH_QUIET))
+ if (refresh_and_write_cache(REFRESH_QUIET, 0, 0))
warning("could not refresh index");
} else {
struct child_process cp = CHILD_PROCESS_INIT;
@@ -497,6 +499,10 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
*/
cp.git_cmd = 1;
cp.dir = prefix;
+ argv_array_pushf(&cp.env_array, GIT_WORK_TREE_ENVIRONMENT"=%s",
+ absolute_path(get_git_work_tree()));
+ argv_array_pushf(&cp.env_array, GIT_DIR_ENVIRONMENT"=%s",
+ absolute_path(get_git_dir()));
argv_array_push(&cp.args, "status");
run_command(&cp);
}
@@ -1129,7 +1135,10 @@ static int do_create_stash(const struct pathspec *ps, struct strbuf *stash_msg_b
prepare_fallback_ident("git stash", "git@stash");
read_cache_preload(NULL);
- refresh_cache(REFRESH_QUIET);
+ if (refresh_and_write_cache(REFRESH_QUIET, 0, 0) < 0) {
+ ret = -1;
+ goto done;
+ }
if (get_oid("HEAD", &info->b_commit)) {
if (!quiet)
@@ -1290,7 +1299,7 @@ static int do_push_stash(const struct pathspec *ps, const char *stash_msg, int q
free(ps_matched);
}
- if (refresh_cache(REFRESH_QUIET)) {
+ if (refresh_and_write_cache(REFRESH_QUIET, 0, 0)) {
ret = -1;
goto done;
}
diff --git a/builtin/submodule--helper.c b/builtin/submodule--helper.c
index 909e77e802..2c2395a620 100644
--- a/builtin/submodule--helper.c
+++ b/builtin/submodule--helper.c
@@ -424,7 +424,7 @@ static int module_list(int argc, const char **argv, const char *prefix)
const struct cache_entry *ce = list.entries[i];
if (ce_stage(ce))
- printf("%06o %s U\t", ce->ce_mode, sha1_to_hex(null_sha1));
+ printf("%06o %s U\t", ce->ce_mode, oid_to_hex(&null_oid));
else
printf("%06o %s %d\t", ce->ce_mode,
oid_to_hex(&ce->oid), ce_stage(ce));
@@ -1874,7 +1874,7 @@ static int update_clone(int argc, const char **argv, const char *prefix)
};
const char *const git_submodule_helper_usage[] = {
- N_("git submodule--helper update_clone [--prefix=<path>] [<path>...]"),
+ N_("git submodule--helper update-clone [--prefix=<path>] [<path>...]"),
NULL
};
suc.prefix = prefix;
diff --git a/builtin/update-index.c b/builtin/update-index.c
index dff2f4b837..49302d98c5 100644
--- a/builtin/update-index.c
+++ b/builtin/update-index.c
@@ -966,6 +966,7 @@ int cmd_update_index(int argc, const char **argv, const char *prefix)
struct parse_opt_ctx_t ctx;
strbuf_getline_fn getline_fn;
int parseopt_state = PARSE_OPT_UNKNOWN;
+ struct repository *r = the_repository;
struct option options[] = {
OPT_BIT('q', NULL, &refresh_args.flags,
N_("continue refresh even when index needs update"),
@@ -1180,11 +1181,12 @@ int cmd_update_index(int argc, const char **argv, const char *prefix)
remove_split_index(&the_index);
}
+ prepare_repo_settings(r);
switch (untracked_cache) {
case UC_UNSPECIFIED:
break;
case UC_DISABLE:
- if (git_config_get_untracked_cache() == 1)
+ if (r->settings.core_untracked_cache == UNTRACKED_CACHE_WRITE)
warning(_("core.untrackedCache is set to true; "
"remove or change it, if you really want to "
"disable the untracked cache"));
@@ -1196,7 +1198,7 @@ int cmd_update_index(int argc, const char **argv, const char *prefix)
return !test_if_untracked_cache_is_supported();
case UC_ENABLE:
case UC_FORCE:
- if (git_config_get_untracked_cache() == 0)
+ if (r->settings.core_untracked_cache == UNTRACKED_CACHE_REMOVE)
warning(_("core.untrackedCache is set to false; "
"remove or change it, if you really want to "
"enable the untracked cache"));
diff --git a/builtin/worktree.c b/builtin/worktree.c
index a5bb02b207..4de44f579a 100644
--- a/builtin/worktree.c
+++ b/builtin/worktree.c
@@ -10,7 +10,6 @@
#include "run-command.h"
#include "sigchain.h"
#include "submodule.h"
-#include "refs.h"
#include "utf8.h"
#include "worktree.h"
@@ -350,7 +349,7 @@ static int add_worktree(const char *path, const char *refname,
*/
strbuf_reset(&sb);
strbuf_addf(&sb, "%s/HEAD", sb_repo.buf);
- write_file(sb.buf, "%s", sha1_to_hex(null_sha1));
+ write_file(sb.buf, "%s", oid_to_hex(&null_oid));
strbuf_reset(&sb);
strbuf_addf(&sb, "%s/commondir", sb_repo.buf);
write_file(sb.buf, "../..");
@@ -880,7 +879,7 @@ static void check_clean_worktree(struct worktree *wt,
original_path);
ret = xread(cp.out, buf, sizeof(buf));
if (ret)
- die(_("'%s' is dirty, use --force to delete it"),
+ die(_("'%s' contains modified or untracked files, use --force to delete it"),
original_path);
close(cp.out);
ret = finish_command(&cp);
diff --git a/bulk-checkin.c b/bulk-checkin.c
index 39ee7d6107..583aacb9e3 100644
--- a/bulk-checkin.c
+++ b/bulk-checkin.c
@@ -197,7 +197,7 @@ static int deflate_to_pack(struct bulk_checkin_state *state,
git_hash_ctx ctx;
unsigned char obuf[16384];
unsigned header_len;
- struct hashfile_checkpoint checkpoint;
+ struct hashfile_checkpoint checkpoint = {0};
struct pack_idx_entry *idx = NULL;
seekback = lseek(fd, 0, SEEK_CUR);
diff --git a/bundle.c b/bundle.c
index b5d21cd80f..a85ed3f7bc 100644
--- a/bundle.c
+++ b/bundle.c
@@ -282,7 +282,7 @@ static int write_pack_data(int bundle_fd, struct rev_info *revs)
struct object *object = revs->pending.objects[i].item;
if (object->flags & UNINTERESTING)
write_or_die(pack_objects.in, "^", 1);
- write_or_die(pack_objects.in, oid_to_hex(&object->oid), GIT_SHA1_HEXSZ);
+ write_or_die(pack_objects.in, oid_to_hex(&object->oid), the_hash_algo->hexsz);
write_or_die(pack_objects.in, "\n", 1);
}
close(pack_objects.in);
@@ -414,7 +414,7 @@ static int write_bundle_refs(int bundle_fd, struct rev_info *revs)
}
ref_count++;
- write_or_die(bundle_fd, oid_to_hex(&e->item->oid), 40);
+ write_or_die(bundle_fd, oid_to_hex(&e->item->oid), the_hash_algo->hexsz);
write_or_die(bundle_fd, " ", 1);
write_or_die(bundle_fd, display_ref, strlen(display_ref));
write_or_die(bundle_fd, "\n", 1);
diff --git a/cache-tree.c b/cache-tree.c
index 706ffcf188..1bd1b23d38 100644
--- a/cache-tree.c
+++ b/cache-tree.c
@@ -5,6 +5,7 @@
#include "cache-tree.h"
#include "object-store.h"
#include "replace-object.h"
+#include "promisor-remote.h"
#ifndef DEBUG_CACHE_TREE
#define DEBUG_CACHE_TREE 0
@@ -357,7 +358,7 @@ static int update_one(struct cache_tree *it,
}
ce_missing_ok = mode == S_IFGITLINK || missing_ok ||
- (repository_format_partial_clone &&
+ (has_promisor_remote() &&
ce_skip_worktree(ce));
if (is_null_oid(oid) ||
(!ce_missing_ok && !has_object_file(oid))) {
@@ -407,7 +408,7 @@ static int update_one(struct cache_tree *it,
if (repair) {
struct object_id oid;
hash_object_file(buffer.buf, buffer.len, tree_type, &oid);
- if (has_object_file(&oid))
+ if (has_object_file_with_flags(&oid, OBJECT_INFO_SKIP_FETCH_OBJECT))
oidcpy(&it->oid, &oid);
else
to_invalidate = 1;
@@ -608,11 +609,66 @@ static struct cache_tree *cache_tree_find(struct cache_tree *it, const char *pat
return it;
}
+static int write_index_as_tree_internal(struct object_id *oid,
+ struct index_state *index_state,
+ int cache_tree_valid,
+ int flags,
+ const char *prefix)
+{
+ if (flags & WRITE_TREE_IGNORE_CACHE_TREE) {
+ cache_tree_free(&index_state->cache_tree);
+ cache_tree_valid = 0;
+ }
+
+ if (!index_state->cache_tree)
+ index_state->cache_tree = cache_tree();
+
+ if (!cache_tree_valid && cache_tree_update(index_state, flags) < 0)
+ return WRITE_TREE_UNMERGED_INDEX;
+
+ if (prefix) {
+ struct cache_tree *subtree;
+ subtree = cache_tree_find(index_state->cache_tree, prefix);
+ if (!subtree)
+ return WRITE_TREE_PREFIX_ERROR;
+ oidcpy(oid, &subtree->oid);
+ }
+ else
+ oidcpy(oid, &index_state->cache_tree->oid);
+
+ return 0;
+}
+
+struct tree* write_in_core_index_as_tree(struct repository *repo) {
+ struct object_id o;
+ int was_valid, ret;
+
+ struct index_state *index_state = repo->index;
+ was_valid = index_state->cache_tree &&
+ cache_tree_fully_valid(index_state->cache_tree);
+
+ ret = write_index_as_tree_internal(&o, index_state, was_valid, 0, NULL);
+ if (ret == WRITE_TREE_UNMERGED_INDEX) {
+ int i;
+ fprintf(stderr, "BUG: There are unmerged index entries:\n");
+ for (i = 0; i < index_state->cache_nr; i++) {
+ const struct cache_entry *ce = index_state->cache[i];
+ if (ce_stage(ce))
+ fprintf(stderr, "BUG: %d %.*s\n", ce_stage(ce),
+ (int)ce_namelen(ce), ce->name);
+ }
+ BUG("unmerged index entries when writing inmemory index");
+ }
+
+ return lookup_tree(repo, &index_state->cache_tree->oid);
+}
+
+
int write_index_as_tree(struct object_id *oid, struct index_state *index_state, const char *index_path, int flags, const char *prefix)
{
int entries, was_valid;
struct lock_file lock_file = LOCK_INIT;
- int ret = 0;
+ int ret;
hold_lock_file_for_update(&lock_file, index_path, LOCK_DIE_ON_ERROR);
@@ -621,18 +677,14 @@ int write_index_as_tree(struct object_id *oid, struct index_state *index_state,
ret = WRITE_TREE_UNREADABLE_INDEX;
goto out;
}
- if (flags & WRITE_TREE_IGNORE_CACHE_TREE)
- cache_tree_free(&index_state->cache_tree);
- if (!index_state->cache_tree)
- index_state->cache_tree = cache_tree();
+ was_valid = !(flags & WRITE_TREE_IGNORE_CACHE_TREE) &&
+ index_state->cache_tree &&
+ cache_tree_fully_valid(index_state->cache_tree);
- was_valid = cache_tree_fully_valid(index_state->cache_tree);
- if (!was_valid) {
- if (cache_tree_update(index_state, flags) < 0) {
- ret = WRITE_TREE_UNMERGED_INDEX;
- goto out;
- }
+ ret = write_index_as_tree_internal(oid, index_state, was_valid, flags,
+ prefix);
+ if (!ret && !was_valid) {
write_locked_index(index_state, &lock_file, COMMIT_LOCK);
/* Not being able to write is fine -- we are only interested
* in updating the cache-tree part, and if the next caller
@@ -642,18 +694,6 @@ int write_index_as_tree(struct object_id *oid, struct index_state *index_state,
*/
}
- if (prefix) {
- struct cache_tree *subtree;
- subtree = cache_tree_find(index_state->cache_tree, prefix);
- if (!subtree) {
- ret = WRITE_TREE_PREFIX_ERROR;
- goto out;
- }
- oidcpy(oid, &subtree->oid);
- }
- else
- oidcpy(oid, &index_state->cache_tree->oid);
-
out:
rollback_lock_file(&lock_file);
return ret;
@@ -713,7 +753,7 @@ static struct cache_tree *find_cache_tree_from_traversal(struct cache_tree *root
if (!info->prev)
return root;
our_parent = find_cache_tree_from_traversal(root, info->prev);
- return cache_tree_find(our_parent, info->name.path);
+ return cache_tree_find(our_parent, info->name);
}
int cache_tree_matches_traversal(struct cache_tree *root,
diff --git a/cache-tree.h b/cache-tree.h
index 757bbc48bc..639bfa5340 100644
--- a/cache-tree.h
+++ b/cache-tree.h
@@ -34,7 +34,7 @@ int cache_tree_fully_valid(struct cache_tree *);
int cache_tree_update(struct index_state *, int);
void cache_tree_verify(struct repository *, struct index_state *);
-/* bitmasks to write_cache_as_tree flags */
+/* bitmasks to write_index_as_tree flags */
#define WRITE_TREE_MISSING_OK 1
#define WRITE_TREE_IGNORE_CACHE_TREE 2
#define WRITE_TREE_DRY_RUN 4
@@ -46,6 +46,7 @@ void cache_tree_verify(struct repository *, struct index_state *);
#define WRITE_TREE_UNMERGED_INDEX (-2)
#define WRITE_TREE_PREFIX_ERROR (-3)
+struct tree* write_in_core_index_as_tree(struct repository *repo);
int write_index_as_tree(struct object_id *oid, struct index_state *index_state, const char *index_path, int flags, const char *prefix);
void prime_cache_tree(struct repository *, struct index_state *, struct tree *);
diff --git a/cache.h b/cache.h
index b1da1ab08f..04cabaac11 100644
--- a/cache.h
+++ b/cache.h
@@ -414,6 +414,7 @@ extern struct index_state the_index;
#define add_file_to_cache(path, flags) add_file_to_index(&the_index, (path), (flags))
#define chmod_cache_entry(ce, flip) chmod_index_entry(&the_index, (ce), (flip))
#define refresh_cache(flags) refresh_index(&the_index, (flags), NULL, NULL, NULL)
+#define refresh_and_write_cache(refresh_flags, write_flags, gentle) repo_refresh_and_write_index(the_repository, (refresh_flags), (write_flags), (gentle), NULL, NULL, NULL)
#define ce_match_stat(ce, st, options) ie_match_stat(&the_index, (ce), (st), (options))
#define ce_modified(ce, st, options) ie_modified(&the_index, (ce), (st), (options))
#define cache_dir_exists(name, namelen) index_dir_exists(&the_index, (name), (namelen))
@@ -636,6 +637,9 @@ int daemonize(void);
* at least 'nr' entries; the number of entries currently allocated
* is 'alloc', using the standard growing factor alloc_nr() macro.
*
+ * Consider using ALLOC_GROW_BY instead of ALLOC_GROW as it has some
+ * added niceties.
+ *
* DO NOT USE any expression with side-effect for 'x', 'nr', or 'alloc'.
*/
#define ALLOC_GROW(x, nr, alloc) \
@@ -649,6 +653,25 @@ int daemonize(void);
} \
} while (0)
+/*
+ * Similar to ALLOC_GROW but handles updating of the nr value and
+ * zeroing the bytes of the newly-grown array elements.
+ *
+ * DO NOT USE any expression with side-effect for any of the
+ * arguments.
+ */
+#define ALLOC_GROW_BY(x, nr, increase, alloc) \
+ do { \
+ if (increase) { \
+ size_t new_nr = nr + (increase); \
+ if (new_nr < nr) \
+ BUG("negative growth in ALLOC_GROW_BY"); \
+ ALLOC_GROW(x, new_nr, alloc); \
+ memset((x) + nr, 0, sizeof(*(x)) * (increase)); \
+ nr = new_nr; \
+ } \
+ } while (0)
+
/* Initialize and use the cache information */
struct lock_file;
void preload_index(struct index_state *index,
@@ -725,6 +748,19 @@ struct cache_entry *index_file_exists(struct index_state *istate, const char *na
*/
int index_name_pos(const struct index_state *, const char *name, int namelen);
+/*
+ * Some functions return the negative complement of an insert position when a
+ * precise match was not found but a position was found where the entry would
+ * need to be inserted. This helper protects that logic from any integer
+ * underflow.
+ */
+static inline int index_pos_to_insert_pos(uintmax_t pos)
+{
+ if (pos > INT_MAX)
+ die("overflow: -1 - %"PRIuMAX, pos);
+ return -1 - (int)pos;
+}
+
#define ADD_CACHE_OK_TO_ADD 1 /* Ok to add */
#define ADD_CACHE_OK_TO_REPLACE 2 /* Ok to replace file/directory */
#define ADD_CACHE_SKIP_DFCHECK 4 /* Ok to skip DF conflict checks */
@@ -812,6 +848,23 @@ void fill_stat_cache_info(struct index_state *istate, struct cache_entry *ce, st
#define REFRESH_IN_PORCELAIN 0x0020 /* user friendly output, not "needs update" */
#define REFRESH_PROGRESS 0x0040 /* show progress bar if stderr is tty */
int refresh_index(struct index_state *, unsigned int flags, const struct pathspec *pathspec, char *seen, const char *header_msg);
+/*
+ * Refresh the index and write it to disk.
+ *
+ * 'refresh_flags' is passed directly to 'refresh_index()', while
+ * 'COMMIT_LOCK | write_flags' is passed to 'write_locked_index()', so
+ * the lockfile is always either committed or rolled back.
+ *
+ * If 'gentle' is passed, errors locking the index are ignored.
+ *
+ * Return 1 if refreshing the index returns an error, -1 if writing
+ * the index to disk fails, 0 on success.
+ *
+ * Note that if refreshing the index returns an error, we still write
+ * out the index (unless locking fails).
+ */
+int repo_refresh_and_write_index(struct repository*, unsigned int refresh_flags, unsigned int write_flags, int gentle, const struct pathspec *, char *seen, const char *header_msg);
+
struct cache_entry *refresh_cache_entry(struct index_state *, struct cache_entry *, unsigned int);
void set_alternate_index_output(const char *);
@@ -937,8 +990,6 @@ extern int grafts_replace_parents;
#define GIT_REPO_VERSION 0
#define GIT_REPO_VERSION_READ 1
extern int repository_format_precious_objects;
-extern char *repository_format_partial_clone;
-extern const char *core_partial_clone_filter_default;
extern int repository_format_worktree_config;
/*
@@ -1029,7 +1080,6 @@ const char *repo_find_unique_abbrev(struct repository *r, const struct object_id
int repo_find_unique_abbrev_r(struct repository *r, char *hex, const struct object_id *oid, int len);
#define find_unique_abbrev_r(hex, oid, len) repo_find_unique_abbrev_r(the_repository, hex, oid, len)
-extern const unsigned char null_sha1[GIT_MAX_RAWSZ];
extern const struct object_id null_oid;
static inline int hashcmp(const unsigned char *sha1, const unsigned char *sha2)
@@ -1064,14 +1114,9 @@ static inline int oideq(const struct object_id *oid1, const struct object_id *oi
return hasheq(oid1->hash, oid2->hash);
}
-static inline int is_null_sha1(const unsigned char *sha1)
-{
- return hasheq(sha1, null_sha1);
-}
-
static inline int is_null_oid(const struct object_id *oid)
{
- return hasheq(oid->hash, null_sha1);
+ return oideq(oid, &null_oid);
}
static inline void hashcpy(unsigned char *sha_dst, const unsigned char *sha_src)
@@ -1516,8 +1561,7 @@ struct date_mode {
struct date_mode *date_mode_from_type(enum date_mode_type type);
const char *show_date(timestamp_t time, int timezone, const struct date_mode *mode);
-void show_date_relative(timestamp_t time, const struct timeval *now,
- struct strbuf *timebuf);
+void show_date_relative(timestamp_t time, struct strbuf *timebuf);
void show_date_human(timestamp_t time, int tz, const struct timeval *now,
struct strbuf *timebuf);
int parse_date(const char *date, struct strbuf *out);
@@ -1526,7 +1570,7 @@ int parse_expiry_date(const char *date, timestamp_t *timestamp);
void datestamp(struct strbuf *out);
#define approxidate(s) approxidate_careful((s), NULL)
timestamp_t approxidate_careful(const char *, int *);
-timestamp_t approxidate_relative(const char *date, const struct timeval *now);
+timestamp_t approxidate_relative(const char *date);
void parse_date_format(const char *format, struct date_mode *mode);
int date_overflows(timestamp_t date);
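
The index_pos_to_insert_pos() helper added to cache.h above documents the convention that a failed lookup returns the negative complement of the would-be insert position. A small self-contained sketch of that convention, with a sorted int array standing in for the index; lookup() is a made-up stand-in for index_name_pos().

    #include <limits.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    static int index_pos_to_insert_pos(uintmax_t pos)
    {
        if (pos > INT_MAX) {
            fprintf(stderr, "overflow: -1 - %ju\n", pos);
            exit(1);
        }
        return -1 - (int)pos;
    }

    /* Returns the match position, or -1 - insert_position when not found. */
    static int lookup(const int *a, int nr, int key)
    {
        int lo = 0, hi = nr;
        while (lo < hi) {
            int mid = lo + (hi - lo) / 2;
            if (a[mid] == key)
                return mid;
            if (a[mid] < key)
                lo = mid + 1;
            else
                hi = mid;
        }
        return index_pos_to_insert_pos(lo);
    }

    int main(void)
    {
        const int a[] = { 2, 4, 8, 16 };
        int pos = lookup(a, 4, 8);      /* found: 2 */
        printf("8 found at %d\n", pos);
        pos = lookup(a, 4, 5);          /* not found: -1 - 2 == -3 */
        if (pos < 0)
            printf("5 would be inserted at %d\n", -pos - 1);
        return 0;
    }
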
diff --git a/ci/install-dependencies.sh b/ci/install-dependencies.sh
index 8cc72503cb..85a9d6b15c 100755
--- a/ci/install-dependencies.sh
+++ b/ci/install-dependencies.sh
@@ -49,11 +49,12 @@ osx-clang|osx-gcc)
;;
StaticAnalysis)
sudo apt-get -q update
- sudo apt-get -q -y install coccinelle
+ sudo apt-get -q -y install coccinelle libcurl4-openssl-dev libssl-dev \
+ libexpat-dev gettext
;;
Documentation)
sudo apt-get -q update
- sudo apt-get -q -y install asciidoc xmlto
+ sudo apt-get -q -y install asciidoc xmlto docbook-xsl-ns
test -n "$ALREADY_HAVE_ASCIIDOCTOR" ||
gem install --version 1.5.8 asciidoctor
diff --git a/ci/lib.sh b/ci/lib.sh
index 44db2d5cbb..c8c2c38155 100755
--- a/ci/lib.sh
+++ b/ci/lib.sh
@@ -34,6 +34,11 @@ save_good_tree () {
# successfully before (e.g. because the branch got rebased, changing only
# the commit messages).
skip_good_tree () {
+ if test "$TRAVIS_DEBUG_MODE" = true
+ then
+ return
+ fi
+
if ! good_tree_info="$(grep "^$(git rev-parse $CI_COMMIT^{tree}) " "$good_trees_file")"
then
# Haven't seen this tree yet, or no cached good trees file yet.
@@ -160,7 +165,7 @@ linux-clang|linux-gcc)
export CC=gcc-8
fi
- export GIT_TEST_HTTPD=YesPlease
+ export GIT_TEST_HTTPD=true
# The Linux build installs the defined dependency versions below.
# The OS X build installs much more recent versions, whichever
diff --git a/ci/run-static-analysis.sh b/ci/run-static-analysis.sh
index a19aa7ebbc..65bcebda41 100755
--- a/ci/run-static-analysis.sh
+++ b/ci/run-static-analysis.sh
@@ -26,4 +26,7 @@ then
exit 1
fi
+make hdr-check ||
+exit 1
+
save_good_tree
diff --git a/ci/test-documentation.sh b/ci/test-documentation.sh
index d49089832d..b3e76ef863 100755
--- a/ci/test-documentation.sh
+++ b/ci/test-documentation.sh
@@ -8,6 +8,8 @@
filter_log () {
sed -e '/^GIT_VERSION = /d' \
-e '/^ \* new asciidoc flags$/d' \
+ -e '/stripped namespace before processing/d' \
+ -e '/Attributed.*IDs for element/d' \
"$1"
}
diff --git a/combine-diff.c b/combine-diff.c
index 3e49f3bda8..d5c4d839dc 100644
--- a/combine-diff.c
+++ b/combine-diff.c
@@ -930,7 +930,7 @@ static void show_combined_header(struct combine_diff_path *elem,
int show_file_header)
{
struct diff_options *opt = &rev->diffopt;
- int abbrev = opt->flags.full_index ? GIT_SHA1_HEXSZ : DEFAULT_ABBREV;
+ int abbrev = opt->flags.full_index ? the_hash_algo->hexsz : DEFAULT_ABBREV;
const char *a_prefix = opt->a_prefix ? opt->a_prefix : "a/";
const char *b_prefix = opt->b_prefix ? opt->b_prefix : "b/";
const char *c_meta = diff_get_color_opt(opt, DIFF_METAINFO);
diff --git a/commit-graph.c b/commit-graph.c
index fe954ab5f8..fc4a43b8d6 100644
--- a/commit-graph.c
+++ b/commit-graph.c
@@ -467,19 +467,26 @@ static void prepare_commit_graph_one(struct repository *r, const char *obj_dir)
static int prepare_commit_graph(struct repository *r)
{
struct object_directory *odb;
- int config_value;
- if (git_env_bool(GIT_TEST_COMMIT_GRAPH_DIE_ON_LOAD, 0))
- die("dying as requested by the '%s' variable on commit-graph load!",
- GIT_TEST_COMMIT_GRAPH_DIE_ON_LOAD);
+ /*
+ * This must come before the "already attempted?" check below, because
+ * we want to disable even an already-loaded graph file.
+ */
+ if (r->commit_graph_disabled)
+ return 0;
if (r->objects->commit_graph_attempted)
return !!r->objects->commit_graph;
r->objects->commit_graph_attempted = 1;
+ if (git_env_bool(GIT_TEST_COMMIT_GRAPH_DIE_ON_LOAD, 0))
+ die("dying as requested by the '%s' variable on commit-graph load!",
+ GIT_TEST_COMMIT_GRAPH_DIE_ON_LOAD);
+
+ prepare_repo_settings(r);
+
if (!git_env_bool(GIT_TEST_COMMIT_GRAPH, 0) &&
- (repo_config_get_bool(r, "core.commitgraph", &config_value) ||
- !config_value))
+ r->settings.core_commit_graph != 1)
/*
* This repository is not configured to use commit graphs, so
* do not load one. (But report commit_graph_attempted anyway
@@ -783,7 +790,8 @@ struct write_commit_graph_context {
unsigned append:1,
report_progress:1,
- split:1;
+ split:1,
+ check_oids:1;
const struct split_commit_graph_opts *split_opts;
};
@@ -838,12 +846,19 @@ static void write_graph_chunk_data(struct hashfile *f, int hash_len,
while (list < last) {
struct commit_list *parent;
+ struct object_id *tree;
int edge_value;
uint32_t packedDate[2];
display_progress(ctx->progress, ++ctx->progress_cnt);
- parse_commit_no_graph(*list);
- hashwrite(f, get_commit_tree_oid(*list)->hash, hash_len);
+ if (parse_commit_no_graph(*list))
+ die(_("unable to parse commit %s"),
+ oid_to_hex(&(*list)->object.oid));
+ tree = get_commit_tree_oid(*list);
+ if (!tree)
+ die(_("unable to get tree for %s"),
+ oid_to_hex(&(*list)->object.oid));
+ hashwrite(f, tree->hash, hash_len);
parent = (*list)->parents;
@@ -1049,7 +1064,7 @@ static void close_reachable(struct write_commit_graph_context *ctx)
if (ctx->report_progress)
ctx->progress = start_delayed_progress(
_("Expanding reachable commits in commit graph"),
- ctx->oids.nr);
+ 0);
for (i = 0; i < ctx->oids.nr; i++) {
display_progress(ctx->progress, i + 1);
commit = lookup_commit(ctx->r, &ctx->oids.list[i]);
@@ -1134,7 +1149,8 @@ static int add_ref_to_list(const char *refname,
return 0;
}
-int write_commit_graph_reachable(const char *obj_dir, unsigned int flags,
+int write_commit_graph_reachable(const char *obj_dir,
+ enum commit_graph_write_flags flags,
const struct split_commit_graph_opts *split_opts)
{
struct string_list list = STRING_LIST_INIT_DUP;
@@ -1193,8 +1209,8 @@ static int fill_oids_from_packs(struct write_commit_graph_context *ctx,
return 0;
}
-static void fill_oids_from_commit_hex(struct write_commit_graph_context *ctx,
- struct string_list *commit_hex)
+static int fill_oids_from_commit_hex(struct write_commit_graph_context *ctx,
+ struct string_list *commit_hex)
{
uint32_t i;
struct strbuf progress_title = STRBUF_INIT;
@@ -1215,20 +1231,21 @@ static void fill_oids_from_commit_hex(struct write_commit_graph_context *ctx,
struct commit *result;
display_progress(ctx->progress, i + 1);
- if (commit_hex->items[i].string &&
- parse_oid_hex(commit_hex->items[i].string, &oid, &end))
- continue;
-
- result = lookup_commit_reference_gently(ctx->r, &oid, 1);
-
- if (result) {
+ if (!parse_oid_hex(commit_hex->items[i].string, &oid, &end) &&
+ (result = lookup_commit_reference_gently(ctx->r, &oid, 1))) {
ALLOC_GROW(ctx->oids.list, ctx->oids.nr + 1, ctx->oids.alloc);
oidcpy(&ctx->oids.list[ctx->oids.nr], &(result->object.oid));
ctx->oids.nr++;
+ } else if (ctx->check_oids) {
+ error(_("invalid commit object id: %s"),
+ commit_hex->items[i].string);
+ return -1;
}
}
stop_progress(&ctx->progress);
strbuf_release(&progress_title);
+
+ return 0;
}
static void fill_oids_from_all_packs(struct write_commit_graph_context *ctx)
@@ -1276,7 +1293,6 @@ static uint32_t count_distinct_commits(struct write_commit_graph_context *ctx)
static void copy_oids_to_commits(struct write_commit_graph_context *ctx)
{
uint32_t i;
- struct commit_list *parent;
ctx->num_extra_edges = 0;
if (ctx->report_progress)
@@ -1284,7 +1300,8 @@ static void copy_oids_to_commits(struct write_commit_graph_context *ctx)
_("Finding extra edges in commit graph"),
ctx->oids.nr);
for (i = 0; i < ctx->oids.nr; i++) {
- int num_parents = 0;
+ unsigned int num_parents;
+
display_progress(ctx->progress, i + 1);
if (i > 0 && oideq(&ctx->oids.list[i - 1], &ctx->oids.list[i]))
continue;
@@ -1298,10 +1315,7 @@ static void copy_oids_to_commits(struct write_commit_graph_context *ctx)
parse_commit_no_graph(ctx->commits.list[ctx->commits.nr]);
- for (parent = ctx->commits.list[ctx->commits.nr]->parents;
- parent; parent = parent->next)
- num_parents++;
-
+ num_parents = commit_list_count(ctx->commits.list[ctx->commits.nr]->parents);
if (num_parents > 2)
ctx->num_extra_edges += num_parents - 1;
@@ -1519,8 +1533,8 @@ static int write_commit_graph_file(struct write_commit_graph_context *ctx)
static void split_graph_merge_strategy(struct write_commit_graph_context *ctx)
{
- struct commit_graph *g = ctx->r->objects->commit_graph;
- uint32_t num_commits = ctx->commits.nr;
+ struct commit_graph *g;
+ uint32_t num_commits;
uint32_t i;
int max_commits = 0;
@@ -1532,6 +1546,7 @@ static void split_graph_merge_strategy(struct write_commit_graph_context *ctx)
}
g = ctx->r->objects->commit_graph;
+ num_commits = ctx->commits.nr;
ctx->num_commit_graphs_after = ctx->num_commit_graphs_before + 1;
while (g && (g->num_commits <= size_mult * num_commits ||
@@ -1613,8 +1628,7 @@ static int commit_compare(const void *_a, const void *_b)
static void sort_and_scan_merged_commits(struct write_commit_graph_context *ctx)
{
- uint32_t i, num_parents;
- struct commit_list *parent;
+ uint32_t i;
if (ctx->report_progress)
ctx->progress = start_delayed_progress(
@@ -1632,10 +1646,9 @@ static void sort_and_scan_merged_commits(struct write_commit_graph_context *ctx)
die(_("unexpected duplicate commit id %s"),
oid_to_hex(&ctx->commits.list[i]->object.oid));
} else {
- num_parents = 0;
- for (parent = ctx->commits.list[i]->parents; parent; parent = parent->next)
- num_parents++;
+ unsigned int num_parents;
+ num_parents = commit_list_count(ctx->commits.list[i]->parents);
if (num_parents > 2)
ctx->num_extra_edges += num_parents - 1;
}
@@ -1752,7 +1765,7 @@ out:
int write_commit_graph(const char *obj_dir,
struct string_list *pack_indexes,
struct string_list *commit_hex,
- unsigned int flags,
+ enum commit_graph_write_flags flags,
const struct split_commit_graph_opts *split_opts)
{
struct write_commit_graph_context *ctx;
@@ -1773,9 +1786,10 @@ int write_commit_graph(const char *obj_dir,
if (len && ctx->obj_dir[len - 1] == '/')
ctx->obj_dir[len - 1] = 0;
- ctx->append = flags & COMMIT_GRAPH_APPEND ? 1 : 0;
- ctx->report_progress = flags & COMMIT_GRAPH_PROGRESS ? 1 : 0;
- ctx->split = flags & COMMIT_GRAPH_SPLIT ? 1 : 0;
+ ctx->append = flags & COMMIT_GRAPH_WRITE_APPEND ? 1 : 0;
+ ctx->report_progress = flags & COMMIT_GRAPH_WRITE_PROGRESS ? 1 : 0;
+ ctx->split = flags & COMMIT_GRAPH_WRITE_SPLIT ? 1 : 0;
+ ctx->check_oids = flags & COMMIT_GRAPH_WRITE_CHECK_OIDS ? 1 : 0;
ctx->split_opts = split_opts;
if (ctx->split) {
@@ -1830,8 +1844,10 @@ int write_commit_graph(const char *obj_dir,
goto cleanup;
}
- if (commit_hex)
- fill_oids_from_commit_hex(ctx, commit_hex);
+ if (commit_hex) {
+ if ((res = fill_oids_from_commit_hex(ctx, commit_hex)))
+ goto cleanup;
+ }
if (!pack_indexes && !commit_hex)
fill_oids_from_all_packs(ctx);
@@ -1986,8 +2002,10 @@ int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags)
if (verify_commit_graph_error & ~VERIFY_COMMIT_GRAPH_ERROR_HASH)
return verify_commit_graph_error;
- progress = start_progress(_("Verifying commits in commit graph"),
- g->num_commits);
+ if (flags & COMMIT_GRAPH_WRITE_PROGRESS)
+ progress = start_progress(_("Verifying commits in commit graph"),
+ g->num_commits);
+
for (i = 0; i < g->num_commits; i++) {
struct commit *graph_commit, *odb_commit;
struct commit_list *graph_parents, *odb_parents;
@@ -2095,3 +2113,8 @@ void free_commit_graph(struct commit_graph *g)
free(g->filename);
free(g);
}
+
+void disable_commit_graph(struct repository *r)
+{
+ r->commit_graph_disabled = 1;
+}
diff --git a/commit-graph.h b/commit-graph.h
index df9a3b20e4..7f5c933fa2 100644
--- a/commit-graph.h
+++ b/commit-graph.h
@@ -71,9 +71,13 @@ struct commit_graph *parse_commit_graph(void *graph_map, int fd,
*/
int generation_numbers_enabled(struct repository *r);
-#define COMMIT_GRAPH_APPEND (1 << 0)
-#define COMMIT_GRAPH_PROGRESS (1 << 1)
-#define COMMIT_GRAPH_SPLIT (1 << 2)
+enum commit_graph_write_flags {
+ COMMIT_GRAPH_WRITE_APPEND = (1 << 0),
+ COMMIT_GRAPH_WRITE_PROGRESS = (1 << 1),
+ COMMIT_GRAPH_WRITE_SPLIT = (1 << 2),
+ /* Make sure that each OID in the input is a valid commit OID. */
+ COMMIT_GRAPH_WRITE_CHECK_OIDS = (1 << 3)
+};
struct split_commit_graph_opts {
int size_multiple;
@@ -87,12 +91,13 @@ struct split_commit_graph_opts {
* is not compatible with the commit-graph feature, then the
* methods will return 0 without writing a commit-graph.
*/
-int write_commit_graph_reachable(const char *obj_dir, unsigned int flags,
+int write_commit_graph_reachable(const char *obj_dir,
+ enum commit_graph_write_flags flags,
const struct split_commit_graph_opts *split_opts);
int write_commit_graph(const char *obj_dir,
struct string_list *pack_indexes,
struct string_list *commit_hex,
- unsigned int flags,
+ enum commit_graph_write_flags flags,
const struct split_commit_graph_opts *split_opts);
#define COMMIT_GRAPH_VERIFY_SHALLOW (1 << 0)
@@ -102,4 +107,10 @@ int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags)
void close_commit_graph(struct raw_object_store *);
void free_commit_graph(struct commit_graph *);
+/*
+ * Disable further use of the commit graph in this process when parsing a
+ * "struct commit".
+ */
+void disable_commit_graph(struct repository *r);
+
#endif
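
The new commit_graph_write_flags enum replaces bare #define bits, and commit-graph.c unpacks it into one-bit context fields. A short standalone sketch of how a caller might combine the flags and how the writer unpacks them; write_ctx and setup() are illustrative stand-ins, not Git's actual write path.

    #include <stdio.h>

    enum commit_graph_write_flags {
        COMMIT_GRAPH_WRITE_APPEND     = (1 << 0),
        COMMIT_GRAPH_WRITE_PROGRESS   = (1 << 1),
        COMMIT_GRAPH_WRITE_SPLIT      = (1 << 2),
        COMMIT_GRAPH_WRITE_CHECK_OIDS = (1 << 3)
    };

    struct write_ctx {
        unsigned append:1, report_progress:1, split:1, check_oids:1;
    };

    static void setup(struct write_ctx *ctx, enum commit_graph_write_flags flags)
    {
        ctx->append = flags & COMMIT_GRAPH_WRITE_APPEND ? 1 : 0;
        ctx->report_progress = flags & COMMIT_GRAPH_WRITE_PROGRESS ? 1 : 0;
        ctx->split = flags & COMMIT_GRAPH_WRITE_SPLIT ? 1 : 0;
        ctx->check_oids = flags & COMMIT_GRAPH_WRITE_CHECK_OIDS ? 1 : 0;
    }

    int main(void)
    {
        struct write_ctx ctx = { 0 };
        setup(&ctx, COMMIT_GRAPH_WRITE_PROGRESS | COMMIT_GRAPH_WRITE_CHECK_OIDS);
        printf("progress=%u check_oids=%u split=%u\n",
               ctx.report_progress, ctx.check_oids, ctx.split);
        return 0;
    }
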
diff --git a/commit.c b/commit.c
index a98de16e3d..40890ae7ce 100644
--- a/commit.c
+++ b/commit.c
@@ -358,14 +358,15 @@ struct tree *repo_get_commit_tree(struct repository *r,
struct object_id *get_commit_tree_oid(const struct commit *commit)
{
- return &get_commit_tree(commit)->object.oid;
+ struct tree *tree = get_commit_tree(commit);
+ return tree ? &tree->object.oid : NULL;
}
void release_commit_memory(struct parsed_object_pool *pool, struct commit *c)
{
set_commit_tree(c, NULL);
- c->index = 0;
free_commit_buffer(pool, c);
+ c->index = 0;
free_commit_list(c->parents);
c->object.parsed = 0;
diff --git a/common-main.c b/common-main.c
index 582a7b1886..71e21dd20a 100644
--- a/common-main.c
+++ b/common-main.c
@@ -39,16 +39,16 @@ int main(int argc, const char **argv)
git_resolve_executable_dir(argv[0]);
- trace2_initialize();
- trace2_cmd_start(argv);
- trace2_collect_process_info(TRACE2_PROCESS_INFO_STARTUP);
-
git_setup_gettext();
initialize_the_repository();
attr_start();
+ trace2_initialize();
+ trace2_cmd_start(argv);
+ trace2_collect_process_info(TRACE2_PROCESS_INFO_STARTUP);
+
result = cmd_main(argc, argv);
trace2_cmd_exit(result);
diff --git a/compat/mingw.c b/compat/mingw.c
index 738f0a826a..6b765d936c 100644
--- a/compat/mingw.c
+++ b/compat/mingw.c
@@ -1161,14 +1161,21 @@ static char *lookup_prog(const char *dir, int dirlen, const char *cmd,
int isexe, int exe_only)
{
char path[MAX_PATH];
+ wchar_t wpath[MAX_PATH];
snprintf(path, sizeof(path), "%.*s\\%s.exe", dirlen, dir, cmd);
- if (!isexe && access(path, F_OK) == 0)
+ if (xutftowcs_path(wpath, path) < 0)
+ return NULL;
+
+ if (!isexe && _waccess(wpath, F_OK) == 0)
return xstrdup(path);
- path[strlen(path)-4] = '\0';
- if ((!exe_only || isexe) && access(path, F_OK) == 0)
- if (!(GetFileAttributes(path) & FILE_ATTRIBUTE_DIRECTORY))
+ wpath[wcslen(wpath)-4] = '\0';
+ if ((!exe_only || isexe) && _waccess(wpath, F_OK) == 0) {
+ if (!(GetFileAttributesW(wpath) & FILE_ATTRIBUTE_DIRECTORY)) {
+ path[strlen(path)-4] = '\0';
return xstrdup(path);
+ }
+ }
return NULL;
}
@@ -1229,11 +1236,6 @@ static int wenvcmp(const void *a, const void *b)
return _wcsnicmp(p, q, p_len);
}
-/* We need a stable sort to convert the environment between UTF-16 <-> UTF-8 */
-#ifndef INTERNAL_QSORT
-#include "qsort.c"
-#endif
-
/*
* Build an environment block combining the inherited environment
* merged with the given list of settings.
@@ -1265,15 +1267,15 @@ static wchar_t *make_environment_block(char **deltaenv)
}
ALLOC_ARRAY(result, size);
- memcpy(result, wenv, size * sizeof(*wenv));
+ COPY_ARRAY(result, wenv, size);
FreeEnvironmentStringsW(wenv);
return result;
}
/*
* If there is a deltaenv, let's accumulate all keys into `array`,
- * sort them using the stable git_qsort() and then copy, skipping
- * duplicate keys
+ * sort them using the stable git_stable_qsort() and then copy,
+ * skipping duplicate keys
*/
for (p = wenv; p && *p; ) {
ALLOC_GROW(array, nr + 1, alloc);
@@ -1296,7 +1298,7 @@ static wchar_t *make_environment_block(char **deltaenv)
p += wlen + 1;
}
- git_qsort(array, nr, sizeof(*array), wenvcmp);
+ git_stable_qsort(array, nr, sizeof(*array), wenvcmp);
ALLOC_ARRAY(result, size + delta_size);
for (p = result, i = 0; i < nr; i++) {
@@ -1309,7 +1311,7 @@ static wchar_t *make_environment_block(char **deltaenv)
continue;
size = wcslen(array[i]) + 1;
- memcpy(p, array[i], size * sizeof(*p));
+ COPY_ARRAY(p, array[i], size);
p += size;
}
*p = L'\0';
@@ -1663,6 +1665,8 @@ char *mingw_getenv(const char *name)
if (!w_key)
die("Out of memory, (tried to allocate %u wchar_t's)", len_key);
xutftowcs(w_key, name, len_key);
+ /* GetEnvironmentVariableW() only sets the last error upon failure */
+ SetLastError(ERROR_SUCCESS);
len_value = GetEnvironmentVariableW(w_key, w_value, ARRAY_SIZE(w_value));
if (!len_value && GetLastError() == ERROR_ENVVAR_NOT_FOUND) {
free(w_key);
diff --git a/compat/mingw.h b/compat/mingw.h
index a03e40e6e2..9ad204c57c 100644
--- a/compat/mingw.h
+++ b/compat/mingw.h
@@ -11,7 +11,7 @@ typedef _sigset_t sigset_t;
#undef _POSIX_THREAD_SAFE_FUNCTIONS
#endif
-extern int mingw_core_config(const char *var, const char *value, void *cb);
+int mingw_core_config(const char *var, const char *value, void *cb);
#define platform_core_config mingw_core_config
/*
@@ -443,7 +443,7 @@ static inline void convert_slashes(char *path)
*path = '/';
}
#define PATH_SEP ';'
-extern char *mingw_query_user_email(void);
+char *mingw_query_user_email(void);
#define query_user_email mingw_query_user_email
#if !defined(__MINGW64_VERSION_MAJOR) && (!defined(_MSC_VER) || _MSC_VER < 1800)
#define PRIuMAX "I64u"
@@ -580,4 +580,4 @@ int main(int argc, const char **argv);
/*
* Used by Pthread API implementation for Windows
*/
-extern int err_win_to_posix(DWORD winerr);
+int err_win_to_posix(DWORD winerr);
diff --git a/compat/nedmalloc/malloc.c.h b/compat/nedmalloc/malloc.c.h
index b833ff9225..9134349590 100644
--- a/compat/nedmalloc/malloc.c.h
+++ b/compat/nedmalloc/malloc.c.h
@@ -1755,10 +1755,10 @@ static FORCEINLINE void pthread_release_lock (MLOCK_T *sl) {
assert(sl->l != 0);
assert(sl->threadid == CURRENT_THREAD);
if (--sl->c == 0) {
- sl->threadid = 0;
volatile unsigned int* lp = &sl->l;
int prev = 0;
int ret;
+ sl->threadid = 0;
__asm__ __volatile__ ("lock; xchgl %0, %1"
: "=r" (ret)
: "m" (*(lp)), "0"(prev)
@@ -3066,7 +3066,7 @@ static int init_mparams(void) {
#if !ONLY_MSPACES
/* Set up lock for main malloc area */
gm->mflags = mparams.default_mflags;
- INITIAL_LOCK(&gm->mutex);
+ (void)INITIAL_LOCK(&gm->mutex);
#endif
#if (FOOTERS && !INSECURE)
@@ -5017,7 +5017,7 @@ static mstate init_user_mstate(char* tbase, size_t tsize) {
mchunkptr msp = align_as_chunk(tbase);
mstate m = (mstate)(chunk2mem(msp));
memset(m, 0, msize);
- INITIAL_LOCK(&m->mutex);
+ (void)INITIAL_LOCK(&m->mutex);
msp->head = (msize|PINUSE_BIT|CINUSE_BIT);
m->seg.base = m->least_addr = tbase;
m->seg.size = m->footprint = m->max_footprint = tsize;
diff --git a/compat/vcbuild/scripts/clink.pl b/compat/vcbuild/scripts/clink.pl
index c7b021bfac..ec95a3b2d0 100755
--- a/compat/vcbuild/scripts/clink.pl
+++ b/compat/vcbuild/scripts/clink.pl
@@ -68,8 +68,54 @@ while (@ARGV) {
} elsif ("$arg" =~ /^-L/ && "$arg" ne "-LTCG") {
$arg =~ s/^-L/-LIBPATH:/;
push(@lflags, $arg);
- } elsif ("$arg" =~ /^-R/) {
+ } elsif ("$arg" =~ /^-[Rl]/) {
# eat
+ } elsif ("$arg" eq "-Werror") {
+ push(@cflags, "-WX");
+ } elsif ("$arg" eq "-Wall") {
+ # cl.exe understands -Wall, but it is really overzealous
+ push(@cflags, "-W4");
+ # disable the "signed/unsigned mismatch" warnings; our source code violates that
+ push(@cflags, "-wd4018");
+ push(@cflags, "-wd4245");
+ push(@cflags, "-wd4389");
+ # disable the "unreferenced formal parameter" warning; our source code violates that
+ push(@cflags, "-wd4100");
+ # disable the "conditional expression is constant" warning; our source code violates that
+ push(@cflags, "-wd4127");
+ # disable the "const object should be initialized" warning; these warnings affect only objects that are `static`
+ push(@cflags, "-wd4132");
+ # disable the "function/data pointer conversion in expression" warning; our source code violates that
+ push(@cflags, "-wd4152");
+ # disable the "non-constant aggregate initializer" warning; our source code violates that
+ push(@cflags, "-wd4204");
+ # disable the "cannot be initialized using address of automatic variable" warning; our source code violates that
+ push(@cflags, "-wd4221");
+ # disable the "possible loss of data" warnings; our source code violates that
+ push(@cflags, "-wd4244");
+ push(@cflags, "-wd4267");
+ # disable the "array is too small to include a terminating null character" warning; we ab-use strings to initialize OIDs
+ push(@cflags, "-wd4295");
+ # disable the "'<<': result of 32-bit shift implicitly converted to 64 bits" warning; our source code violates that
+ push(@cflags, "-wd4334");
+ # disable the "declaration hides previous local declaration" warning; our source code violates that
+ push(@cflags, "-wd4456");
+ # disable the "declaration hides function parameter" warning; our source code violates that
+ push(@cflags, "-wd4457");
+ # disable the "declaration hides global declaration" warning; our source code violates that
+ push(@cflags, "-wd4459");
+ # disable the "potentially uninitialized local variable '<name>' used" warning; our source code violates that
+ push(@cflags, "-wd4701");
+ # disable the "unreachable code" warning; our source code violates that
+ push(@cflags, "-wd4702");
+ # disable the "potentially uninitialized local pointer variable used" warning; our source code violates that
+ push(@cflags, "-wd4703");
+ # disable the "assignment within conditional expression" warning; our source code violates that
+ push(@cflags, "-wd4706");
+ # disable the "'inet_ntoa': Use inet_ntop() or InetNtop() instead" warning; our source code violates that
+ push(@cflags, "-wd4996");
+ } elsif ("$arg" =~ /^-W[a-z]/) {
+ # let's ignore those
} else {
push(@args, $arg);
}
diff --git a/compat/win32/path-utils.h b/compat/win32/path-utils.h
index 0f70d43920..8ed062a6b7 100644
--- a/compat/win32/path-utils.h
+++ b/compat/win32/path-utils.h
@@ -1,3 +1,6 @@
+#ifndef WIN32_PATH_UTILS_H
+#define WIN32_PATH_UTILS_H
+
#define has_dos_drive_prefix(path) \
(isalpha(*(path)) && (path)[1] == ':' ? 2 : 0)
int win32_skip_dos_drive_prefix(char **path);
@@ -18,3 +21,5 @@ static inline char *win32_find_last_dir_sep(const char *path)
#define find_last_dir_sep win32_find_last_dir_sep
int win32_offset_1st_component(const char *path);
#define offset_1st_component win32_offset_1st_component
+
+#endif
diff --git a/compat/win32/pthread.h b/compat/win32/pthread.h
index c6cb8dd219..f1cfe73de9 100644
--- a/compat/win32/pthread.h
+++ b/compat/win32/pthread.h
@@ -50,7 +50,7 @@ typedef struct {
DWORD tid;
} pthread_t;
-extern int pthread_create(pthread_t *thread, const void *unused,
+int pthread_create(pthread_t *thread, const void *unused,
void *(*start_routine)(void*), void *arg);
/*
@@ -59,10 +59,10 @@ extern int pthread_create(pthread_t *thread, const void *unused,
*/
#define pthread_join(a, b) win32_pthread_join(&(a), (b))
-extern int win32_pthread_join(pthread_t *thread, void **value_ptr);
+int win32_pthread_join(pthread_t *thread, void **value_ptr);
#define pthread_equal(t1, t2) ((t1).tid == (t2).tid)
-extern pthread_t pthread_self(void);
+pthread_t pthread_self(void);
static inline void NORETURN pthread_exit(void *ret)
{
diff --git a/compat/winansi.c b/compat/winansi.c
index cacd82c833..54fd701cbf 100644
--- a/compat/winansi.c
+++ b/compat/winansi.c
@@ -546,7 +546,7 @@ static HANDLE swap_osfhnd(int fd, HANDLE new_handle)
typedef struct _OBJECT_NAME_INFORMATION
{
UNICODE_STRING Name;
- WCHAR NameBuffer[0];
+ WCHAR NameBuffer[FLEX_ARRAY];
} OBJECT_NAME_INFORMATION, *POBJECT_NAME_INFORMATION;
#define ObjectNameInformation 1
diff --git a/config.c b/config.c
index 3900e4947b..e7052b3977 100644
--- a/config.c
+++ b/config.c
@@ -275,7 +275,7 @@ static int include_by_branch(const char *cond, size_t cond_len)
int flags;
int ret;
struct strbuf pattern = STRBUF_INIT;
- const char *refname = !the_repository || !the_repository->gitdir ?
+ const char *refname = !the_repository->gitdir ?
NULL : resolve_ref_unsafe("HEAD", 0, NULL, &flags);
const char *shortname;
@@ -1204,7 +1204,7 @@ static int git_default_core_config(const char *var, const char *value, void *cb)
default_abbrev = -1;
else {
int abbrev = git_config_int(var, value);
- if (abbrev < minimum_abbrev || abbrev > 40)
+ if (abbrev < minimum_abbrev || abbrev > the_hash_algo->hexsz)
return error(_("abbrev length out of range: %d"), abbrev);
default_abbrev = abbrev;
}
@@ -1379,11 +1379,6 @@ static int git_default_core_config(const char *var, const char *value, void *cb)
return 0;
}
- if (!strcmp(var, "core.partialclonefilter")) {
- return git_config_string(&core_partial_clone_filter_default,
- var, value);
- }
-
if (!strcmp(var, "core.usereplacerefs")) {
read_replace_refs = git_config_bool(var, value);
return 0;
@@ -1861,9 +1856,9 @@ static struct config_set_element *configset_find_element(struct config_set *cs,
if (git_config_parse_key(key, &normalized_key, NULL))
return NULL;
- hashmap_entry_init(&k, strhash(normalized_key));
+ hashmap_entry_init(&k.ent, strhash(normalized_key));
k.key = normalized_key;
- found_entry = hashmap_get(&cs->config_hash, &k, NULL);
+ found_entry = hashmap_get_entry(&cs->config_hash, &k, ent, NULL);
free(normalized_key);
return found_entry;
}
@@ -1882,10 +1877,10 @@ static int configset_add_value(struct config_set *cs, const char *key, const cha
*/
if (!e) {
e = xmalloc(sizeof(*e));
- hashmap_entry_init(e, strhash(key));
+ hashmap_entry_init(&e->ent, strhash(key));
e->key = xstrdup(key);
string_list_init(&e->value_list, 1);
- hashmap_add(&cs->config_hash, e);
+ hashmap_add(&cs->config_hash, &e->ent);
}
si = string_list_append_nodup(&e->value_list, xstrdup_or_null(value));
@@ -1913,12 +1908,14 @@ static int configset_add_value(struct config_set *cs, const char *key, const cha
}
static int config_set_element_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct config_set_element *e1 = entry;
- const struct config_set_element *e2 = entry_or_key;
+ const struct config_set_element *e1, *e2;
+
+ e1 = container_of(eptr, const struct config_set_element, ent);
+ e2 = container_of(entry_or_key, const struct config_set_element, ent);
return strcmp(e1->key, e2->key);
}
@@ -1939,12 +1936,12 @@ void git_configset_clear(struct config_set *cs)
if (!cs->hash_initialized)
return;
- hashmap_iter_init(&cs->config_hash, &iter);
- while ((entry = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(&cs->config_hash, &iter, entry,
+ ent /* member name */) {
free(entry->key);
string_list_clear(&entry->value_list, 1);
}
- hashmap_free(&cs->config_hash, 1);
+ hashmap_free_entries(&cs->config_hash, struct config_set_element, ent);
cs->hash_initialized = 0;
free(cs->list.items);
cs->list.nr = 0;
@@ -2288,30 +2285,6 @@ int git_config_get_expiry_in_days(const char *key, timestamp_t *expiry, timestam
return -1; /* thing exists but cannot be parsed */
}
-int git_config_get_untracked_cache(void)
-{
- int val = -1;
- const char *v;
-
- /* Hack for test programs like test-dump-untracked-cache */
- if (ignore_untracked_cache_config)
- return -1;
-
- if (!git_config_get_maybe_bool("core.untrackedcache", &val))
- return val;
-
- if (!git_config_get_value("core.untrackedcache", &v)) {
- if (!strcasecmp(v, "keep"))
- return -1;
-
- error(_("unknown core.untrackedCache value '%s'; "
- "using 'keep' default value"), v);
- return -1;
- }
-
- return -1; /* default value */
-}
-
int git_config_get_split_index(void)
{
int val;
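
The config.c hunks adopt the reworked hashmap API: the hashmap_entry is embedded in config_set_element, hashmap_entry_init() takes that embedded member, and the comparison callback uses container_of() to get back to the outer struct. A minimal sketch of that embedding pattern; map_entry, the local container_of(), and the string hash are simplified stand-ins for Git's hashmap types, and no real hash table is built here.

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct map_entry {
        unsigned int hash;
    };

    struct config_element {
        struct map_entry ent;   /* embedded, not a pointer */
        const char *key;
    };

    /* simple string hash standing in for strhash() */
    static unsigned int strhash(const char *s)
    {
        unsigned int h = 5381;
        while (*s)
            h = h * 33 + (unsigned char)*s++;
        return h;
    }

    /* Comparison callback in the style the diff introduces: it receives the
     * embedded entries and recovers the containing structs. */
    static int element_cmp(const struct map_entry *a, const struct map_entry *b)
    {
        const struct config_element *e1 =
            container_of(a, const struct config_element, ent);
        const struct config_element *e2 =
            container_of(b, const struct config_element, ent);
        return strcmp(e1->key, e2->key);
    }

    int main(void)
    {
        struct config_element x = { { 0 }, "core.abbrev" };
        struct config_element y = { { 0 }, "core.abbrev" };

        x.ent.hash = strhash(x.key);    /* hashmap_entry_init() equivalent */
        y.ent.hash = strhash(y.key);
        printf("equal keys: %s\n", element_cmp(&x.ent, &y.ent) ? "no" : "yes");
        return 0;
    }
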
diff --git a/config.mak.uname b/config.mak.uname
index db7f06b95f..cc8efd95b1 100644
--- a/config.mak.uname
+++ b/config.mak.uname
@@ -703,20 +703,24 @@ vcxproj:
perl contrib/buildsystems/generate -g Vcxproj
git add -f git.sln {*,*/lib,t/helper/*}/*.vcxproj
- # Generate the LinkOrCopyBuiltins.targets file
+ # Generate the LinkOrCopyBuiltins.targets and LinkOrCopyRemoteHttp.targets file
(echo '<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">' && \
echo ' <Target Name="CopyBuiltins_AfterBuild" AfterTargets="AfterBuild">' && \
for name in $(BUILT_INS);\
do \
echo ' <Copy SourceFiles="$$(OutDir)\git.exe" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
done && \
+ echo ' </Target>' && \
+ echo '</Project>') >git/LinkOrCopyBuiltins.targets
+ (echo '<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">' && \
+ echo ' <Target Name="CopyBuiltins_AfterBuild" AfterTargets="AfterBuild">' && \
for name in $(REMOTE_CURL_ALIASES); \
do \
echo ' <Copy SourceFiles="$$(OutDir)\'"$(REMOTE_CURL_PRIMARY)"'" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
done && \
echo ' </Target>' && \
- echo '</Project>') >git/LinkOrCopyBuiltins.targets
- git add -f git/LinkOrCopyBuiltins.targets
+ echo '</Project>') >git-remote-http/LinkOrCopyRemoteHttp.targets
+ git add -f git/LinkOrCopyBuiltins.targets git-remote-http/LinkOrCopyRemoteHttp.targets
# Add command-list.h
$(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 command-list.h
@@ -724,11 +728,10 @@ vcxproj:
# Add scripts
rm -f perl/perl.mak
- $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 \
- $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN)
+ $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(SCRIPT_LIB) $(SCRIPTS)
# Strip out the sane tool path, needed only for building
sed -i '/^git_broken_path_fix ".*/d' git-sh-setup
- git add -f $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN)
+ git add -f $(SCRIPT_LIB) $(SCRIPTS)
# Add Perl module
$(MAKE) $(LIB_PERL_GEN)
@@ -758,6 +761,10 @@ vcxproj:
$(MAKE) -C templates
git add -f templates/boilerplates.made templates/blt/
+ # Add the translated messages
+ make MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(MOFILES)
+ git add -f $(MOFILES)
+
# Add build options
$(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 GIT-BUILD-OPTIONS
git add -f GIT-BUILD-OPTIONS
diff --git a/connect.c b/connect.c
index 2778481264..da7daa2b68 100644
--- a/connect.c
+++ b/connect.c
@@ -915,6 +915,10 @@ static enum protocol parse_connect_url(const char *url_orig, char **ret_host,
if (protocol == PROTO_LOCAL)
path = end;
+ else if (protocol == PROTO_FILE && *host != '/' &&
+ !has_dos_drive_prefix(host) &&
+ offset_1st_component(host - 2) > 1)
+ path = host - 2; /* include the leading "//" */
else if (protocol == PROTO_FILE && has_dos_drive_prefix(end))
path = end; /* "file://$(pwd)" may be "file://C:/projects/repo" */
else
diff --git a/connected.c b/connected.c
index cd9b324afa..36c4e5dedb 100644
--- a/connected.c
+++ b/connected.c
@@ -5,6 +5,7 @@
#include "connected.h"
#include "transport.h"
#include "packfile.h"
+#include "promisor-remote.h"
/*
* If we feed all the commits we want to verify to this command
@@ -28,6 +29,7 @@ int check_connected(oid_iterate_fn fn, void *cb_data,
struct packed_git *new_pack = NULL;
struct transport *transport;
size_t base_len;
+ const unsigned hexsz = the_hash_algo->hexsz;
if (!opt)
opt = &defaults;
@@ -73,7 +75,7 @@ int check_connected(oid_iterate_fn fn, void *cb_data,
argv_array_push(&rev_list.args,"rev-list");
argv_array_push(&rev_list.args, "--objects");
argv_array_push(&rev_list.args, "--stdin");
- if (repository_format_partial_clone)
+ if (has_promisor_remote())
argv_array_push(&rev_list.args, "--exclude-promisor-objects");
if (!opt->is_deepening_fetch) {
argv_array_push(&rev_list.args, "--not");
@@ -99,7 +101,7 @@ int check_connected(oid_iterate_fn fn, void *cb_data,
sigchain_push(SIGPIPE, SIG_IGN);
- commit[GIT_SHA1_HEXSZ] = '\n';
+ commit[hexsz] = '\n';
do {
/*
* If index-pack already checked that:
@@ -112,8 +114,8 @@ int check_connected(oid_iterate_fn fn, void *cb_data,
if (new_pack && find_pack_entry_one(oid.hash, new_pack))
continue;
- memcpy(commit, oid_to_hex(&oid), GIT_SHA1_HEXSZ);
- if (write_in_full(rev_list.in, commit, GIT_SHA1_HEXSZ + 1) < 0) {
+ memcpy(commit, oid_to_hex(&oid), hexsz);
+ if (write_in_full(rev_list.in, commit, hexsz + 1) < 0) {
if (errno != EPIPE && errno != EINVAL)
error_errno(_("failed write to rev-list"));
err = -1;
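
check_connected() above now sizes its per-object record by the_hash_algo->hexsz, writing the trailing newline into the buffer once and overwriting only the hex part for each object. A standalone sketch of that buffer reuse; the object names are fixed sample values and the records simply go to stdout instead of a rev-list pipe.

    #include <stdio.h>
    #include <string.h>

    #define MAX_HEXSZ 64

    int main(void)
    {
        const char *oids[] = {
            "4b825dc642cb6eb9a060e54bf8d69288fbee4904",
            "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
        };
        const unsigned hexsz = 40;      /* 64 in a SHA-256 repository */
        char record[MAX_HEXSZ + 1];
        size_t i;

        record[hexsz] = '\n';           /* terminator written exactly once */
        for (i = 0; i < sizeof(oids) / sizeof(oids[0]); i++) {
            memcpy(record, oids[i], hexsz);
            fwrite(record, 1, hexsz + 1, stdout);
        }
        return 0;
    }
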
diff --git a/contrib/buildsystems/Generators/Vcxproj.pm b/contrib/buildsystems/Generators/Vcxproj.pm
index 576ccabe1d..5c666f9ac0 100644
--- a/contrib/buildsystems/Generators/Vcxproj.pm
+++ b/contrib/buildsystems/Generators/Vcxproj.pm
@@ -79,7 +79,8 @@ sub createProject {
if (!$static_library) {
$libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}}));
$libs_debug = $libs_release;
- $libs_debug =~ s/zlib\.lib/zlibd\.lib/;
+ $libs_debug =~ s/zlib\.lib/zlibd\.lib/g;
+ $libs_debug =~ s/libcurl\.lib/libcurl-d\.lib/g;
}
$defines =~ s/-D//g;
@@ -119,13 +120,13 @@ sub createProject {
<VCPKGArch Condition="'\$(Platform)'=='Win32'">x86-windows</VCPKGArch>
<VCPKGArch Condition="'\$(Platform)'!='Win32'">x64-windows</VCPKGArch>
<VCPKGArchDirectory>$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)</VCPKGArchDirectory>
- <VCPKGBinDirectory Condition="'\(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
- <VCPKGLibDirectory Condition="'\(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
- <VCPKGBinDirectory Condition="'\(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
- <VCPKGLibDirectory Condition="'\(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
+ <VCPKGBinDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
+ <VCPKGLibDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
+ <VCPKGBinDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
+ <VCPKGLibDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
<VCPKGIncludeDirectory>\$(VCPKGArchDirectory)\\include</VCPKGIncludeDirectory>
- <VCPKGLibs Condition="'\(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
- <VCPKGLibs Condition="'\(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
+ <VCPKGLibs Condition="'\$(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
+ <VCPKGLibs Condition="'\$(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
</PropertyGroup>
<Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'\$(Configuration)'=='Debug'" Label="Configuration">
@@ -277,6 +278,9 @@ EOM
if ($target eq 'git') {
print F " <Import Project=\"LinkOrCopyBuiltins.targets\" />\n";
}
+ if ($target eq 'git-remote-http') {
+ print F " <Import Project=\"LinkOrCopyRemoteHttp.targets\" />\n";
+ }
print F << "EOM";
</Project>
EOM
diff --git a/contrib/coccinelle/hashmap.cocci b/contrib/coccinelle/hashmap.cocci
new file mode 100644
index 0000000000..d69e120ccf
--- /dev/null
+++ b/contrib/coccinelle/hashmap.cocci
@@ -0,0 +1,16 @@
+@ hashmap_entry_init_usage @
+expression E;
+struct hashmap_entry HME;
+@@
+- HME.hash = E;
++ hashmap_entry_init(&HME, E);
+
+@@
+identifier f !~ "^hashmap_entry_init$";
+expression E;
+struct hashmap_entry *HMEP;
+@@
+ f(...) {<...
+- HMEP->hash = E;
++ hashmap_entry_init(HMEP, E);
+ ...>}
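The new Coccinelle rule rewrites direct assignments to a hashmap entry's hash field into hashmap_entry_init() calls, keeping all initialization behind one helper. The standalone sketch below shows the shape of code the rule enforces; every identifier in it (hashmap_entry_demo, entry_init_demo, strhash_demo) is invented and is not git's hashmap API.

	/* standalone sketch: an embedded entry whose hash is only set via init */
	#include <stdio.h>

	struct hashmap_entry_demo {
		unsigned int hash;
	};

	struct item {
		struct hashmap_entry_demo ent;   /* embedded entry, must be initialized */
		const char *key;
	};

	static void entry_init_demo(struct hashmap_entry_demo *e, unsigned int hash)
	{
		e->hash = hash;                  /* the single place that touches ->hash */
	}

	static unsigned int strhash_demo(const char *s)
	{
		unsigned int h = 5381;
		while (*s)
			h = h * 33 + (unsigned char)*s++;
		return h;
	}

	int main(void)
	{
		struct item it = { { 0 }, "README" };

		/* after the semantic patch: no bare "it.ent.hash = ..." assignments */
		entry_init_demo(&it.ent, strhash_demo(it.key));
		printf("%s -> %u\n", it.key, it.ent.hash);
		return 0;
	}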
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
index e087c4bf00..00fbe6c03d 100644
--- a/contrib/completion/git-completion.bash
+++ b/contrib/completion/git-completion.bash
@@ -340,7 +340,7 @@ __gitcomp ()
c="$c${4-}"
if [[ $c == "$cur_"* ]]; then
case $c in
- --*=*|*.) ;;
+ --*=|*.) ;;
*) c="$c " ;;
esac
COMPREPLY[i++]="${2-}$c"
@@ -360,7 +360,7 @@ __gitcomp ()
c="$c${4-}"
if [[ $c == "$cur_"* ]]; then
case $c in
- --*=*|*.) ;;
+ *=|*.) ;;
*) c="$c " ;;
esac
COMPREPLY[i++]="${2-}$c"
@@ -524,7 +524,7 @@ __git_index_files ()
# Even when a directory name itself does not contain
# any special characters, it will still be quoted if
# any of its (stripped) trailing path components do.
- # Because of this we may have seen the same direcory
+ # Because of this we may have seen the same directory
# both quoted and unquoted.
if (p in paths)
# We have seen the same directory unquoted,
@@ -1250,10 +1250,7 @@ _git_archive ()
return
;;
--*)
- __gitcomp "
- --format= --list --verbose
- --prefix= --remote= --exec= --output
- "
+ __gitcomp_builtin archive "--format= --list --verbose --prefix= --worktree-attributes"
return
;;
esac
@@ -1361,7 +1358,9 @@ _git_checkout ()
esac
}
-__git_cherry_pick_inprogress_options="--continue --quit --abort"
+__git_sequencer_inprogress_options="--continue --quit --abort --skip"
+
+__git_cherry_pick_inprogress_options=$__git_sequencer_inprogress_options
_git_cherry_pick ()
{
@@ -1399,7 +1398,18 @@ _git_clean ()
_git_clone ()
{
+ case "$prev" in
+ -c|--config)
+ __git_complete_config_variable_name_and_value
+ return
+ ;;
+ esac
case "$cur" in
+ --config=*)
+ __git_complete_config_variable_name_and_value \
+ --cur="${cur##--config=}"
+ return
+ ;;
--*)
__gitcomp_builtin clone
return
@@ -1476,6 +1486,8 @@ __git_diff_common_options="--stat --numstat --shortstat --summary
--dirstat-by-file= --cumulative
--diff-algorithm=
--submodule --submodule= --ignore-submodules
+ --indent-heuristic --no-indent-heuristic
+ --textconv --no-textconv
"
_git_diff ()
@@ -1784,6 +1796,10 @@ _git_log ()
__gitcomp "$__git_diff_submodule_formats" "" "${cur##--submodule=}"
return
;;
+ --no-walk=*)
+ __gitcomp "sorted unsorted" "" "${cur##--no-walk=}"
+ return
+ ;;
--*)
__gitcomp "
$__git_log_common_options
@@ -1791,16 +1807,19 @@ _git_log ()
$__git_log_gitk_options
--root --topo-order --date-order --reverse
--follow --full-diff
- --abbrev-commit --abbrev=
+ --abbrev-commit --no-abbrev-commit --abbrev=
--relative-date --date=
--pretty= --format= --oneline
--show-signature
--cherry-mark
--cherry-pick
--graph
- --decorate --decorate=
+ --decorate --decorate= --no-decorate
--walk-reflogs
+ --no-walk --no-walk= --do-walk
--parents --children
+ --expand-tabs --expand-tabs= --no-expand-tabs
+ --patch
$merge
$__git_diff_common_options
--pickaxe-all --pickaxe-regex
@@ -2004,15 +2023,18 @@ _git_range_diff ()
__git_complete_revlist
}
+__git_rebase_inprogress_options="--continue --skip --abort --quit --show-current-patch"
+__git_rebase_interactive_inprogress_options="$__git_rebase_inprogress_options --edit-todo"
+
_git_rebase ()
{
__git_find_repo_path
if [ -f "$__git_repo_path"/rebase-merge/interactive ]; then
- __gitcomp "--continue --skip --abort --quit --edit-todo --show-current-patch"
+ __gitcomp "$__git_rebase_interactive_inprogress_options"
return
elif [ -d "$__git_repo_path"/rebase-apply ] || \
[ -d "$__git_repo_path"/rebase-merge ]; then
- __gitcomp "--continue --skip --abort --quit --show-current-patch"
+ __gitcomp "$__git_rebase_inprogress_options"
return
fi
__git_complete_strategy && return
@@ -2022,19 +2044,8 @@ _git_rebase ()
return
;;
--*)
- __gitcomp "
- --onto --merge --strategy --interactive
- --rebase-merges --preserve-merges --stat --no-stat
- --committer-date-is-author-date --ignore-date
- --ignore-whitespace --whitespace=
- --autosquash --no-autosquash
- --fork-point --no-fork-point
- --autostash --no-autostash
- --verify --no-verify
- --keep-empty --root --force-rebase --no-ff
- --rerere-autoupdate
- --exec
- "
+ __gitcomp_builtin rebase "" \
+ "$__git_rebase_interactive_inprogress_options"
return
esac
@@ -2225,181 +2236,282 @@ __git_config_vars=
__git_compute_config_vars ()
{
test -n "$__git_config_vars" ||
- __git_config_vars="$(git help --config-for-completion | sort | uniq)"
+ __git_config_vars="$(git help --config-for-completion | sort -u)"
}
-_git_config ()
+# Completes possible values of various configuration variables.
+#
+# Usage: __git_complete_config_variable_value [<option>]...
+# --varname=<word>: The name of the configuration variable whose value is
+# to be completed. Defaults to the previous word on the
+# command line.
+# --cur=<word>: The current value to be completed. Defaults to the current
+# word to be completed.
+__git_complete_config_variable_value ()
{
- local varname
+ local varname="$prev" cur_="$cur"
+
+ while test $# != 0; do
+ case "$1" in
+ --varname=*) varname="${1##--varname=}" ;;
+ --cur=*) cur_="${1##--cur=}" ;;
+ *) return 1 ;;
+ esac
+ shift
+ done
if [ "${BASH_VERSINFO[0]:-0}" -ge 4 ]; then
- varname="${prev,,}"
+ varname="${varname,,}"
else
- varname="$(echo "$prev" |tr A-Z a-z)"
+ varname="$(echo "$varname" |tr A-Z a-z)"
fi
case "$varname" in
branch.*.remote|branch.*.pushremote)
- __gitcomp_nl "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)" "" "$cur_"
return
;;
branch.*.merge)
- __git_complete_refs
+ __git_complete_refs --cur="$cur_"
return
;;
branch.*.rebase)
- __gitcomp "false true merges preserve interactive"
+ __gitcomp "false true merges preserve interactive" "" "$cur_"
return
;;
remote.pushdefault)
- __gitcomp_nl "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)" "" "$cur_"
return
;;
remote.*.fetch)
- local remote="${prev#remote.}"
+ local remote="${varname#remote.}"
remote="${remote%.fetch}"
- if [ -z "$cur" ]; then
+ if [ -z "$cur_" ]; then
__gitcomp_nl "refs/heads/" "" "" ""
return
fi
- __gitcomp_nl "$(__git_refs_remotes "$remote")"
+ __gitcomp_nl "$(__git_refs_remotes "$remote")" "" "$cur_"
return
;;
remote.*.push)
- local remote="${prev#remote.}"
+ local remote="${varname#remote.}"
remote="${remote%.push}"
__gitcomp_nl "$(__git for-each-ref \
- --format='%(refname):%(refname)' refs/heads)"
+ --format='%(refname):%(refname)' refs/heads)" "" "$cur_"
return
;;
pull.twohead|pull.octopus)
__git_compute_merge_strategies
- __gitcomp "$__git_merge_strategies"
- return
- ;;
- color.branch|color.diff|color.interactive|\
- color.showbranch|color.status|color.ui)
- __gitcomp "always never auto"
+ __gitcomp "$__git_merge_strategies" "" "$cur_"
return
;;
color.pager)
- __gitcomp "false true"
+ __gitcomp "false true" "" "$cur_"
return
;;
color.*.*)
__gitcomp "
normal black red green yellow blue magenta cyan white
bold dim ul blink reverse
- "
+ " "" "$cur_"
+ return
+ ;;
+ color.*)
+ __gitcomp "false true always never auto" "" "$cur_"
return
;;
diff.submodule)
- __gitcomp "$__git_diff_submodule_formats"
+ __gitcomp "$__git_diff_submodule_formats" "" "$cur_"
return
;;
help.format)
- __gitcomp "man info web html"
+ __gitcomp "man info web html" "" "$cur_"
return
;;
log.date)
- __gitcomp "$__git_log_date_formats"
+ __gitcomp "$__git_log_date_formats" "" "$cur_"
return
;;
sendemail.aliasfiletype)
- __gitcomp "mutt mailrc pine elm gnus"
+ __gitcomp "mutt mailrc pine elm gnus" "" "$cur_"
return
;;
sendemail.confirm)
- __gitcomp "$__git_send_email_confirm_options"
+ __gitcomp "$__git_send_email_confirm_options" "" "$cur_"
return
;;
sendemail.suppresscc)
- __gitcomp "$__git_send_email_suppresscc_options"
+ __gitcomp "$__git_send_email_suppresscc_options" "" "$cur_"
return
;;
sendemail.transferencoding)
- __gitcomp "7bit 8bit quoted-printable base64"
- return
- ;;
- --get|--get-all|--unset|--unset-all)
- __gitcomp_nl "$(__git_config_get_set_variables)"
+ __gitcomp "7bit 8bit quoted-printable base64" "" "$cur_"
return
;;
*.*)
return
;;
esac
- case "$cur" in
- --*)
- __gitcomp_builtin config
- return
- ;;
+}
+
+# Completes configuration sections, subsections, variable names.
+#
+# Usage: __git_complete_config_variable_name [<option>]...
+# --cur=<word>: The current configuration section/variable name to be
+# completed. Defaults to the current word to be completed.
+# --sfx=<suffix>: A suffix to be appended to each fully completed
+# configuration variable name (but not to sections or
+# subsections) instead of the default space.
+__git_complete_config_variable_name ()
+{
+ local cur_="$cur" sfx
+
+ while test $# != 0; do
+ case "$1" in
+ --cur=*) cur_="${1##--cur=}" ;;
+ --sfx=*) sfx="${1##--sfx=}" ;;
+ *) return 1 ;;
+ esac
+ shift
+ done
+
+ case "$cur_" in
branch.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "remote pushRemote merge mergeOptions rebase" "$pfx" "$cur_"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
+ __gitcomp "remote pushRemote merge mergeOptions rebase" "$pfx" "$cur_" "$sfx"
return
;;
branch.*)
- local pfx="${cur%.*}." cur_="${cur#*.}"
+ local pfx="${cur%.*}."
+ cur_="${cur#*.}"
__gitcomp_direct "$(__git_heads "$pfx" "$cur_" ".")"
- __gitcomp_nl_append $'autoSetupMerge\nautoSetupRebase\n' "$pfx" "$cur_"
+ __gitcomp_nl_append $'autoSetupMerge\nautoSetupRebase\n' "$pfx" "$cur_" "$sfx"
return
;;
guitool.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
__gitcomp "
argPrompt cmd confirm needsFile noConsole noRescan
prompt revPrompt revUnmerged title
- " "$pfx" "$cur_"
+ " "$pfx" "$cur_" "$sfx"
return
;;
difftool.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "cmd path" "$pfx" "$cur_"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_" "$sfx"
return
;;
man.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "cmd path" "$pfx" "$cur_"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_" "$sfx"
return
;;
mergetool.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "cmd path trustExitCode" "$pfx" "$cur_"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
+ __gitcomp "cmd path trustExitCode" "$pfx" "$cur_" "$sfx"
return
;;
pager.*)
- local pfx="${cur%.*}." cur_="${cur#*.}"
+ local pfx="${cur_%.*}."
+ cur_="${cur_#*.}"
__git_compute_all_commands
- __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_"
+ __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_" "$sfx"
return
;;
remote.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
__gitcomp "
url proxy fetch push mirror skipDefaultUpdate
receivepack uploadpack tagOpt pushurl
- " "$pfx" "$cur_"
+ " "$pfx" "$cur_" "$sfx"
return
;;
remote.*)
- local pfx="${cur%.*}." cur_="${cur#*.}"
+ local pfx="${cur_%.*}."
+ cur_="${cur_#*.}"
__gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
- __gitcomp_nl_append "pushDefault" "$pfx" "$cur_"
+ __gitcomp_nl_append "pushDefault" "$pfx" "$cur_" "$sfx"
return
;;
url.*.*)
- local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_"
+ local pfx="${cur_%.*}."
+ cur_="${cur_##*.}"
+ __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_" "$sfx"
return
;;
*.*)
__git_compute_config_vars
- __gitcomp "$__git_config_vars"
+ __gitcomp "$__git_config_vars" "" "$cur_" "$sfx"
;;
*)
__git_compute_config_vars
- __gitcomp "$(echo "$__git_config_vars" | sed 's/\.[^ ]*/./g')"
+ __gitcomp "$(echo "$__git_config_vars" |
+ awk -F . '{
+ sections[$1] = 1
+ }
+ END {
+ for (s in sections)
+ print s "."
+ }
+ ')" "" "$cur_"
+ ;;
+ esac
+}
+
+# Completes '='-separated configuration sections/variable names and values
+# for 'git -c section.name=value'.
+#
+# Usage: __git_complete_config_variable_name_and_value [<option>]...
+# --cur=<word>: The current configuration section/variable name/value to be
+# completed. Defaults to the current word to be completed.
+__git_complete_config_variable_name_and_value ()
+{
+ local cur_="$cur"
+
+ while test $# != 0; do
+ case "$1" in
+ --cur=*) cur_="${1##--cur=}" ;;
+ *) return 1 ;;
+ esac
+ shift
+ done
+
+ case "$cur_" in
+ *=*)
+ __git_complete_config_variable_value \
+ --varname="${cur_%%=*}" --cur="${cur_#*=}"
+ ;;
+ *)
+ __git_complete_config_variable_name --cur="$cur_" --sfx='='
+ ;;
+ esac
+}
+
+_git_config ()
+{
+ case "$prev" in
+ --get|--get-all|--unset|--unset-all)
+ __gitcomp_nl "$(__git_config_get_set_variables)"
+ return
+ ;;
+ *.*)
+ __git_complete_config_variable_value
+ return
+ ;;
+ esac
+ case "$cur" in
+ --*)
+ __gitcomp_builtin config
+ ;;
+ *)
+ __git_complete_config_variable_name
+ ;;
esac
}
@@ -2512,7 +2624,7 @@ _git_restore ()
esac
}
-__git_revert_inprogress_options="--continue --quit --abort"
+__git_revert_inprogress_options=$__git_sequencer_inprogress_options
_git_revert ()
{
@@ -2580,8 +2692,9 @@ _git_show ()
return
;;
--*)
- __gitcomp "--pretty= --format= --abbrev-commit --oneline
- --show-signature
+ __gitcomp "--pretty= --format= --abbrev-commit --no-abbrev-commit
+ --oneline --show-signature --patch
+ --expand-tabs --expand-tabs= --no-expand-tabs
$__git_diff_common_options
"
return
@@ -2956,7 +3069,11 @@ __git_main ()
# Bash filename completion
return
;;
- -c|--namespace)
+ -c)
+ __git_complete_config_variable_name_and_value
+ return
+ ;;
+ --namespace)
# we don't support completing these options' arguments
return
;;
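The completion changes above teach `git -c`, `--config=` and `git config` to complete both a configuration variable name and, once an '=' has been typed, its value. A rough standalone C sketch of that split-and-dispatch step follows; the helper names are hypothetical, the real logic lives in the shell functions above, and C is used here only to keep the examples in one language.

	/* standalone sketch: split "section.name=value" at the first '=' */
	#include <stdio.h>
	#include <string.h>

	static void complete_name(const char *prefix)
	{
		printf("complete config names starting with '%s'\n", prefix);
	}

	static void complete_value(const char *name, const char *prefix)
	{
		printf("complete values of '%s' starting with '%s'\n", name, prefix);
	}

	static void complete_name_and_value(const char *word)
	{
		const char *eq = strchr(word, '=');

		if (eq) {
			char name[128];
			size_t n = (size_t)(eq - word);

			if (n >= sizeof(name))
				n = sizeof(name) - 1;
			memcpy(name, word, n);
			name[n] = '\0';
			complete_value(name, eq + 1);   /* cursor is past the '=' */
		} else {
			complete_name(word);            /* still typing the variable name */
		}
	}

	int main(void)
	{
		complete_name_and_value("user.em");
		complete_name_and_value("color.ui=al");
		return 0;
	}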
diff --git a/contrib/hg-to-git/hg-to-git.py b/contrib/hg-to-git/hg-to-git.py
index de3f81667e..7eb1b24cc7 100755
--- a/contrib/hg-to-git/hg-to-git.py
+++ b/contrib/hg-to-git/hg-to-git.py
@@ -42,7 +42,7 @@ hgnewcsets = 0
def usage():
- print """\
+ print("""\
%s: [OPTIONS] <hgprj>
options:
@@ -54,7 +54,7 @@ options:
required:
hgprj: name of the HG project to import (directory)
-""" % sys.argv[0]
+""" % sys.argv[0])
#------------------------------------------------------------------------------
@@ -104,22 +104,22 @@ os.chdir(hgprj)
if state:
if os.path.exists(state):
if verbose:
- print 'State does exist, reading'
+ print('State does exist, reading')
f = open(state, 'r')
hgvers = pickle.load(f)
else:
- print 'State does not exist, first run'
+ print('State does not exist, first run')
sock = os.popen('hg tip --template "{rev}"')
tip = sock.read()
if sock.close():
sys.exit(1)
if verbose:
- print 'tip is', tip
+ print('tip is', tip)
# Calculate the branches
if verbose:
- print 'analysing the branches...'
+ print('analysing the branches...')
hgchildren["0"] = ()
hgparents["0"] = (None, None)
hgbranch["0"] = "master"
@@ -154,15 +154,15 @@ for cset in range(1, int(tip) + 1):
else:
hgbranch[str(cset)] = "branch-" + str(cset)
-if not hgvers.has_key("0"):
- print 'creating repository'
+if "0" not in hgvers:
+ print('creating repository')
os.system('git init')
# loop through every hg changeset
for cset in range(int(tip) + 1):
# incremental, already seen
- if hgvers.has_key(str(cset)):
+ if str(cset) in hgvers:
continue
hgnewcsets += 1
@@ -180,27 +180,27 @@ for cset in range(int(tip) + 1):
os.write(fdcomment, csetcomment)
os.close(fdcomment)
- print '-----------------------------------------'
- print 'cset:', cset
- print 'branch:', hgbranch[str(cset)]
- print 'user:', user
- print 'date:', date
- print 'comment:', csetcomment
+ print('-----------------------------------------')
+ print('cset:', cset)
+ print('branch:', hgbranch[str(cset)])
+ print('user:', user)
+ print('date:', date)
+ print('comment:', csetcomment)
if parent:
- print 'parent:', parent
+ print('parent:', parent)
if mparent:
- print 'mparent:', mparent
+ print('mparent:', mparent)
if tag:
- print 'tag:', tag
- print '-----------------------------------------'
+ print('tag:', tag)
+ print('-----------------------------------------')
# checkout the parent if necessary
if cset != 0:
if hgbranch[str(cset)] == "branch-" + str(cset):
- print 'creating new branch', hgbranch[str(cset)]
+ print('creating new branch', hgbranch[str(cset)])
os.system('git checkout -b %s %s' % (hgbranch[str(cset)], hgvers[parent]))
else:
- print 'checking out branch', hgbranch[str(cset)]
+ print('checking out branch', hgbranch[str(cset)])
os.system('git checkout %s' % hgbranch[str(cset)])
# merge
@@ -209,7 +209,7 @@ for cset in range(int(tip) + 1):
otherbranch = hgbranch[mparent]
else:
otherbranch = hgbranch[parent]
- print 'merging', otherbranch, 'into', hgbranch[str(cset)]
+ print('merging', otherbranch, 'into', hgbranch[str(cset)])
os.system(getgitenv(user, date) + 'git merge --no-commit -s ours "" %s %s' % (hgbranch[str(cset)], otherbranch))
# remove everything except .git and .hg directories
@@ -233,12 +233,12 @@ for cset in range(int(tip) + 1):
# delete branch if not used anymore...
if mparent and len(hgchildren[str(cset)]):
- print "Deleting unused branch:", otherbranch
+ print("Deleting unused branch:", otherbranch)
os.system('git branch -d %s' % otherbranch)
# retrieve and record the version
vvv = os.popen('git show --quiet --pretty=format:%H').read()
- print 'record', cset, '->', vvv
+ print('record', cset, '->', vvv)
hgvers[str(cset)] = vvv
if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
@@ -247,7 +247,7 @@ if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
# write the state for incrementals
if state:
if verbose:
- print 'Writing state'
+ print('Writing state')
f = open(state, 'w')
pickle.dump(hgvers, f)
diff --git a/contrib/svn-fe/svn-fe.txt b/contrib/svn-fe/svn-fe.txt
index a3425f4770..19333fc8df 100644
--- a/contrib/svn-fe/svn-fe.txt
+++ b/contrib/svn-fe/svn-fe.txt
@@ -56,7 +56,7 @@ line. This line has the form `git-svn-id: URL@REVNO UUID`.
The resulting repository will generally require further processing
to put each project in its own repository and to separate the history
-of each branch. The 'git filter-branch --subdirectory-filter' command
+of each branch. The 'git filter-repo --subdirectory-filter' command
may be useful for this purpose.
BUGS
@@ -67,5 +67,5 @@ The exit status does not reflect whether an error was detected.
SEE ALSO
--------
-git-svn(1), svn2git(1), svk(1), git-filter-branch(1), git-fast-import(1),
+git-svn(1), svn2git(1), svk(1), git-filter-repo(1), git-fast-import(1),
https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt
diff --git a/contrib/svn-fe/svnrdump_sim.py b/contrib/svn-fe/svnrdump_sim.py
index 11ac6f6927..50c6a4f89d 100755
--- a/contrib/svn-fe/svnrdump_sim.py
+++ b/contrib/svn-fe/svnrdump_sim.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
"""
Simulates svnrdump by replaying an existing dump from a file, taking care
of the specified revision range.
diff --git a/convert.c b/convert.c
index 94ff837649..25ac525d5f 100644
--- a/convert.c
+++ b/convert.c
@@ -8,6 +8,7 @@
#include "pkt-line.h"
#include "sub-process.h"
#include "utf8.h"
+#include "ll-merge.h"
/*
* convert.c - convert a file when checking it out and checking it in.
@@ -289,8 +290,8 @@ static int validate_encoding(const char *path, const char *enc,
const char *stripped = NULL;
char *upper = xstrdup_toupper(enc);
upper[strlen(upper)-2] = '\0';
- if (!skip_prefix(upper, "UTF-", &stripped))
- skip_prefix(stripped, "UTF", &stripped);
+ if (skip_prefix(upper, "UTF", &stripped))
+ skip_prefix(stripped, "-", &stripped);
advise(advise_msg, path, stripped);
free(upper);
if (die_on_error)
@@ -309,8 +310,8 @@ static int validate_encoding(const char *path, const char *enc,
"working-tree-encoding.");
const char *stripped = NULL;
char *upper = xstrdup_toupper(enc);
- if (!skip_prefix(upper, "UTF-", &stripped))
- skip_prefix(stripped, "UTF", &stripped);
+ if (skip_prefix(upper, "UTF", &stripped))
+ skip_prefix(stripped, "-", &stripped);
advise(advise_msg, path, stripped, stripped);
free(upper);
if (die_on_error)
@@ -1293,10 +1294,11 @@ struct conv_attrs {
const char *working_tree_encoding; /* Supported encoding or default encoding if NULL */
};
+static struct attr_check *check;
+
static void convert_attrs(const struct index_state *istate,
struct conv_attrs *ca, const char *path)
{
- static struct attr_check *check;
struct attr_check_item *ccheck = NULL;
if (!check) {
@@ -1339,6 +1341,23 @@ static void convert_attrs(const struct index_state *istate,
ca->crlf_action = CRLF_AUTO_INPUT;
}
+void reset_parsed_attributes(void)
+{
+ struct convert_driver *drv, *next;
+
+ attr_check_free(check);
+ check = NULL;
+ reset_merge_attributes();
+
+ for (drv = user_convert; drv; drv = next) {
+ next = drv->next;
+ free((void *)drv->name);
+ free(drv);
+ }
+ user_convert = NULL;
+ user_convert_tail = NULL;
+}
+
int would_convert_to_git_filter_fd(const struct index_state *istate, const char *path)
{
struct conv_attrs ca;
diff --git a/convert.h b/convert.h
index 831559f10d..3710969d43 100644
--- a/convert.h
+++ b/convert.h
@@ -94,6 +94,12 @@ void convert_to_git_filter_fd(const struct index_state *istate,
int would_convert_to_git_filter_fd(const struct index_state *istate,
const char *path);
+/*
+ * Reset the internal list of attributes used by convert_to_git and
+ * convert_to_working_tree.
+ */
+void reset_parsed_attributes(void);
+
/*****************************************************************
*
* Streaming conversion support
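convert.c now keeps its parsed attribute state behind a file-scope pointer and exposes reset_parsed_attributes() to drop it, so the next conversion re-reads the attribute files. A minimal standalone sketch of that cache-and-reset pattern is below; get_rules, reset_rules and the "crlf=input" string are invented stand-ins, not git's attribute code.

	/* standalone sketch: lazily built cache plus an explicit reset */
	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>

	static char *rules;                      /* lazily built cache */

	static const char *get_rules(void)
	{
		if (!rules)
			rules = strdup("crlf=input"); /* stand-in for parsing attr files */
		return rules;
	}

	static void reset_rules(void)
	{
		free(rules);
		rules = NULL;                    /* next get_rules() re-parses */
	}

	int main(void)
	{
		printf("%s\n", get_rules());
		reset_rules();                   /* e.g. after attribute files changed */
		printf("%s\n", get_rules());
		return 0;
	}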
diff --git a/credential-store.c b/credential-store.c
index ac295420dd..c010497cb2 100644
--- a/credential-store.c
+++ b/credential-store.c
@@ -72,15 +72,16 @@ static void store_credential_file(const char *fn, struct credential *c)
struct strbuf buf = STRBUF_INIT;
strbuf_addf(&buf, "%s://", c->protocol);
- strbuf_addstr_urlencode(&buf, c->username, 1);
+ strbuf_addstr_urlencode(&buf, c->username, is_rfc3986_unreserved);
strbuf_addch(&buf, ':');
- strbuf_addstr_urlencode(&buf, c->password, 1);
+ strbuf_addstr_urlencode(&buf, c->password, is_rfc3986_unreserved);
strbuf_addch(&buf, '@');
if (c->host)
- strbuf_addstr_urlencode(&buf, c->host, 1);
+ strbuf_addstr_urlencode(&buf, c->host, is_rfc3986_unreserved);
if (c->path) {
strbuf_addch(&buf, '/');
- strbuf_addstr_urlencode(&buf, c->path, 0);
+ strbuf_addstr_urlencode(&buf, c->path,
+ is_rfc3986_reserved_or_unreserved);
}
rewrite_credential_file(fn, c, &buf);
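credential-store.c now passes a predicate to strbuf_addstr_urlencode() that decides which bytes may stay unescaped, instead of a numeric flag. The standalone sketch below illustrates predicate-driven percent-encoding; it is not git's strbuf API, and the predicate only approximates the RFC 3986 unreserved set.

	/* standalone sketch: percent-encode everything the predicate rejects */
	#include <ctype.h>
	#include <stdio.h>

	typedef int (*allow_fn)(char);

	static int unreserved(char c)
	{
		return isalnum((unsigned char)c) ||
		       c == '-' || c == '_' || c == '.' || c == '~';
	}

	static void urlencode(const char *s, allow_fn allowed)
	{
		for (; *s; s++) {
			if (allowed(*s))
				putchar(*s);
			else
				printf("%%%02X", (unsigned char)*s);
		}
		putchar('\n');
	}

	int main(void)
	{
		urlencode("user name@host/path", unreserved);  /* space, '@', '/' escaped */
		return 0;
	}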
diff --git a/date.c b/date.c
index 8126146c50..041db7db4e 100644
--- a/date.c
+++ b/date.c
@@ -128,16 +128,17 @@ static void get_time(struct timeval *now)
gettimeofday(now, NULL);
}
-void show_date_relative(timestamp_t time,
- const struct timeval *now,
- struct strbuf *timebuf)
+void show_date_relative(timestamp_t time, struct strbuf *timebuf)
{
+ struct timeval now;
timestamp_t diff;
- if (now->tv_sec < time) {
+
+ get_time(&now);
+ if (now.tv_sec < time) {
strbuf_addstr(timebuf, _("in the future"));
return;
}
- diff = now->tv_sec - time;
+ diff = now.tv_sec - time;
if (diff < 90) {
strbuf_addf(timebuf,
Q_("%"PRItime" second ago", "%"PRItime" seconds ago", diff), diff);
@@ -240,9 +241,7 @@ static void show_date_normal(struct strbuf *buf, timestamp_t time, struct tm *tm
/* Show "today" times as just relative times */
if (hide.wday) {
- struct timeval now;
- get_time(&now);
- show_date_relative(time, &now, buf);
+ show_date_relative(time, buf);
return;
}
@@ -313,11 +312,8 @@ const char *show_date(timestamp_t time, int tz, const struct date_mode *mode)
}
if (mode->type == DATE_RELATIVE) {
- struct timeval now;
-
strbuf_reset(&timebuf);
- get_time(&now);
- show_date_relative(time, &now, &timebuf);
+ show_date_relative(time, &timebuf);
return timebuf.buf;
}
@@ -1288,15 +1284,18 @@ static timestamp_t approxidate_str(const char *date,
return (timestamp_t)update_tm(&tm, &now, 0);
}
-timestamp_t approxidate_relative(const char *date, const struct timeval *tv)
+timestamp_t approxidate_relative(const char *date)
{
+ struct timeval tv;
timestamp_t timestamp;
int offset;
int errors = 0;
if (!parse_date_basic(date, &timestamp, &offset))
return timestamp;
- return approxidate_str(date, tv, &errors);
+
+ get_time(&tv);
+ return approxidate_str(date, (const struct timeval *) &tv, &errors);
}
timestamp_t approxidate_careful(const char *date, int *error_ret)
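show_date_relative() and approxidate_relative() now obtain the current time themselves instead of requiring every caller to pass it in. A tiny standalone sketch of the same caller-facing simplification is shown here, using the standard time() call rather than git's get_time(); show_age is an invented name.

	/* standalone sketch: the helper fetches "now" itself */
	#include <stdio.h>
	#include <time.h>

	static void show_age(time_t then)
	{
		time_t now = time(NULL);         /* fetched here, not by the caller */
		double diff = difftime(now, then);

		if (diff < 0)
			puts("in the future");
		else
			printf("%.0f seconds ago\n", diff);
	}

	int main(void)
	{
		show_age(time(NULL) - 90);
		return 0;
	}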
diff --git a/diff-delta.c b/diff-delta.c
index e49643353b..77fea08dfb 100644
--- a/diff-delta.c
+++ b/diff-delta.c
@@ -326,6 +326,8 @@ create_delta(const struct delta_index *index,
const unsigned char *ref_data, *ref_top, *data, *top;
unsigned char *out;
+ *delta_size = 0;
+
if (!trg_buf || !trg_size)
return NULL;
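create_delta() now clears *delta_size before any early return, so a NULL result still leaves a well-defined size behind for the caller. The standalone sketch below shows that out-parameter hardening pattern with invented names; it is not the delta code itself.

	/* standalone sketch: initialize the out-parameter before early returns */
	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>

	static char *make_copy(const char *src, size_t *out_len)
	{
		*out_len = 0;                    /* defined even on the error paths */

		if (!src || !*src)
			return NULL;             /* caller's *out_len is already 0 */

		*out_len = strlen(src);
		return strdup(src);
	}

	int main(void)
	{
		size_t len = 123;
		char *p = make_copy("", &len);

		printf("%zu %s\n", len, p ? p : "(null)");
		free(p);
		return 0;
	}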
diff --git a/diff.c b/diff.c
index efe42b341a..afe4400a60 100644
--- a/diff.c
+++ b/diff.c
@@ -25,7 +25,7 @@
#include "packfile.h"
#include "parse-options.h"
#include "help.h"
-#include "fetch-object.h"
+#include "promisor-remote.h"
#ifdef NO_FAST_WORKING_DIRECTORY
#define FAST_WORKING_DIRECTORY 0
@@ -933,16 +933,18 @@ static int cmp_in_block_with_wsd(const struct diff_options *o,
}
static int moved_entry_cmp(const void *hashmap_cmp_fn_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
const struct diff_options *diffopt = hashmap_cmp_fn_data;
- const struct moved_entry *a = entry;
- const struct moved_entry *b = entry_or_key;
+ const struct moved_entry *a, *b;
unsigned flags = diffopt->color_moved_ws_handling
& XDF_WHITESPACE_FLAGS;
+ a = container_of(eptr, const struct moved_entry, ent);
+ b = container_of(entry_or_key, const struct moved_entry, ent);
+
if (diffopt->color_moved_ws_handling &
COLOR_MOVED_WS_ALLOW_INDENTATION_CHANGE)
/*
@@ -964,8 +966,9 @@ static struct moved_entry *prepare_entry(struct diff_options *o,
struct moved_entry *ret = xmalloc(sizeof(*ret));
struct emitted_diff_symbol *l = &o->emitted_symbols->buf[line_no];
unsigned flags = o->color_moved_ws_handling & XDF_WHITESPACE_FLAGS;
+ unsigned int hash = xdiff_hash_string(l->line, l->len, flags);
- ret->ent.hash = xdiff_hash_string(l->line, l->len, flags);
+ hashmap_entry_init(&ret->ent, hash);
ret->es = l;
ret->next_line = NULL;
@@ -1002,7 +1005,7 @@ static void add_lines_to_move_detection(struct diff_options *o,
if (prev_line && prev_line->es->s == o->emitted_symbols->buf[n].s)
prev_line->next_line = key;
- hashmap_add(hm, key);
+ hashmap_add(hm, &key->ent);
prev_line = key;
}
}
@@ -1018,7 +1021,7 @@ static void pmb_advance_or_null(struct diff_options *o,
struct moved_entry *prev = pmb[i].match;
struct moved_entry *cur = (prev && prev->next_line) ?
prev->next_line : NULL;
- if (cur && !hm->cmpfn(o, cur, match, NULL)) {
+ if (cur && !hm->cmpfn(o, &cur->ent, &match->ent, NULL)) {
pmb[i].match = cur;
} else {
pmb[i].match = NULL;
@@ -1035,7 +1038,7 @@ static void pmb_advance_or_null_multi_match(struct diff_options *o,
int i;
char *got_match = xcalloc(1, pmb_nr);
- for (; match; match = hashmap_get_next(hm, match)) {
+ hashmap_for_each_entry_from(hm, match, ent) {
for (i = 0; i < pmb_nr; i++) {
struct moved_entry *prev = pmb[i].match;
struct moved_entry *cur = (prev && prev->next_line) ?
@@ -1143,13 +1146,13 @@ static void mark_color_as_moved(struct diff_options *o,
case DIFF_SYMBOL_PLUS:
hm = del_lines;
key = prepare_entry(o, n);
- match = hashmap_get(hm, key, NULL);
+ match = hashmap_get_entry(hm, key, ent, NULL);
free(key);
break;
case DIFF_SYMBOL_MINUS:
hm = add_lines;
key = prepare_entry(o, n);
- match = hashmap_get(hm, key, NULL);
+ match = hashmap_get_entry(hm, key, ent, NULL);
free(key);
break;
default:
@@ -1188,7 +1191,7 @@ static void mark_color_as_moved(struct diff_options *o,
* The current line is the start of a new block.
* Setup the set of potential blocks.
*/
- for (; match; match = hashmap_get_next(hm, match)) {
+ hashmap_for_each_entry_from(hm, match, ent) {
ALLOC_GROW(pmb, pmb_nr + 1, pmb_alloc);
if (o->color_moved_ws_handling &
COLOR_MOVED_WS_ALLOW_INDENTATION_CHANGE) {
@@ -5978,7 +5981,7 @@ static void diff_summary(struct diff_options *opt, struct diff_filepair *p)
}
struct patch_id_t {
- git_SHA_CTX *ctx;
+ git_hash_ctx *ctx;
int patchlen;
};
@@ -5995,16 +5998,16 @@ static int remove_space(char *line, int len)
return dst - line;
}
-void flush_one_hunk(struct object_id *result, git_SHA_CTX *ctx)
+void flush_one_hunk(struct object_id *result, git_hash_ctx *ctx)
{
unsigned char hash[GIT_MAX_RAWSZ];
unsigned short carry = 0;
int i;
- git_SHA1_Final(hash, ctx);
- git_SHA1_Init(ctx);
+ the_hash_algo->final_fn(hash, ctx);
+ the_hash_algo->init_fn(ctx);
/* 20-byte sum, with carry */
- for (i = 0; i < GIT_SHA1_RAWSZ; ++i) {
+ for (i = 0; i < the_hash_algo->rawsz; ++i) {
carry += result->hash[i] + hash[i];
result->hash[i] = carry;
carry >>= 8;
@@ -6018,21 +6021,21 @@ static void patch_id_consume(void *priv, char *line, unsigned long len)
new_len = remove_space(line, len);
- git_SHA1_Update(data->ctx, line, new_len);
+ the_hash_algo->update_fn(data->ctx, line, new_len);
data->patchlen += new_len;
}
-static void patch_id_add_string(git_SHA_CTX *ctx, const char *str)
+static void patch_id_add_string(git_hash_ctx *ctx, const char *str)
{
- git_SHA1_Update(ctx, str, strlen(str));
+ the_hash_algo->update_fn(ctx, str, strlen(str));
}
-static void patch_id_add_mode(git_SHA_CTX *ctx, unsigned mode)
+static void patch_id_add_mode(git_hash_ctx *ctx, unsigned mode)
{
/* large enough for 2^32 in octal */
char buf[12];
int len = xsnprintf(buf, sizeof(buf), "%06o", mode);
- git_SHA1_Update(ctx, buf, len);
+ the_hash_algo->update_fn(ctx, buf, len);
}
/* returns 0 upon success, and writes result into oid */
@@ -6040,10 +6043,10 @@ static int diff_get_patch_id(struct diff_options *options, struct object_id *oid
{
struct diff_queue_struct *q = &diff_queued_diff;
int i;
- git_SHA_CTX ctx;
+ git_hash_ctx ctx;
struct patch_id_t data;
- git_SHA1_Init(&ctx);
+ the_hash_algo->init_fn(&ctx);
memset(&data, 0, sizeof(struct patch_id_t));
data.ctx = &ctx;
oidclr(oid);
@@ -6076,27 +6079,27 @@ static int diff_get_patch_id(struct diff_options *options, struct object_id *oid
len2 = remove_space(p->two->path, strlen(p->two->path));
patch_id_add_string(&ctx, "diff--git");
patch_id_add_string(&ctx, "a/");
- git_SHA1_Update(&ctx, p->one->path, len1);
+ the_hash_algo->update_fn(&ctx, p->one->path, len1);
patch_id_add_string(&ctx, "b/");
- git_SHA1_Update(&ctx, p->two->path, len2);
+ the_hash_algo->update_fn(&ctx, p->two->path, len2);
if (p->one->mode == 0) {
patch_id_add_string(&ctx, "newfilemode");
patch_id_add_mode(&ctx, p->two->mode);
patch_id_add_string(&ctx, "---/dev/null");
patch_id_add_string(&ctx, "+++b/");
- git_SHA1_Update(&ctx, p->two->path, len2);
+ the_hash_algo->update_fn(&ctx, p->two->path, len2);
} else if (p->two->mode == 0) {
patch_id_add_string(&ctx, "deletedfilemode");
patch_id_add_mode(&ctx, p->one->mode);
patch_id_add_string(&ctx, "---a/");
- git_SHA1_Update(&ctx, p->one->path, len1);
+ the_hash_algo->update_fn(&ctx, p->one->path, len1);
patch_id_add_string(&ctx, "+++/dev/null");
} else {
patch_id_add_string(&ctx, "---a/");
- git_SHA1_Update(&ctx, p->one->path, len1);
+ the_hash_algo->update_fn(&ctx, p->one->path, len1);
patch_id_add_string(&ctx, "+++b/");
- git_SHA1_Update(&ctx, p->two->path, len2);
+ the_hash_algo->update_fn(&ctx, p->two->path, len2);
}
if (diff_header_only)
@@ -6108,10 +6111,10 @@ static int diff_get_patch_id(struct diff_options *options, struct object_id *oid
if (diff_filespec_is_binary(options->repo, p->one) ||
diff_filespec_is_binary(options->repo, p->two)) {
- git_SHA1_Update(&ctx, oid_to_hex(&p->one->oid),
- GIT_SHA1_HEXSZ);
- git_SHA1_Update(&ctx, oid_to_hex(&p->two->oid),
- GIT_SHA1_HEXSZ);
+ the_hash_algo->update_fn(&ctx, oid_to_hex(&p->one->oid),
+ the_hash_algo->hexsz);
+ the_hash_algo->update_fn(&ctx, oid_to_hex(&p->two->oid),
+ the_hash_algo->hexsz);
continue;
}
@@ -6128,7 +6131,7 @@ static int diff_get_patch_id(struct diff_options *options, struct object_id *oid
}
if (!stable)
- git_SHA1_Final(oid->hash, &ctx);
+ the_hash_algo->final_fn(oid->hash, &ctx);
return 0;
}
@@ -6230,8 +6233,10 @@ static void diff_flush_patch_all_file_pairs(struct diff_options *o)
if (o->color_moved == COLOR_MOVED_ZEBRA_DIM)
dim_moved_lines(o);
- hashmap_free(&add_lines, 1);
- hashmap_free(&del_lines, 1);
+ hashmap_free_entries(&add_lines, struct moved_entry,
+ ent);
+ hashmap_free_entries(&del_lines, struct moved_entry,
+ ent);
}
for (i = 0; i < esm.nr; i++)
@@ -6512,6 +6517,7 @@ static void add_if_missing(struct repository *r,
const struct diff_filespec *filespec)
{
if (filespec && filespec->oid_valid &&
+ !S_ISGITLINK(filespec->mode) &&
oid_object_info_extended(r, &filespec->oid, NULL,
OBJECT_INFO_FOR_PREFETCH))
oid_array_append(to_fetch, &filespec->oid);
@@ -6519,8 +6525,7 @@ static void add_if_missing(struct repository *r,
void diffcore_std(struct diff_options *options)
{
- if (options->repo == the_repository &&
- repository_format_partial_clone) {
+ if (options->repo == the_repository && has_promisor_remote()) {
/*
* Prefetch the diff pairs that are about to be flushed.
*/
@@ -6537,8 +6542,8 @@ void diffcore_std(struct diff_options *options)
/*
* NEEDSWORK: Consider deduplicating the OIDs sent.
*/
- fetch_objects(repository_format_partial_clone,
- to_fetch.oid, to_fetch.nr);
+ promisor_remote_get_direct(options->repo,
+ to_fetch.oid, to_fetch.nr);
oid_array_clear(&to_fetch);
}
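The patch-id code above moves from direct git_SHA1_* calls to the_hash_algo's init_fn/update_fn/final_fn pointers, so the same logic works with whichever hash the repository uses. The standalone sketch below mirrors that vtable idea with a toy, non-cryptographic backend; none of these names are git's real hash interface.

	/* standalone sketch: hash computation through a function-pointer vtable */
	#include <stdio.h>
	#include <string.h>

	struct toy_ctx { unsigned long sum; };

	struct hash_vtbl {
		size_t rawsz;
		void (*init_fn)(struct toy_ctx *);
		void (*update_fn)(struct toy_ctx *, const void *, size_t);
		void (*final_fn)(unsigned char *, struct toy_ctx *);
	};

	static void toy_init(struct toy_ctx *c) { c->sum = 0; }

	static void toy_update(struct toy_ctx *c, const void *p, size_t n)
	{
		const unsigned char *b = p;
		while (n--)
			c->sum += *b++;
	}

	static void toy_final(unsigned char *out, struct toy_ctx *c)
	{
		memcpy(out, &c->sum, sizeof(c->sum));
	}

	static const struct hash_vtbl toy = {
		sizeof(unsigned long), toy_init, toy_update, toy_final
	};

	int main(void)
	{
		const struct hash_vtbl *algo = &toy;   /* runtime-selected backend */
		struct toy_ctx ctx;
		unsigned char raw[sizeof(unsigned long)];
		size_t i;

		algo->init_fn(&ctx);
		algo->update_fn(&ctx, "diff--git", strlen("diff--git"));
		algo->final_fn(raw, &ctx);
		for (i = 0; i < algo->rawsz; i++)
			printf("%02x", raw[i]);
		putchar('\n');
		return 0;
	}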
diff --git a/diff.h b/diff.h
index c2c3056810..7f8f024feb 100644
--- a/diff.h
+++ b/diff.h
@@ -438,7 +438,7 @@ int run_diff_index(struct rev_info *revs, int cached);
int do_diff_cache(const struct object_id *, struct diff_options *);
int diff_flush_patch_id(struct diff_options *, struct object_id *, int, int);
-void flush_one_hunk(struct object_id *, git_SHA_CTX *);
+void flush_one_hunk(struct object_id *result, git_hash_ctx *ctx);
int diff_result_code(struct diff_options *, int);
diff --git a/diffcore-break.c b/diffcore-break.c
index 875aefd3fe..9d20a6a6fc 100644
--- a/diffcore-break.c
+++ b/diffcore-break.c
@@ -286,17 +286,17 @@ void diffcore_merge_broken(void)
/* Peer survived. Merge them */
merge_broken(p, pp, &outq);
q->queue[j] = NULL;
- break;
+ goto next;
}
}
- if (q->nr <= j)
- /* The peer did not survive, so we keep
- * it in the output.
- */
- diff_q(&outq, p);
+ /* The peer did not survive, so we keep
+ * it in the output.
+ */
+ diff_q(&outq, p);
}
else
diff_q(&outq, p);
+next:;
}
free(q->queue);
*q = outq;
diff --git a/diffcore-rename.c b/diffcore-rename.c
index 9624864858..531d7adeaf 100644
--- a/diffcore-rename.c
+++ b/diffcore-rename.c
@@ -274,18 +274,17 @@ static int find_identical_files(struct hashmap *srcs,
struct diff_options *options)
{
int renames = 0;
-
struct diff_filespec *target = rename_dst[dst_index].two;
struct file_similarity *p, *best = NULL;
int i = 100, best_score = -1;
+ unsigned int hash = hash_filespec(options->repo, target);
/*
* Find the best source match for specified destination.
*/
- p = hashmap_get_from_hash(srcs,
- hash_filespec(options->repo, target),
- NULL);
- for (; p; p = hashmap_get_next(srcs, p)) {
+ p = hashmap_get_entry_from_hash(srcs, hash, NULL,
+ struct file_similarity, entry);
+ hashmap_for_each_entry_from(srcs, p, entry) {
int score;
struct diff_filespec *source = p->filespec;
@@ -329,8 +328,8 @@ static void insert_file_table(struct repository *r,
entry->index = index;
entry->filespec = filespec;
- hashmap_entry_init(entry, hash_filespec(r, filespec));
- hashmap_add(table, entry);
+ hashmap_entry_init(&entry->entry, hash_filespec(r, filespec));
+ hashmap_add(table, &entry->entry);
}
/*
@@ -359,7 +358,7 @@ static int find_exact_renames(struct diff_options *options)
renames += find_identical_files(&file_table, i, options);
/* Free the hash data structure and entries */
- hashmap_free(&file_table, 1);
+ hashmap_free_entries(&file_table, struct file_similarity, entry);
return renames;
}
@@ -585,7 +584,7 @@ void diffcore_rename(struct diff_options *options)
stop_progress(&progress);
/* cost matrix sorted by most to least similar pair */
- QSORT(mx, dst_cnt * NUM_CANDIDATE_PER_DST, score_compare);
+ STABLE_QSORT(mx, dst_cnt * NUM_CANDIDATE_PER_DST, score_compare);
rename_count += find_renames(mx, dst_cnt, minimum_score, 0);
if (detect_rename == DIFF_DETECT_COPY)
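diffcore_rename() now sorts the candidate matrix with STABLE_QSORT so pairs with equal similarity scores keep their original order and rename detection stays deterministic. The standalone sketch below gets the same effect by tie-breaking an ordinary qsort() on the original index; it is an illustration, not git's sorting code.

	/* standalone sketch: deterministic ordering when scores tie */
	#include <stdio.h>
	#include <stdlib.h>

	struct cand { int score; int idx; };

	static int cmp_stable(const void *a_, const void *b_)
	{
		const struct cand *a = a_, *b = b_;

		if (b->score != a->score)
			return b->score - a->score;   /* higher score first */
		return a->idx - b->idx;               /* tie-break: original order */
	}

	int main(void)
	{
		struct cand c[] = { {50, 0}, {70, 1}, {50, 2}, {70, 3} };
		size_t i, n = sizeof(c) / sizeof(c[0]);

		qsort(c, n, sizeof(c[0]), cmp_stable);
		for (i = 0; i < n; i++)
			printf("score=%d idx=%d\n", c[i].score, c[i].idx);
		return 0;
	}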
diff --git a/dir.c b/dir.c
index d021c908e5..61f559f980 100644
--- a/dir.c
+++ b/dir.c
@@ -139,7 +139,7 @@ static size_t common_prefix_len(const struct pathspec *pathspec)
* ":(icase)path" is treated as a pathspec full of
* wildcard. In other words, only prefix is considered common
* prefix. If the pathspec is abc/foo abc/bar, running in
- * subdir xyz, the common prefix is still xyz, not xuz/abc as
+ * subdir xyz, the common prefix is still xyz, not xyz/abc as
* in non-:(icase).
*/
GUARD_PATHSPEC(pathspec,
@@ -273,19 +273,30 @@ static int do_read_blob(const struct object_id *oid, struct oid_stat *oid_stat,
#define DO_MATCH_EXCLUDE (1<<0)
#define DO_MATCH_DIRECTORY (1<<1)
-#define DO_MATCH_SUBMODULE (1<<2)
+#define DO_MATCH_LEADING_PATHSPEC (1<<2)
/*
- * Does 'match' match the given name?
- * A match is found if
+ * Does the given pathspec match the given name? A match is found if
*
- * (1) the 'match' string is leading directory of 'name', or
- * (2) the 'match' string is a wildcard and matches 'name', or
- * (3) the 'match' string is exactly the same as 'name'.
+ * (1) the pathspec string is leading directory of 'name' ("RECURSIVELY"), or
+ * (2) the pathspec string has a leading part matching 'name' ("LEADING"), or
+ * (3) the pathspec string is a wildcard and matches 'name' ("WILDCARD"), or
+ * (4) the pathspec string is exactly the same as 'name' ("EXACT").
*
- * and the return value tells which case it was.
+ * Return value tells which case it was (1-4), or 0 when there is no match.
*
- * It returns 0 when there is no match.
+ * It may be instructive to look at a small table of concrete examples
+ * to understand the differences between 1, 2, and 4:
+ *
+ *                        Pathspecs
+ *                |    a/b    |   a/b/    | a/b/c
+ *          ------+-----------+-----------+------------
+ *          a/b   |   EXACT   | EXACT[1]  | LEADING[2]
+ *   Names  a/b/  | RECURSIVE |   EXACT   | LEADING[2]
+ *          a/b/c | RECURSIVE | RECURSIVE |   EXACT
+ *
+ * [1] Only if DO_MATCH_DIRECTORY is passed; otherwise, this is NOT a match.
+ * [2] Only if DO_MATCH_LEADING_PATHSPEC is passed; otherwise, not a match.
*/
static int match_pathspec_item(const struct index_state *istate,
const struct pathspec_item *item, int prefix,
@@ -353,13 +364,14 @@ static int match_pathspec_item(const struct index_state *istate,
item->nowildcard_len - prefix))
return MATCHED_FNMATCH;
- /* Perform checks to see if "name" is a super set of the pathspec */
- if (flags & DO_MATCH_SUBMODULE) {
+ /* Perform checks to see if "name" is a leading string of the pathspec */
+ if (flags & DO_MATCH_LEADING_PATHSPEC) {
/* name is a literal prefix of the pathspec */
+ int offset = name[namelen-1] == '/' ? 1 : 0;
if ((namelen < matchlen) &&
- (match[namelen] == '/') &&
+ (match[namelen-offset] == '/') &&
!ps_strncmp(item, match, name, namelen))
- return MATCHED_RECURSIVELY;
+ return MATCHED_RECURSIVELY_LEADING_PATHSPEC;
/* name" doesn't match up to the first wild character */
if (item->nowildcard_len < item->len &&
@@ -376,7 +388,7 @@ static int match_pathspec_item(const struct index_state *istate,
* The submodules themselves will be able to perform more
* accurate matching to determine if the pathspec matches.
*/
- return MATCHED_RECURSIVELY;
+ return MATCHED_RECURSIVELY_LEADING_PATHSPEC;
}
return 0;
@@ -497,7 +509,7 @@ int submodule_path_match(const struct index_state *istate,
strlen(submodule_name),
0, seen,
DO_MATCH_DIRECTORY |
- DO_MATCH_SUBMODULE);
+ DO_MATCH_LEADING_PATHSPEC);
return matched;
}
@@ -561,7 +573,7 @@ int no_wildcard(const char *string)
return string[simple_length(string)] == '\0';
}
-void parse_exclude_pattern(const char **pattern,
+void parse_path_pattern(const char **pattern,
int *patternlen,
unsigned *flags,
int *nowildcardlen)
@@ -571,20 +583,20 @@ void parse_exclude_pattern(const char **pattern,
*flags = 0;
if (*p == '!') {
- *flags |= EXC_FLAG_NEGATIVE;
+ *flags |= PATTERN_FLAG_NEGATIVE;
p++;
}
len = strlen(p);
if (len && p[len - 1] == '/') {
len--;
- *flags |= EXC_FLAG_MUSTBEDIR;
+ *flags |= PATTERN_FLAG_MUSTBEDIR;
}
for (i = 0; i < len; i++) {
if (p[i] == '/')
break;
}
if (i == len)
- *flags |= EXC_FLAG_NODIR;
+ *flags |= PATTERN_FLAG_NODIR;
*nowildcardlen = simple_length(p);
/*
* we should have excluded the trailing slash from 'p' too,
@@ -594,35 +606,35 @@ void parse_exclude_pattern(const char **pattern,
if (*nowildcardlen > len)
*nowildcardlen = len;
if (*p == '*' && no_wildcard(p + 1))
- *flags |= EXC_FLAG_ENDSWITH;
+ *flags |= PATTERN_FLAG_ENDSWITH;
*pattern = p;
*patternlen = len;
}
-void add_exclude(const char *string, const char *base,
- int baselen, struct exclude_list *el, int srcpos)
+void add_pattern(const char *string, const char *base,
+ int baselen, struct pattern_list *pl, int srcpos)
{
- struct exclude *x;
+ struct path_pattern *pattern;
int patternlen;
unsigned flags;
int nowildcardlen;
- parse_exclude_pattern(&string, &patternlen, &flags, &nowildcardlen);
- if (flags & EXC_FLAG_MUSTBEDIR) {
- FLEXPTR_ALLOC_MEM(x, pattern, string, patternlen);
+ parse_path_pattern(&string, &patternlen, &flags, &nowildcardlen);
+ if (flags & PATTERN_FLAG_MUSTBEDIR) {
+ FLEXPTR_ALLOC_MEM(pattern, pattern, string, patternlen);
} else {
- x = xmalloc(sizeof(*x));
- x->pattern = string;
+ pattern = xmalloc(sizeof(*pattern));
+ pattern->pattern = string;
}
- x->patternlen = patternlen;
- x->nowildcardlen = nowildcardlen;
- x->base = base;
- x->baselen = baselen;
- x->flags = flags;
- x->srcpos = srcpos;
- ALLOC_GROW(el->excludes, el->nr + 1, el->alloc);
- el->excludes[el->nr++] = x;
- x->el = el;
+ pattern->patternlen = patternlen;
+ pattern->nowildcardlen = nowildcardlen;
+ pattern->base = base;
+ pattern->baselen = baselen;
+ pattern->flags = flags;
+ pattern->srcpos = srcpos;
+ ALLOC_GROW(pl->patterns, pl->nr + 1, pl->alloc);
+ pl->patterns[pl->nr++] = pattern;
+ pattern->pl = pl;
}
static int read_skip_worktree_file_from_index(const struct index_state *istate,
@@ -643,19 +655,19 @@ static int read_skip_worktree_file_from_index(const struct index_state *istate,
}
/*
- * Frees memory within el which was allocated for exclude patterns and
- * the file buffer. Does not free el itself.
+ * Frees memory within pl which was allocated for exclude patterns and
+ * the file buffer. Does not free pl itself.
*/
-void clear_exclude_list(struct exclude_list *el)
+void clear_pattern_list(struct pattern_list *pl)
{
int i;
- for (i = 0; i < el->nr; i++)
- free(el->excludes[i]);
- free(el->excludes);
- free(el->filebuf);
+ for (i = 0; i < pl->nr; i++)
+ free(pl->patterns[i]);
+ free(pl->patterns);
+ free(pl->filebuf);
- memset(el, 0, sizeof(*el));
+ memset(pl, 0, sizeof(*pl));
}
static void trim_trailing_spaces(char *buf)
@@ -762,21 +774,21 @@ static void invalidate_directory(struct untracked_cache *uc,
dir->dirs[i]->recurse = 0;
}
-static int add_excludes_from_buffer(char *buf, size_t size,
+static int add_patterns_from_buffer(char *buf, size_t size,
const char *base, int baselen,
- struct exclude_list *el);
+ struct pattern_list *pl);
/*
* Given a file with name "fname", read it (either from disk, or from
* an index if 'istate' is non-null), parse it and store the
- * exclude rules in "el".
+ * exclude rules in "pl".
*
* If "ss" is not NULL, compute SHA-1 of the exclude file and fill
- * stat data from disk (only valid if add_excludes returns zero). If
+ * stat data from disk (only valid if add_patterns returns zero). If
* ss_valid is non-zero, "ss" must contain good value as input.
*/
-static int add_excludes(const char *fname, const char *base, int baselen,
- struct exclude_list *el, struct index_state *istate,
+static int add_patterns(const char *fname, const char *base, int baselen,
+ struct pattern_list *pl, struct index_state *istate,
struct oid_stat *oid_stat)
{
struct stat st;
@@ -837,21 +849,21 @@ static int add_excludes(const char *fname, const char *base, int baselen,
}
}
- add_excludes_from_buffer(buf, size, base, baselen, el);
+ add_patterns_from_buffer(buf, size, base, baselen, pl);
return 0;
}
-static int add_excludes_from_buffer(char *buf, size_t size,
+static int add_patterns_from_buffer(char *buf, size_t size,
const char *base, int baselen,
- struct exclude_list *el)
+ struct pattern_list *pl)
{
int i, lineno = 1;
char *entry;
- el->filebuf = buf;
+ pl->filebuf = buf;
if (skip_utf8_bom(&buf, size))
- size -= buf - el->filebuf;
+ size -= buf - pl->filebuf;
entry = buf;
@@ -860,7 +872,7 @@ static int add_excludes_from_buffer(char *buf, size_t size,
if (entry != buf + i && entry[0] != '#') {
buf[i - (i && buf[i-1] == '\r')] = 0;
trim_trailing_spaces(entry);
- add_exclude(entry, base, baselen, el, lineno);
+ add_pattern(entry, base, baselen, pl, lineno);
}
lineno++;
entry = buf + i + 1;
@@ -869,17 +881,17 @@ static int add_excludes_from_buffer(char *buf, size_t size,
return 0;
}
-int add_excludes_from_file_to_list(const char *fname, const char *base,
- int baselen, struct exclude_list *el,
+int add_patterns_from_file_to_list(const char *fname, const char *base,
+ int baselen, struct pattern_list *pl,
struct index_state *istate)
{
- return add_excludes(fname, base, baselen, el, istate, NULL);
+ return add_patterns(fname, base, baselen, pl, istate, NULL);
}
-int add_excludes_from_blob_to_list(
+int add_patterns_from_blob_to_list(
struct object_id *oid,
const char *base, int baselen,
- struct exclude_list *el)
+ struct pattern_list *pl)
{
char *buf;
size_t size;
@@ -889,31 +901,31 @@ int add_excludes_from_blob_to_list(
if (r != 1)
return r;
- add_excludes_from_buffer(buf, size, base, baselen, el);
+ add_patterns_from_buffer(buf, size, base, baselen, pl);
return 0;
}
-struct exclude_list *add_exclude_list(struct dir_struct *dir,
+struct pattern_list *add_pattern_list(struct dir_struct *dir,
int group_type, const char *src)
{
- struct exclude_list *el;
+ struct pattern_list *pl;
struct exclude_list_group *group;
group = &dir->exclude_list_group[group_type];
- ALLOC_GROW(group->el, group->nr + 1, group->alloc);
- el = &group->el[group->nr++];
- memset(el, 0, sizeof(*el));
- el->src = src;
- return el;
+ ALLOC_GROW(group->pl, group->nr + 1, group->alloc);
+ pl = &group->pl[group->nr++];
+ memset(pl, 0, sizeof(*pl));
+ pl->src = src;
+ return pl;
}
/*
* Used to set up core.excludesfile and .git/info/exclude lists.
*/
-static void add_excludes_from_file_1(struct dir_struct *dir, const char *fname,
+static void add_patterns_from_file_1(struct dir_struct *dir, const char *fname,
struct oid_stat *oid_stat)
{
- struct exclude_list *el;
+ struct pattern_list *pl;
/*
* catch setup_standard_excludes() that's called before
* dir->untracked is assigned. That function behaves
@@ -921,15 +933,15 @@ static void add_excludes_from_file_1(struct dir_struct *dir, const char *fname,
*/
if (!dir->untracked)
dir->unmanaged_exclude_files++;
- el = add_exclude_list(dir, EXC_FILE, fname);
- if (add_excludes(fname, "", 0, el, NULL, oid_stat) < 0)
+ pl = add_pattern_list(dir, EXC_FILE, fname);
+ if (add_patterns(fname, "", 0, pl, NULL, oid_stat) < 0)
die(_("cannot use %s as an exclude file"), fname);
}
-void add_excludes_from_file(struct dir_struct *dir, const char *fname)
+void add_patterns_from_file(struct dir_struct *dir, const char *fname)
{
dir->unmanaged_exclude_files++; /* see validate_untracked_cache() */
- add_excludes_from_file_1(dir, fname, NULL);
+ add_patterns_from_file_1(dir, fname, NULL);
}
int match_basename(const char *basename, int basenamelen,
@@ -940,7 +952,7 @@ int match_basename(const char *basename, int basenamelen,
if (patternlen == basenamelen &&
!fspathncmp(pattern, basename, basenamelen))
return 1;
- } else if (flags & EXC_FLAG_ENDSWITH) {
+ } else if (flags & PATTERN_FLAG_ENDSWITH) {
/* "*literal" matching against "fooliteral" */
if (patternlen - 1 <= basenamelen &&
!fspathncmp(pattern + 1,
@@ -1021,85 +1033,97 @@ int match_pathname(const char *pathname, int pathlen,
* any, determines the fate. Returns the exclude_list element which
* matched, or NULL for undecided.
*/
-static struct exclude *last_exclude_matching_from_list(const char *pathname,
+static struct path_pattern *last_matching_pattern_from_list(const char *pathname,
int pathlen,
const char *basename,
int *dtype,
- struct exclude_list *el,
+ struct pattern_list *pl,
struct index_state *istate)
{
- struct exclude *exc = NULL; /* undecided */
+ struct path_pattern *res = NULL; /* undecided */
int i;
- if (!el->nr)
+ if (!pl->nr)
return NULL; /* undefined */
- for (i = el->nr - 1; 0 <= i; i--) {
- struct exclude *x = el->excludes[i];
- const char *exclude = x->pattern;
- int prefix = x->nowildcardlen;
+ for (i = pl->nr - 1; 0 <= i; i--) {
+ struct path_pattern *pattern = pl->patterns[i];
+ const char *exclude = pattern->pattern;
+ int prefix = pattern->nowildcardlen;
- if (x->flags & EXC_FLAG_MUSTBEDIR) {
+ if (pattern->flags & PATTERN_FLAG_MUSTBEDIR) {
if (*dtype == DT_UNKNOWN)
*dtype = get_dtype(NULL, istate, pathname, pathlen);
if (*dtype != DT_DIR)
continue;
}
- if (x->flags & EXC_FLAG_NODIR) {
+ if (pattern->flags & PATTERN_FLAG_NODIR) {
if (match_basename(basename,
pathlen - (basename - pathname),
- exclude, prefix, x->patternlen,
- x->flags)) {
- exc = x;
+ exclude, prefix, pattern->patternlen,
+ pattern->flags)) {
+ res = pattern;
break;
}
continue;
}
- assert(x->baselen == 0 || x->base[x->baselen - 1] == '/');
+ assert(pattern->baselen == 0 ||
+ pattern->base[pattern->baselen - 1] == '/');
if (match_pathname(pathname, pathlen,
- x->base, x->baselen ? x->baselen - 1 : 0,
- exclude, prefix, x->patternlen, x->flags)) {
- exc = x;
+ pattern->base,
+ pattern->baselen ? pattern->baselen - 1 : 0,
+ exclude, prefix, pattern->patternlen,
+ pattern->flags)) {
+ res = pattern;
break;
}
}
- return exc;
+ return res;
}
/*
- * Scan the list and let the last match determine the fate.
- * Return 1 for exclude, 0 for include and -1 for undecided.
+ * Scan the list of patterns to determine if the ordered list
+ * of patterns matches on 'pathname'.
+ *
+ * Return 1 for a match, 0 for not matched and -1 for undecided.
*/
-int is_excluded_from_list(const char *pathname,
- int pathlen, const char *basename, int *dtype,
- struct exclude_list *el, struct index_state *istate)
-{
- struct exclude *exclude;
- exclude = last_exclude_matching_from_list(pathname, pathlen, basename,
- dtype, el, istate);
- if (exclude)
- return exclude->flags & EXC_FLAG_NEGATIVE ? 0 : 1;
- return -1; /* undecided */
+enum pattern_match_result path_matches_pattern_list(
+ const char *pathname, int pathlen,
+ const char *basename, int *dtype,
+ struct pattern_list *pl,
+ struct index_state *istate)
+{
+ struct path_pattern *pattern;
+ pattern = last_matching_pattern_from_list(pathname, pathlen, basename,
+ dtype, pl, istate);
+ if (pattern) {
+ if (pattern->flags & PATTERN_FLAG_NEGATIVE)
+ return NOT_MATCHED;
+ else
+ return MATCHED;
+ }
+
+ return UNDECIDED;
}
-static struct exclude *last_exclude_matching_from_lists(struct dir_struct *dir,
- struct index_state *istate,
- const char *pathname, int pathlen, const char *basename,
- int *dtype_p)
+static struct path_pattern *last_matching_pattern_from_lists(
+ struct dir_struct *dir, struct index_state *istate,
+ const char *pathname, int pathlen,
+ const char *basename, int *dtype_p)
{
int i, j;
struct exclude_list_group *group;
- struct exclude *exclude;
+ struct path_pattern *pattern;
for (i = EXC_CMDL; i <= EXC_FILE; i++) {
group = &dir->exclude_list_group[i];
for (j = group->nr - 1; j >= 0; j--) {
- exclude = last_exclude_matching_from_list(
+ pattern = last_matching_pattern_from_list(
pathname, pathlen, basename, dtype_p,
- &group->el[j], istate);
- if (exclude)
- return exclude;
+ &group->pl[j], istate);
+ if (pattern)
+ return pattern;
}
}
return NULL;
@@ -1114,7 +1138,7 @@ static void prep_exclude(struct dir_struct *dir,
const char *base, int baselen)
{
struct exclude_list_group *group;
- struct exclude_list *el;
+ struct pattern_list *pl;
struct exclude_stack *stk = NULL;
struct untracked_cache_dir *untracked;
int current;
@@ -1130,17 +1154,17 @@ static void prep_exclude(struct dir_struct *dir,
if (stk->baselen <= baselen &&
!strncmp(dir->basebuf.buf, base, stk->baselen))
break;
- el = &group->el[dir->exclude_stack->exclude_ix];
+ pl = &group->pl[dir->exclude_stack->exclude_ix];
dir->exclude_stack = stk->prev;
- dir->exclude = NULL;
- free((char *)el->src); /* see strbuf_detach() below */
- clear_exclude_list(el);
+ dir->pattern = NULL;
+ free((char *)pl->src); /* see strbuf_detach() below */
+ clear_pattern_list(pl);
free(stk);
group->nr--;
}
/* Skip traversing into sub directories if the parent is excluded */
- if (dir->exclude)
+ if (dir->pattern)
return;
/*
@@ -1181,7 +1205,7 @@ static void prep_exclude(struct dir_struct *dir,
stk->baselen = cp - base;
stk->exclude_ix = group->nr;
stk->ucd = untracked;
- el = add_exclude_list(dir, EXC_DIRS, NULL);
+ pl = add_pattern_list(dir, EXC_DIRS, NULL);
strbuf_add(&dir->basebuf, base + current, stk->baselen - current);
assert(stk->baselen == dir->basebuf.len);
@@ -1189,15 +1213,15 @@ static void prep_exclude(struct dir_struct *dir,
if (stk->baselen) {
int dt = DT_DIR;
dir->basebuf.buf[stk->baselen - 1] = 0;
- dir->exclude = last_exclude_matching_from_lists(dir,
+ dir->pattern = last_matching_pattern_from_lists(dir,
istate,
dir->basebuf.buf, stk->baselen - 1,
dir->basebuf.buf + current, &dt);
dir->basebuf.buf[stk->baselen - 1] = '/';
- if (dir->exclude &&
- dir->exclude->flags & EXC_FLAG_NEGATIVE)
- dir->exclude = NULL;
- if (dir->exclude) {
+ if (dir->pattern &&
+ dir->pattern->flags & PATTERN_FLAG_NEGATIVE)
+ dir->pattern = NULL;
+ if (dir->pattern) {
dir->exclude_stack = stk;
return;
}
@@ -1223,30 +1247,30 @@ static void prep_exclude(struct dir_struct *dir,
/*
* dir->basebuf gets reused by the traversal, but we
* need fname to remain unchanged to ensure the src
- * member of each struct exclude correctly
+ * member of each struct path_pattern correctly
* back-references its source file. Other invocations
- * of add_exclude_list provide stable strings, so we
+ * of add_pattern_list provide stable strings, so we
* strbuf_detach() and free() here in the caller.
*/
struct strbuf sb = STRBUF_INIT;
strbuf_addbuf(&sb, &dir->basebuf);
strbuf_addstr(&sb, dir->exclude_per_dir);
- el->src = strbuf_detach(&sb, NULL);
- add_excludes(el->src, el->src, stk->baselen, el, istate,
+ pl->src = strbuf_detach(&sb, NULL);
+ add_patterns(pl->src, pl->src, stk->baselen, pl, istate,
untracked ? &oid_stat : NULL);
}
/*
* NEEDSWORK: when untracked cache is enabled, prep_exclude()
* will first be called in valid_cached_dir() then maybe many
- * times more in last_exclude_matching(). When the cache is
- * used, last_exclude_matching() will not be called and
+ * times more in last_matching_pattern(). When the cache is
+ * used, last_matching_pattern() will not be called and
* reading .gitignore content will be a waste.
*
* So when it's called by valid_cached_dir() and we can get
* .gitignore SHA-1 from the index (i.e. .gitignore is not
* modified on work tree), we could delay reading the
* .gitignore content until we absolutely need it in
- * last_exclude_matching(). Be careful about ignore rule
+ * last_matching_pattern(). Be careful about ignore rule
* order, though, if you do that.
*/
if (untracked &&
@@ -1266,7 +1290,7 @@ static void prep_exclude(struct dir_struct *dir,
* Returns the exclude_list element which matched, or NULL for
* undecided.
*/
-struct exclude *last_exclude_matching(struct dir_struct *dir,
+struct path_pattern *last_matching_pattern(struct dir_struct *dir,
struct index_state *istate,
const char *pathname,
int *dtype_p)
@@ -1277,10 +1301,10 @@ struct exclude *last_exclude_matching(struct dir_struct *dir,
prep_exclude(dir, istate, pathname, basename-pathname);
- if (dir->exclude)
- return dir->exclude;
+ if (dir->pattern)
+ return dir->pattern;
- return last_exclude_matching_from_lists(dir, istate, pathname, pathlen,
+ return last_matching_pattern_from_lists(dir, istate, pathname, pathlen,
basename, dtype_p);
}
@@ -1292,10 +1316,10 @@ struct exclude *last_exclude_matching(struct dir_struct *dir,
int is_excluded(struct dir_struct *dir, struct index_state *istate,
const char *pathname, int *dtype_p)
{
- struct exclude *exclude =
- last_exclude_matching(dir, istate, pathname, dtype_p);
- if (exclude)
- return exclude->flags & EXC_FLAG_NEGATIVE ? 0 : 1;
+ struct path_pattern *pattern =
+ last_matching_pattern(dir, istate, pathname, dtype_p);
+ if (pattern)
+ return pattern->flags & PATTERN_FLAG_NEGATIVE ? 0 : 1;
return 0;
}
@@ -1439,6 +1463,16 @@ static enum path_treatment treat_directory(struct dir_struct *dir,
return path_none;
case index_nonexistent:
+ if (dir->flags & DIR_SKIP_NESTED_GIT) {
+ int nested_repo;
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_addstr(&sb, dirname);
+ nested_repo = is_nonbare_repository_dir(&sb);
+ strbuf_release(&sb);
+ if (nested_repo)
+ return path_none;
+ }
+
if (dir->flags & DIR_SHOW_OTHER_DIRECTORIES)
break;
if (exclude &&
@@ -1808,7 +1842,7 @@ static int valid_cached_dir(struct dir_struct *dir,
/*
* prep_exclude will be called eventually on this directory,
- * but it's called much later in last_exclude_matching(). We
+ * but it's called much later in last_matching_pattern(). We
* need it now to determine the validity of the cache for this
* path. The next calls will be nearly no-op, the way
* prep_exclude() is designed.
@@ -1938,8 +1972,11 @@ static enum path_treatment read_directory_recursive(struct dir_struct *dir,
/* recurse into subdir if instructed by treat_path */
if ((state == path_recurse) ||
((state == path_untracked) &&
- (dir->flags & DIR_SHOW_IGNORED_TOO) &&
- (get_dtype(cdir.de, istate, path.buf, path.len) == DT_DIR))) {
+ (get_dtype(cdir.de, istate, path.buf, path.len) == DT_DIR) &&
+ ((dir->flags & DIR_SHOW_IGNORED_TOO) ||
+ (pathspec &&
+ do_match_pathspec(istate, pathspec, path.buf, path.len,
+ baselen, NULL, DO_MATCH_LEADING_PATHSPEC) == MATCHED_RECURSIVELY_LEADING_PATHSPEC)))) {
struct untracked_cache_dir *ud;
ud = lookup_untracked(dir->untracked, untracked,
path.buf + baselen,
@@ -1950,6 +1987,12 @@ static enum path_treatment read_directory_recursive(struct dir_struct *dir,
check_only, stop_at_first_file, pathspec);
if (subdir_state > dir_state)
dir_state = subdir_state;
+
+ if (pathspec &&
+ !match_pathspec(istate, pathspec, path.buf, path.len,
+ 0 /* prefix */, NULL,
+ 0 /* do NOT special case dirs */))
+ state = path_none;
}
if (check_only) {
@@ -2488,14 +2531,14 @@ void setup_standard_excludes(struct dir_struct *dir)
if (!excludes_file)
excludes_file = xdg_config_home("ignore");
if (excludes_file && !access_or_warn(excludes_file, R_OK, 0))
- add_excludes_from_file_1(dir, excludes_file,
+ add_patterns_from_file_1(dir, excludes_file,
dir->untracked ? &dir->ss_excludes_file : NULL);
/* per repository user preference */
if (startup_info->have_repository) {
const char *path = git_path_info_exclude();
if (!access_or_warn(path, R_OK, 0))
- add_excludes_from_file_1(dir, path,
+ add_patterns_from_file_1(dir, path,
dir->untracked ? &dir->ss_info_exclude : NULL);
}
}
@@ -2527,18 +2570,18 @@ void clear_directory(struct dir_struct *dir)
{
int i, j;
struct exclude_list_group *group;
- struct exclude_list *el;
+ struct pattern_list *pl;
struct exclude_stack *stk;
for (i = EXC_CMDL; i <= EXC_FILE; i++) {
group = &dir->exclude_list_group[i];
for (j = 0; j < group->nr; j++) {
- el = &group->el[j];
+ pl = &group->pl[j];
if (i == EXC_DIRS)
- free((char *)el->src);
- clear_exclude_list(el);
+ free((char *)pl->src);
+ clear_pattern_list(pl);
}
- free(group->el);
+ free(group->pl);
}
stk = dir->exclude_stack;
diff --git a/dir.h b/dir.h
index 680079bbe3..2fbdef014f 100644
--- a/dir.h
+++ b/dir.h
@@ -11,24 +11,24 @@ struct dir_entry {
char name[FLEX_ARRAY]; /* more */
};
-#define EXC_FLAG_NODIR 1
-#define EXC_FLAG_ENDSWITH 4
-#define EXC_FLAG_MUSTBEDIR 8
-#define EXC_FLAG_NEGATIVE 16
+#define PATTERN_FLAG_NODIR 1
+#define PATTERN_FLAG_ENDSWITH 4
+#define PATTERN_FLAG_MUSTBEDIR 8
+#define PATTERN_FLAG_NEGATIVE 16
-struct exclude {
+struct path_pattern {
/*
- * This allows callers of last_exclude_matching() etc.
+ * This allows callers of last_matching_pattern() etc.
* to determine the origin of the matching pattern.
*/
- struct exclude_list *el;
+ struct pattern_list *pl;
const char *pattern;
int patternlen;
int nowildcardlen;
const char *base;
int baselen;
- unsigned flags; /* EXC_FLAG_* */
+ unsigned flags; /* PATTERN_FLAG_* */
/*
* Counting starts from 1 for line numbers in ignore files,
@@ -44,7 +44,7 @@ struct exclude {
* can also be used to represent the list of --exclude values passed
* via CLI args.
*/
-struct exclude_list {
+struct pattern_list {
int nr;
int alloc;
@@ -54,7 +54,7 @@ struct exclude_list {
/* origin of list, e.g. path to filename, or descriptive string */
const char *src;
- struct exclude **excludes;
+ struct path_pattern **patterns;
};
/*
@@ -72,7 +72,7 @@ struct exclude_stack {
struct exclude_list_group {
int nr, alloc;
- struct exclude_list *el;
+ struct pattern_list *pl;
};
struct oid_stat {
@@ -156,7 +156,8 @@ struct dir_struct {
DIR_SHOW_IGNORED_TOO = 1<<5,
DIR_COLLECT_KILLED_ONLY = 1<<6,
DIR_KEEP_UNTRACKED_CONTENTS = 1<<7,
- DIR_SHOW_IGNORED_TOO_MODE_MATCHING = 1<<8
+ DIR_SHOW_IGNORED_TOO_MODE_MATCHING = 1<<8,
+ DIR_SKIP_NESTED_GIT = 1<<9
} flags;
struct dir_entry **entries;
struct dir_entry **ignored;
@@ -191,7 +192,7 @@ struct dir_struct {
* matching exclude struct if the directory is excluded.
*/
struct exclude_stack *exclude_stack;
- struct exclude *exclude;
+ struct path_pattern *pattern;
struct strbuf basebuf;
/* Enable untracked file cache if set */
@@ -211,8 +212,9 @@ int count_slashes(const char *s);
* when populating the seen[] array.
*/
#define MATCHED_RECURSIVELY 1
-#define MATCHED_FNMATCH 2
-#define MATCHED_EXACTLY 3
+#define MATCHED_RECURSIVELY_LEADING_PATHSPEC 2
+#define MATCHED_FNMATCH 3
+#define MATCHED_EXACTLY 4
int simple_length(const char *match);
int no_wildcard(const char *string);
char *common_prefix(const struct pathspec *pathspec);
@@ -230,10 +232,23 @@ int read_directory(struct dir_struct *, struct index_state *istate,
const char *path, int len,
const struct pathspec *pathspec);
-int is_excluded_from_list(const char *pathname, int pathlen,
- const char *basename, int *dtype,
- struct exclude_list *el,
- struct index_state *istate);
+enum pattern_match_result {
+ UNDECIDED = -1,
+ NOT_MATCHED = 0,
+ MATCHED = 1,
+};
+
+/*
+ * Scan the list of patterns to determine if the ordered list
+ * of patterns matches on 'pathname'.
+ *
+ * Return 1 for a match, 0 for not matched and -1 for undecided.
+ */
+enum pattern_match_result path_matches_pattern_list(const char *pathname,
+ int pathlen,
+ const char *basename, int *dtype,
+ struct pattern_list *pl,
+ struct index_state *istate);
struct dir_entry *dir_add_ignored(struct dir_struct *dir,
struct index_state *istate,
const char *pathname, int len);
@@ -248,26 +263,26 @@ int match_pathname(const char *, int,
const char *, int,
const char *, int, int, unsigned);
-struct exclude *last_exclude_matching(struct dir_struct *dir,
- struct index_state *istate,
- const char *name, int *dtype);
+struct path_pattern *last_matching_pattern(struct dir_struct *dir,
+ struct index_state *istate,
+ const char *name, int *dtype);
int is_excluded(struct dir_struct *dir,
struct index_state *istate,
const char *name, int *dtype);
-struct exclude_list *add_exclude_list(struct dir_struct *dir,
+struct pattern_list *add_pattern_list(struct dir_struct *dir,
int group_type, const char *src);
-int add_excludes_from_file_to_list(const char *fname, const char *base, int baselen,
- struct exclude_list *el, struct index_state *istate);
-void add_excludes_from_file(struct dir_struct *, const char *fname);
-int add_excludes_from_blob_to_list(struct object_id *oid,
+int add_patterns_from_file_to_list(const char *fname, const char *base, int baselen,
+ struct pattern_list *pl, struct index_state *istate);
+void add_patterns_from_file(struct dir_struct *, const char *fname);
+int add_patterns_from_blob_to_list(struct object_id *oid,
const char *base, int baselen,
- struct exclude_list *el);
-void parse_exclude_pattern(const char **string, int *patternlen, unsigned *flags, int *nowildcardlen);
-void add_exclude(const char *string, const char *base,
- int baselen, struct exclude_list *el, int srcpos);
-void clear_exclude_list(struct exclude_list *el);
+ struct pattern_list *pl);
+void parse_path_pattern(const char **string, int *patternlen, unsigned *flags, int *nowildcardlen);
+void add_pattern(const char *string, const char *base,
+ int baselen, struct pattern_list *pl, int srcpos);
+void clear_pattern_list(struct pattern_list *pl);
void clear_directory(struct dir_struct *dir);
int repo_file_exists(struct repository *repo, const char *path);
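The dir.h hunk above replaces the boolean is_excluded_from_list() with path_matches_pattern_list(), which reports an explicit enum pattern_match_result. A minimal sketch of how a caller might consume the new API, assuming git's internal headers; the helper name and the DT_UNKNOWN starting value are illustrative and not part of this patch:

	static int path_is_ignored(struct index_state *istate,
				   struct pattern_list *pl,
				   const char *path)
	{
		int dtype = DT_UNKNOWN;
		const char *slash = strrchr(path, '/');
		const char *basename = slash ? slash + 1 : path;

		/* UNDECIDED and NOT_MATCHED both leave the path un-ignored. */
		return path_matches_pattern_list(path, strlen(path), basename,
						 &dtype, pl, istate) == MATCHED;
	}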
diff --git a/environment.c b/environment.c
index 89af47cb85..efa072680a 100644
--- a/environment.c
+++ b/environment.c
@@ -31,8 +31,6 @@ int warn_ambiguous_refs = 1;
int warn_on_object_refname_ambiguity = 1;
int ref_paranoia = -1;
int repository_format_precious_objects;
-char *repository_format_partial_clone;
-const char *core_partial_clone_filter_default;
int repository_format_worktree_config;
const char *git_commit_encoding;
const char *git_log_output_encoding;
diff --git a/fast-import.c b/fast-import.c
index b44d6a467e..9503d087b2 100644
--- a/fast-import.c
+++ b/fast-import.c
@@ -1763,7 +1763,6 @@ static int read_next_command(void)
} else {
struct recent_command *rc;
- strbuf_detach(&command_buf, NULL);
stdin_eof = strbuf_getline_lf(&command_buf, stdin);
if (stdin_eof)
return EOF;
@@ -1784,7 +1783,7 @@ static int read_next_command(void)
free(rc->buf);
}
- rc->buf = command_buf.buf;
+ rc->buf = xstrdup(command_buf.buf);
rc->prev = cmd_tail;
rc->next = cmd_hist.prev;
rc->prev->next = rc;
@@ -1833,7 +1832,6 @@ static int parse_data(struct strbuf *sb, uintmax_t limit, uintmax_t *len_res)
char *term = xstrdup(data);
size_t term_len = command_buf.len - (data - command_buf.buf);
- strbuf_detach(&command_buf, NULL);
for (;;) {
if (strbuf_getline_lf(&command_buf, stdin) == EOF)
die("EOF in data (terminator '%s' not found)", term);
@@ -2491,18 +2489,14 @@ static void parse_from_existing(struct branch *b)
}
}
-static int parse_from(struct branch *b)
+static int parse_objectish(struct branch *b, const char *objectish)
{
- const char *from;
struct branch *s;
struct object_id oid;
- if (!skip_prefix(command_buf.buf, "from ", &from))
- return 0;
-
oidcpy(&oid, &b->branch_tree.versions[1].oid);
- s = lookup_branch(from);
+ s = lookup_branch(objectish);
if (b == s)
die("Can't create a branch from itself: %s", b->name);
else if (s) {
@@ -2510,8 +2504,8 @@ static int parse_from(struct branch *b)
oidcpy(&b->oid, &s->oid);
oidcpy(&b->branch_tree.versions[0].oid, t);
oidcpy(&b->branch_tree.versions[1].oid, t);
- } else if (*from == ':') {
- uintmax_t idnum = parse_mark_ref_eol(from);
+ } else if (*objectish == ':') {
+ uintmax_t idnum = parse_mark_ref_eol(objectish);
struct object_entry *oe = find_mark(idnum);
if (oe->type != OBJ_COMMIT)
die("Mark :%" PRIuMAX " not a commit", idnum);
@@ -2525,13 +2519,13 @@ static int parse_from(struct branch *b)
} else
parse_from_existing(b);
}
- } else if (!get_oid(from, &b->oid)) {
+ } else if (!get_oid(objectish, &b->oid)) {
parse_from_existing(b);
if (is_null_oid(&b->oid))
b->delete = 1;
}
else
- die("Invalid ref name or SHA1 expression: %s", from);
+ die("Invalid ref name or SHA1 expression: %s", objectish);
if (b->branch_tree.tree && !oideq(&oid, &b->branch_tree.versions[1].oid)) {
release_tree_content_recursive(b->branch_tree.tree);
@@ -2542,6 +2536,26 @@ static int parse_from(struct branch *b)
return 1;
}
+static int parse_from(struct branch *b)
+{
+ const char *from;
+
+ if (!skip_prefix(command_buf.buf, "from ", &from))
+ return 0;
+
+ return parse_objectish(b, from);
+}
+
+static int parse_objectish_with_prefix(struct branch *b, const char *prefix)
+{
+ const char *base;
+
+ if (!skip_prefix(command_buf.buf, prefix, &base))
+ return 0;
+
+ return parse_objectish(b, base);
+}
+
static struct hash_list *parse_merge(unsigned int *count)
{
struct hash_list *list = NULL, **tail = &list, *n;
@@ -2588,7 +2602,7 @@ static void parse_new_commit(const char *arg)
struct branch *b;
char *author = NULL;
char *committer = NULL;
- const char *encoding = NULL;
+ char *encoding = NULL;
struct hash_list *merge_list = NULL;
unsigned int merge_count;
unsigned char prev_fanout, new_fanout;
@@ -2611,8 +2625,10 @@ static void parse_new_commit(const char *arg)
}
if (!committer)
die("Expected committer but didn't get one");
- if (skip_prefix(command_buf.buf, "encoding ", &encoding))
+ if (skip_prefix(command_buf.buf, "encoding ", &v)) {
+ encoding = xstrdup(v);
read_next_command();
+ }
parse_data(&msg, 0, NULL);
read_next_command();
parse_from(b);
@@ -2686,6 +2702,7 @@ static void parse_new_commit(const char *arg)
strbuf_addbuf(&new_data, &msg);
free(author);
free(committer);
+ free(encoding);
if (!store_object(OBJ_COMMIT, &new_data, NULL, &b->oid, next_mark))
b->pack_id = pack_id;
@@ -2713,6 +2730,7 @@ static void parse_new_tag(const char *arg)
first_tag = t;
last_tag = t;
read_next_command();
+ parse_mark();
/* from ... */
if (!skip_prefix(command_buf.buf, "from ", &from))
@@ -2769,7 +2787,7 @@ static void parse_new_tag(const char *arg)
strbuf_addbuf(&new_data, &msg);
free(tagger);
- if (store_object(OBJ_TAG, &new_data, NULL, &t->oid, 0))
+ if (store_object(OBJ_TAG, &new_data, NULL, &t->oid, next_mark))
t->pack_id = MAX_PACK_ID;
else
t->pack_id = pack_id;
@@ -2778,6 +2796,7 @@ static void parse_new_tag(const char *arg)
static void parse_reset_branch(const char *arg)
{
struct branch *b;
+ const char *tag_name;
b = lookup_branch(arg);
if (b) {
@@ -2793,6 +2812,32 @@ static void parse_reset_branch(const char *arg)
b = new_branch(arg);
read_next_command();
parse_from(b);
+ if (b->delete && skip_prefix(b->name, "refs/tags/", &tag_name)) {
+ /*
+ * Elsewhere, we call dump_branches() before dump_tags(),
+ * and dump_branches() will handle ref deletions first, so
+ * in order to make sure the deletion actually takes effect,
+ * we need to remove the tag from our list of tags to update.
+ *
+ * NEEDSWORK: replace list of tags with hashmap for faster
+ * deletion?
+ */
+ struct tag *t, *prev = NULL;
+ for (t = first_tag; t; t = t->next_tag) {
+ if (!strcmp(t->name, tag_name))
+ break;
+ prev = t;
+ }
+ if (t) {
+ if (prev)
+ prev->next_tag = t->next_tag;
+ else
+ first_tag = t->next_tag;
+ if (!t->next_tag)
+ last_tag = prev;
+ /* There is no mem_pool_free(t) function to call. */
+ }
+ }
if (command_buf.len > 0)
unread_command_buf = 1;
}
@@ -3059,6 +3104,28 @@ static void parse_progress(void)
skip_optional_lf();
}
+static void parse_alias(void)
+{
+ struct object_entry *e;
+ struct branch b;
+
+ skip_optional_lf();
+ read_next_command();
+
+ /* mark ... */
+ parse_mark();
+ if (!next_mark)
+ die(_("Expected 'mark' command, got %s"), command_buf.buf);
+
+ /* to ... */
+ memset(&b, 0, sizeof(b));
+ if (!parse_objectish_with_prefix(&b, "to "))
+ die(_("Expected 'to' command, got %s"), command_buf.buf);
+ e = find_object(&b.oid);
+ assert(e);
+ insert_mark(next_mark, e);
+}
+
static char* make_fast_import_path(const char *path)
{
if (!relative_marks_paths || is_absolute_path(path))
@@ -3186,6 +3253,8 @@ static int parse_one_feature(const char *feature, int from_stream)
option_import_marks(arg, from_stream, 1);
} else if (skip_prefix(feature, "export-marks=", &arg)) {
option_export_marks(arg);
+ } else if (!strcmp(feature, "alias")) {
+ ; /* Don't die - this feature is supported */
} else if (!strcmp(feature, "get-mark")) {
; /* Don't die - this feature is supported */
} else if (!strcmp(feature, "cat-blob")) {
@@ -3342,6 +3411,8 @@ int cmd_main(int argc, const char **argv)
parse_checkpoint();
else if (!strcmp("done", command_buf.buf))
break;
+ else if (!strcmp("alias", command_buf.buf))
+ parse_alias();
else if (starts_with(command_buf.buf, "progress "))
parse_progress();
else if (skip_prefix(command_buf.buf, "feature ", &v))
diff --git a/fetch-negotiator.c b/fetch-negotiator.c
index d6d685cba0..0a1357dc9d 100644
--- a/fetch-negotiator.c
+++ b/fetch-negotiator.c
@@ -2,19 +2,20 @@
#include "fetch-negotiator.h"
#include "negotiator/default.h"
#include "negotiator/skipping.h"
+#include "repository.h"
-void fetch_negotiator_init(struct fetch_negotiator *negotiator,
- const char *algorithm)
+void fetch_negotiator_init(struct repository *r,
+ struct fetch_negotiator *negotiator)
{
- if (algorithm) {
- if (!strcmp(algorithm, "skipping")) {
- skipping_negotiator_init(negotiator);
- return;
- } else if (!strcmp(algorithm, "default")) {
- /* Fall through to default initialization */
- } else {
- die("unknown fetch negotiation algorithm '%s'", algorithm);
- }
+ prepare_repo_settings(r);
+ switch(r->settings.fetch_negotiation_algorithm) {
+ case FETCH_NEGOTIATION_SKIPPING:
+ skipping_negotiator_init(negotiator);
+ return;
+
+ case FETCH_NEGOTIATION_DEFAULT:
+ default:
+ default_negotiator_init(negotiator);
+ return;
}
- default_negotiator_init(negotiator);
}
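With this rewrite the caller no longer feeds a config string to fetch_negotiator_init(); the function asks prepare_repo_settings() to populate r->settings.fetch_negotiation_algorithm, which is where fetch.negotiationAlgorithm now lands (the git_config_get_string() call removed from fetch-pack.c further down). A minimal sketch of the new call pattern, assuming git's internal headers; the release hook follows the function-pointer interface declared in fetch-negotiator.h:

	struct fetch_negotiator negotiator;

	fetch_negotiator_init(the_repository, &negotiator);
	/* ... drive the negotiation through the struct's function pointers ... */
	negotiator.release(&negotiator);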
diff --git a/fetch-negotiator.h b/fetch-negotiator.h
index 9e3967ce66..ea78868504 100644
--- a/fetch-negotiator.h
+++ b/fetch-negotiator.h
@@ -2,6 +2,7 @@
#define FETCH_NEGOTIATOR_H
struct commit;
+struct repository;
/*
* An object that supplies the information needed to negotiate the contents of
@@ -52,7 +53,7 @@ struct fetch_negotiator {
void *data;
};
-void fetch_negotiator_init(struct fetch_negotiator *negotiator,
- const char *algorithm);
+void fetch_negotiator_init(struct repository *r,
+ struct fetch_negotiator *negotiator);
#endif
diff --git a/fetch-object.c b/fetch-object.c
deleted file mode 100644
index 4266548800..0000000000
--- a/fetch-object.c
+++ /dev/null
@@ -1,40 +0,0 @@
-#include "cache.h"
-#include "packfile.h"
-#include "pkt-line.h"
-#include "strbuf.h"
-#include "transport.h"
-#include "fetch-object.h"
-
-static void fetch_refs(const char *remote_name, struct ref *ref)
-{
- struct remote *remote;
- struct transport *transport;
- int original_fetch_if_missing = fetch_if_missing;
-
- fetch_if_missing = 0;
- remote = remote_get(remote_name);
- if (!remote->url[0])
- die(_("Remote with no URL"));
- transport = transport_get(remote, remote->url[0]);
-
- transport_set_option(transport, TRANS_OPT_FROM_PROMISOR, "1");
- transport_set_option(transport, TRANS_OPT_NO_DEPENDENTS, "1");
- transport_fetch_refs(transport, ref);
- fetch_if_missing = original_fetch_if_missing;
-}
-
-void fetch_objects(const char *remote_name, const struct object_id *oids,
- int oid_nr)
-{
- struct ref *ref = NULL;
- int i;
-
- for (i = 0; i < oid_nr; i++) {
- struct ref *new_ref = alloc_ref(oid_to_hex(&oids[i]));
- oidcpy(&new_ref->old_oid, &oids[i]);
- new_ref->exact_oid = 1;
- new_ref->next = ref;
- ref = new_ref;
- }
- fetch_refs(remote_name, ref);
-}
diff --git a/fetch-object.h b/fetch-object.h
deleted file mode 100644
index d6444caa5a..0000000000
--- a/fetch-object.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#ifndef FETCH_OBJECT_H
-#define FETCH_OBJECT_H
-
-struct object_id;
-
-void fetch_objects(const char *remote_name, const struct object_id *oids,
- int oid_nr);
-
-#endif
diff --git a/fetch-pack.c b/fetch-pack.c
index 65be043f2a..0130b44112 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -36,7 +36,6 @@ static int agent_supported;
static int server_supports_filtering;
static struct lock_file shallow_lock;
static const char *alternate_shallow_file;
-static char *negotiation_algorithm;
static struct strbuf fsck_msg_types = STRBUF_INIT;
/* Remember to update object flag allocation in object.h */
@@ -168,16 +167,16 @@ static enum ack_type get_ack(struct packet_reader *reader,
if (!strcmp(reader->line, "NAK"))
return NAK;
if (skip_prefix(reader->line, "ACK ", &arg)) {
- if (!get_oid_hex(arg, result_oid)) {
- arg += 40;
- len -= arg - reader->line;
+ const char *p;
+ if (!parse_oid_hex(arg, result_oid, &p)) {
+ len -= p - reader->line;
if (len < 1)
return ACK;
- if (strstr(arg, "continue"))
+ if (strstr(p, "continue"))
return ACK_continue;
- if (strstr(arg, "common"))
+ if (strstr(p, "common"))
return ACK_common;
- if (strstr(arg, "ready"))
+ if (strstr(p, "ready"))
return ACK_ready;
return ACK;
}
@@ -339,12 +338,9 @@ static int find_common(struct fetch_negotiator *negotiator,
}
}
if (server_supports_filtering && args->filter_options.choice) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
- expand_list_objects_filter_spec(&args->filter_options,
- &expanded_filter_spec);
- packet_buf_write(&req_buf, "filter %s",
- expanded_filter_spec.buf);
- strbuf_release(&expanded_filter_spec);
+ const char *spec =
+ expand_list_objects_filter_spec(&args->filter_options);
+ packet_buf_write(&req_buf, "filter %s", spec);
}
packet_buf_flush(&req_buf);
state_len = req_buf.len;
@@ -386,6 +382,7 @@ static int find_common(struct fetch_negotiator *negotiator,
state_len = 0;
}
+ trace2_region_enter("fetch-pack", "negotiation_v0_v1", the_repository);
flushes = 0;
retval = -1;
if (args->no_dependents)
@@ -470,6 +467,7 @@ static int find_common(struct fetch_negotiator *negotiator,
}
}
done:
+ trace2_region_leave("fetch-pack", "negotiation_v0_v1", the_repository);
if (!got_ready || !no_done) {
packet_buf_write(&req_buf, "done\n");
send_request(args, fd[1], &req_buf);
@@ -892,12 +890,13 @@ static struct ref *do_fetch_pack(struct fetch_pack_args *args,
struct shallow_info *si,
char **pack_lockfile)
{
+ struct repository *r = the_repository;
struct ref *ref = copy_ref_list(orig_ref);
struct object_id oid;
const char *agent_feature;
int agent_len;
struct fetch_negotiator negotiator;
- fetch_negotiator_init(&negotiator, negotiation_algorithm);
+ fetch_negotiator_init(r, &negotiator);
sort_ref_list(&ref, ref_compare_name);
QSORT(sought, nr_sought, cmp_ref_by_name);
@@ -911,7 +910,7 @@ static struct ref *do_fetch_pack(struct fetch_pack_args *args,
if (server_supports("shallow"))
print_verbose(args, _("Server supports %s"), "shallow");
- else if (args->depth > 0 || is_repository_shallow(the_repository))
+ else if (args->depth > 0 || is_repository_shallow(r))
die(_("Server does not support shallow clients"));
if (args->depth > 0 || args->deepen_since || args->deepen_not)
args->deepen = 1;
@@ -1112,7 +1111,7 @@ static int add_haves(struct fetch_negotiator *negotiator,
}
static int send_fetch_request(struct fetch_negotiator *negotiator, int fd_out,
- const struct fetch_pack_args *args,
+ struct fetch_pack_args *args,
const struct ref *wants, struct oidset *common,
int *haves_to_send, int *in_vain,
int sideband_all)
@@ -1153,13 +1152,10 @@ static int send_fetch_request(struct fetch_negotiator *negotiator, int fd_out,
/* Add filter */
if (server_supports_feature("fetch", "filter", 0) &&
args->filter_options.choice) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
+ const char *spec =
+ expand_list_objects_filter_spec(&args->filter_options);
print_verbose(args, _("Server supports filter"));
- expand_list_objects_filter_spec(&args->filter_options,
- &expanded_filter_spec);
- packet_buf_write(&req_buf, "filter %s",
- expanded_filter_spec.buf);
- strbuf_release(&expanded_filter_spec);
+ packet_buf_write(&req_buf, "filter %s", spec);
} else if (args->filter_options.choice) {
warning("filtering not recognized by server, ignoring");
}
@@ -1379,14 +1375,15 @@ static struct ref *do_fetch_pack_v2(struct fetch_pack_args *args,
struct shallow_info *si,
char **pack_lockfile)
{
+ struct repository *r = the_repository;
struct ref *ref = copy_ref_list(orig_ref);
enum fetch_state state = FETCH_CHECK_LOCAL;
struct oidset common = OIDSET_INIT;
struct packet_reader reader;
- int in_vain = 0;
+ int in_vain = 0, negotiation_started = 0;
int haves_to_send = INITIAL_FLUSH;
struct fetch_negotiator negotiator;
- fetch_negotiator_init(&negotiator, negotiation_algorithm);
+ fetch_negotiator_init(r, &negotiator);
packet_reader_init(&reader, fd[0], NULL, 0,
PACKET_READ_CHOMP_NEWLINE |
PACKET_READ_DIE_ON_ERR_PACKET);
@@ -1426,6 +1423,12 @@ static struct ref *do_fetch_pack_v2(struct fetch_pack_args *args,
}
break;
case FETCH_SEND_REQUEST:
+ if (!negotiation_started) {
+ negotiation_started = 1;
+ trace2_region_enter("fetch-pack",
+ "negotiation_v2",
+ the_repository);
+ }
if (send_fetch_request(&negotiator, fd[1], args, ref,
&common,
&haves_to_send, &in_vain,
@@ -1449,6 +1452,9 @@ static struct ref *do_fetch_pack_v2(struct fetch_pack_args *args,
}
break;
case FETCH_GET_PACK:
+ trace2_region_leave("fetch-pack",
+ "negotiation_v2",
+ the_repository);
/* Check for shallow-info section */
if (process_section_header(&reader, "shallow-info", 1))
receive_shallow_info(args, &reader, shallows, si);
@@ -1505,8 +1511,6 @@ static void fetch_pack_config(void)
git_config_get_bool("repack.usedeltabaseoffset", &prefer_ofs_delta);
git_config_get_bool("fetch.fsckobjects", &fetch_fsck_objects);
git_config_get_bool("transfer.fsckobjects", &transfer_fsck_objects);
- git_config_get_string("fetch.negotiationalgorithm",
- &negotiation_algorithm);
git_config(fetch_pack_config_cb, NULL);
}
diff --git a/git-add--interactive.perl b/git-add--interactive.perl
index c20ae9e210..52659bb74c 100755
--- a/git-add--interactive.perl
+++ b/git-add--interactive.perl
@@ -1541,7 +1541,7 @@ sub patch_update_file {
for (@{$hunk[$ix]{DISPLAY}}) {
print;
}
- print colored $prompt_color,
+ print colored $prompt_color, "(", ($ix+1), "/$num) ",
sprintf(__($patch_update_prompt_modes{$patch_mode}{$hunk[$ix]{TYPE}}), $other);
my $line = prompt_single_character;
diff --git a/git-compat-util.h b/git-compat-util.h
index 83be89de0a..8b8b29a867 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -344,6 +344,7 @@ typedef uintmax_t timestamp_t;
#define PRItime PRIuMAX
#define parse_timestamp strtoumax
#define TIME_MAX UINTMAX_MAX
+#define TIME_MIN 0
#ifndef PATH_SEP
#define PATH_SEP ':'
@@ -818,9 +819,6 @@ const char *inet_ntop(int af, const void *src, char *dst, size_t size);
int git_atexit(void (*handler)(void));
#endif
-typedef void (*try_to_free_t)(size_t);
-try_to_free_t set_try_to_free_routine(try_to_free_t);
-
static inline size_t st_add(size_t a, size_t b)
{
if (unsigned_add_overflows(a, b))
@@ -1094,10 +1092,10 @@ static inline int strtol_i(char const *s, int base, int *result)
return 0;
}
+void git_stable_qsort(void *base, size_t nmemb, size_t size,
+ int(*compar)(const void *, const void *));
#ifdef INTERNAL_QSORT
-void git_qsort(void *base, size_t nmemb, size_t size,
- int(*compar)(const void *, const void *));
-#define qsort git_qsort
+#define qsort git_stable_qsort
#endif
#define QSORT(base, n, compar) sane_qsort((base), (n), sizeof(*(base)), compar)
@@ -1108,6 +1106,9 @@ static inline void sane_qsort(void *base, size_t nmemb, size_t size,
qsort(base, nmemb, size, compar);
}
+#define STABLE_QSORT(base, n, compar) \
+ git_stable_qsort((base), (n), sizeof(*(base)), compar)
+
#ifndef HAVE_ISO_QSORT_S
int git_qsort_s(void *base, size_t nmemb, size_t size,
int (*compar)(const void *, const void *, void *), void *ctx);
@@ -1312,4 +1313,42 @@ void unleak_memory(const void *ptr, size_t len);
*/
#include "banned.h"
+/*
+ * container_of - Get the address of an object containing a field.
+ *
+ * @ptr: pointer to the field.
+ * @type: type of the object.
+ * @member: name of the field within the object.
+ */
+#define container_of(ptr, type, member) \
+ ((type *) ((char *)(ptr) - offsetof(type, member)))
+
+/*
+ * helper function for `container_of_or_null' to avoid multiple
+ * evaluation of @ptr
+ */
+static inline void *container_of_or_null_offset(void *ptr, size_t offset)
+{
+ return ptr ? (char *)ptr - offset : NULL;
+}
+
+/*
+ * like `container_of', but allows returned value to be NULL
+ */
+#define container_of_or_null(ptr, type, member) \
+ (type *)container_of_or_null_offset(ptr, offsetof(type, member))
+
+/*
+ * like offsetof(), but takes a pointer to a variable of type which
+ * contains @member, instead of a specified type.
+ * @ptr is subject to multiple evaluation since we can't rely on __typeof__
+ * everywhere.
+ */
+#if defined(__GNUC__) /* clang sets this, too */
+#define OFFSETOF_VAR(ptr, member) offsetof(__typeof__(*ptr), member)
+#else /* !__GNUC__ */
+#define OFFSETOF_VAR(ptr, member) \
+ ((uintptr_t)&(ptr)->member - (uintptr_t)(ptr))
+#endif /* !__GNUC__ */
+
#endif
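container_of() and container_of_or_null() recover the enclosing object from a pointer to one of its embedded members, the usual intrusive-container idiom. A minimal, self-contained sketch using the macros added above; the struct names are illustrative:

	struct link { struct link *next; };
	struct item { int value; struct link node; };

	static struct item *item_of(struct link *l)
	{
		/* subtract offsetof(struct item, node) from l */
		return container_of(l, struct item, node);
	}

	static struct item *item_of_or_null(struct link *l)
	{
		/* same, but a NULL l yields NULL instead of a bogus pointer */
		return container_of_or_null(l, struct item, node);
	}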
diff --git a/git-filter-branch.sh b/git-filter-branch.sh
index 5c5afa2b98..fea7964617 100755
--- a/git-filter-branch.sh
+++ b/git-filter-branch.sh
@@ -83,6 +83,20 @@ set_ident () {
finish_ident COMMITTER
}
+if test -z "$FILTER_BRANCH_SQUELCH_WARNING$GIT_TEST_DISALLOW_ABBREVIATED_OPTIONS"
+then
+ cat <<EOF
+WARNING: git-filter-branch has a glut of gotchas generating mangled history
+ rewrites. Hit Ctrl-C before proceeding to abort, then use an
+ alternative filtering tool such as 'git filter-repo'
+ (https://github.com/newren/git-filter-repo/) instead. See the
+ filter-branch manual page for more details; to squelch this warning,
+ set FILTER_BRANCH_SQUELCH_WARNING=1.
+EOF
+ sleep 10
+ printf "Proceeding with filter-branch...\n\n"
+fi
+
USAGE="[--setup <command>] [--subdirectory-filter <directory>] [--env-filter <command>]
[--tree-filter <command>] [--index-filter <command>]
[--parent-filter <command>] [--msg-filter <command>]
diff --git a/git-gui/git-gui.sh b/git-gui/git-gui.sh
index 6de74ce639..fd476b6999 100755
--- a/git-gui/git-gui.sh
+++ b/git-gui/git-gui.sh
@@ -1340,6 +1340,7 @@ set HEAD {}
set PARENT {}
set MERGE_HEAD [list]
set commit_type {}
+set commit_type_is_amend 0
set empty_tree {}
set current_branch {}
set is_detached 0
@@ -1347,8 +1348,9 @@ set current_diff_path {}
set is_3way_diff 0
set is_submodule_diff 0
set is_conflict_diff 0
-set selected_commit_type new
set diff_empty_count 0
+set last_revert {}
+set last_revert_enc {}
set nullid "0000000000000000000000000000000000000000"
set nullid2 "0000000000000000000000000000000000000001"
@@ -1434,7 +1436,7 @@ proc PARENT {} {
}
proc force_amend {} {
- global selected_commit_type
+ global commit_type_is_amend
global HEAD PARENT MERGE_HEAD commit_type
repository_state newType newHEAD newMERGE_HEAD
@@ -1443,7 +1445,7 @@ proc force_amend {} {
set MERGE_HEAD $newMERGE_HEAD
set commit_type $newType
- set selected_commit_type amend
+ set commit_type_is_amend 1
do_select_commit_type
}
@@ -2494,7 +2496,7 @@ proc force_first_diff {after} {
proc toggle_or_diff {mode w args} {
global file_states file_lists current_diff_path ui_index ui_workdir
- global last_clicked selected_paths
+ global last_clicked selected_paths file_lists_last_clicked
if {$mode eq "click"} {
foreach {x y} $args break
@@ -2551,6 +2553,8 @@ proc toggle_or_diff {mode w args} {
$ui_index tag remove in_sel 0.0 end
$ui_workdir tag remove in_sel 0.0 end
+ set file_lists_last_clicked($w) $path
+
# Determine the state of the file
if {[info exists file_states($path)]} {
set state [lindex $file_states($path) 0]
@@ -2664,6 +2668,32 @@ proc show_less_context {} {
}
}
+proc focus_widget {widget} {
+ global file_lists last_clicked selected_paths
+ global file_lists_last_clicked
+
+ if {[llength $file_lists($widget)] > 0} {
+ set path $file_lists_last_clicked($widget)
+ set index [lsearch -sorted -exact $file_lists($widget) $path]
+ if {$index < 0} {
+ set index 0
+ set path [lindex $file_lists($widget) $index]
+ }
+
+ focus $widget
+ set last_clicked [list $widget [expr $index + 1]]
+ array unset selected_paths
+ set selected_paths($path) 1
+ show_diff $path $widget
+ }
+}
+
+proc toggle_commit_type {} {
+ global commit_type_is_amend
+ set commit_type_is_amend [expr !$commit_type_is_amend]
+ do_select_commit_type
+}
+
######################################################################
##
## ui construction
@@ -2852,19 +2882,11 @@ if {[is_enabled multicommit] || [is_enabled singlecommit]} {
menu .mbar.commit
if {![is_enabled nocommit]} {
- .mbar.commit add radiobutton \
- -label [mc "New Commit"] \
- -command do_select_commit_type \
- -variable selected_commit_type \
- -value new
- lappend disable_on_lock \
- [list .mbar.commit entryconf [.mbar.commit index last] -state]
-
- .mbar.commit add radiobutton \
+ .mbar.commit add checkbutton \
-label [mc "Amend Last Commit"] \
- -command do_select_commit_type \
- -variable selected_commit_type \
- -value amend
+ -accelerator $M1T-E \
+ -variable commit_type_is_amend \
+ -command do_select_commit_type
lappend disable_on_lock \
[list .mbar.commit entryconf [.mbar.commit index last] -state]
@@ -3030,8 +3052,23 @@ unset doc_path doc_url
wm protocol . WM_DELETE_WINDOW do_quit
bind all <$M1B-Key-q> do_quit
bind all <$M1B-Key-Q> do_quit
-bind all <$M1B-Key-w> {destroy [winfo toplevel %W]}
-bind all <$M1B-Key-W> {destroy [winfo toplevel %W]}
+
+set m1b_w_script {
+ set toplvl_win [winfo toplevel %W]
+
+ # If we are destroying the main window, we should call do_quit to take
+ # care of cleanup before exiting the program.
+ if {$toplvl_win eq "."} {
+ do_quit
+ } else {
+ destroy $toplvl_win
+ }
+}
+
+bind all <$M1B-Key-w> $m1b_w_script
+bind all <$M1B-Key-W> $m1b_w_script
+
+unset m1b_w_script
set subcommand_args {}
proc usage {} {
@@ -3337,18 +3374,10 @@ set ui_comm .vpane.lower.commarea.buffer.frame.t
set ui_coml .vpane.lower.commarea.buffer.header.l
if {![is_enabled nocommit]} {
- ${NS}::radiobutton .vpane.lower.commarea.buffer.header.new \
- -text [mc "New Commit"] \
- -command do_select_commit_type \
- -variable selected_commit_type \
- -value new
- lappend disable_on_lock \
- [list .vpane.lower.commarea.buffer.header.new conf -state]
- ${NS}::radiobutton .vpane.lower.commarea.buffer.header.amend \
+ ${NS}::checkbutton .vpane.lower.commarea.buffer.header.amend \
-text [mc "Amend Last Commit"] \
- -command do_select_commit_type \
- -variable selected_commit_type \
- -value amend
+ -variable commit_type_is_amend \
+ -command do_select_commit_type
lappend disable_on_lock \
[list .vpane.lower.commarea.buffer.header.amend conf -state]
}
@@ -3373,7 +3402,6 @@ pack $ui_coml -side left -fill x
if {![is_enabled nocommit]} {
pack .vpane.lower.commarea.buffer.header.amend -side right
- pack .vpane.lower.commarea.buffer.header.new -side right
}
textframe .vpane.lower.commarea.buffer.frame
@@ -3387,10 +3415,16 @@ ttext $ui_comm -background white -foreground black \
-relief sunken \
-width $repo_config(gui.commitmsgwidth) -height 9 -wrap none \
-font font_diff \
+ -xscrollcommand {.vpane.lower.commarea.buffer.frame.sbx set} \
-yscrollcommand {.vpane.lower.commarea.buffer.frame.sby set}
+${NS}::scrollbar .vpane.lower.commarea.buffer.frame.sbx \
+ -orient horizontal \
+ -command [list $ui_comm xview]
${NS}::scrollbar .vpane.lower.commarea.buffer.frame.sby \
+ -orient vertical \
-command [list $ui_comm yview]
+pack .vpane.lower.commarea.buffer.frame.sbx -side bottom -fill x
pack .vpane.lower.commarea.buffer.frame.sby -side right -fill y
pack $ui_comm -side left -fill y
pack .vpane.lower.commarea.buffer.header -side top -fill x
@@ -3606,16 +3640,32 @@ set ctxm .vpane.lower.diff.body.ctxm
menu $ctxm -tearoff 0
$ctxm add command \
-label [mc "Apply/Reverse Hunk"] \
- -command {apply_hunk $cursorX $cursorY}
+ -command {apply_or_revert_hunk $cursorX $cursorY 0}
set ui_diff_applyhunk [$ctxm index last]
lappend diff_actions [list $ctxm entryconf $ui_diff_applyhunk -state]
$ctxm add command \
-label [mc "Apply/Reverse Line"] \
- -command {apply_range_or_line $cursorX $cursorY; do_rescan}
+ -command {apply_or_revert_range_or_line $cursorX $cursorY 0; do_rescan}
set ui_diff_applyline [$ctxm index last]
lappend diff_actions [list $ctxm entryconf $ui_diff_applyline -state]
$ctxm add separator
$ctxm add command \
+ -label [mc "Revert Hunk"] \
+ -command {apply_or_revert_hunk $cursorX $cursorY 1}
+set ui_diff_reverthunk [$ctxm index last]
+lappend diff_actions [list $ctxm entryconf $ui_diff_reverthunk -state]
+$ctxm add command \
+ -label [mc "Revert Line"] \
+ -command {apply_or_revert_range_or_line $cursorX $cursorY 1; do_rescan}
+set ui_diff_revertline [$ctxm index last]
+lappend diff_actions [list $ctxm entryconf $ui_diff_revertline -state]
+$ctxm add command \
+ -label [mc "Undo Last Revert"] \
+ -command {undo_last_revert; do_rescan}
+set ui_diff_undorevert [$ctxm index last]
+lappend diff_actions [list $ctxm entryconf $ui_diff_undorevert -state]
+$ctxm add separator
+$ctxm add command \
-label [mc "Show Less Context"] \
-command show_less_context
lappend diff_actions [list $ctxm entryconf [$ctxm index last] -state]
@@ -3693,7 +3743,7 @@ proc has_textconv {path} {
}
proc popup_diff_menu {ctxm ctxmmg ctxmsm x y X Y} {
- global current_diff_path file_states
+ global current_diff_path file_states last_revert
set ::cursorX $x
set ::cursorY $y
if {[info exists file_states($current_diff_path)]} {
@@ -3707,19 +3757,28 @@ proc popup_diff_menu {ctxm ctxmmg ctxmsm x y X Y} {
tk_popup $ctxmsm $X $Y
} else {
set has_range [expr {[$::ui_diff tag nextrange sel 0.0] != {}}]
+ set u [mc "Undo Last Revert"]
if {$::ui_index eq $::current_diff_side} {
set l [mc "Unstage Hunk From Commit"]
+ set h [mc "Revert Hunk"]
+
if {$has_range} {
set t [mc "Unstage Lines From Commit"]
+ set r [mc "Revert Lines"]
} else {
set t [mc "Unstage Line From Commit"]
+ set r [mc "Revert Line"]
}
} else {
set l [mc "Stage Hunk For Commit"]
+ set h [mc "Revert Hunk"]
+
if {$has_range} {
set t [mc "Stage Lines For Commit"]
+ set r [mc "Revert Lines"]
} else {
set t [mc "Stage Line For Commit"]
+ set r [mc "Revert Line"]
}
}
if {$::is_3way_diff
@@ -3730,11 +3789,35 @@ proc popup_diff_menu {ctxm ctxmmg ctxmsm x y X Y} {
|| [string match {T?} $state]
|| [has_textconv $current_diff_path]} {
set s disabled
+ set revert_state disabled
} else {
set s normal
+
+ # Only allow reverting changes in the working tree. If
+ # the user wants to revert changes in the index, they
+ # need to unstage those first.
+ if {$::ui_workdir eq $::current_diff_side} {
+ set revert_state normal
+ } else {
+ set revert_state disabled
+ }
+ }
+
+ if {$last_revert eq {}} {
+ set undo_state disabled
+ } else {
+ set undo_state normal
}
+
$ctxm entryconf $::ui_diff_applyhunk -state $s -label $l
$ctxm entryconf $::ui_diff_applyline -state $s -label $t
+ $ctxm entryconf $::ui_diff_revertline -state $revert_state \
+ -label $r
+ $ctxm entryconf $::ui_diff_reverthunk -state $revert_state \
+ -label $h
+ $ctxm entryconf $::ui_diff_undorevert -state $undo_state \
+ -label $u
+
tk_popup $ctxm $X $Y
}
}
@@ -3861,6 +3944,8 @@ bind . <$M1B-Key-j> do_revert_selection
bind . <$M1B-Key-J> do_revert_selection
bind . <$M1B-Key-i> do_add_all
bind . <$M1B-Key-I> do_add_all
+bind . <$M1B-Key-e> toggle_commit_type
+bind . <$M1B-Key-E> toggle_commit_type
bind . <$M1B-Key-minus> {show_less_context;break}
bind . <$M1B-Key-KP_Subtract> {show_less_context;break}
bind . <$M1B-Key-equal> {show_more_context;break}
@@ -3877,6 +3962,14 @@ foreach i [list $ui_index $ui_workdir] {
}
unset i
+bind . <Alt-Key-1> {focus_widget $::ui_workdir}
+bind . <Alt-Key-2> {focus_widget $::ui_index}
+bind . <Alt-Key-3> {focus $::ui_diff}
+bind . <Alt-Key-4> {focus $::ui_comm}
+
+set file_lists_last_clicked($ui_index) {}
+set file_lists_last_clicked($ui_workdir) {}
+
set file_lists($ui_index) [list]
set file_lists($ui_workdir) [list]
diff --git a/git-gui/lib/checkout_op.tcl b/git-gui/lib/checkout_op.tcl
index 9e7412c446..a5228297db 100644
--- a/git-gui/lib/checkout_op.tcl
+++ b/git-gui/lib/checkout_op.tcl
@@ -389,7 +389,7 @@ $err
}
method _after_readtree {} {
- global selected_commit_type commit_type HEAD MERGE_HEAD PARENT
+ global commit_type HEAD MERGE_HEAD PARENT
global current_branch is_detached
global ui_comm
@@ -490,12 +490,12 @@ method _update_repo_state {} {
# amend mode our file lists are accurate and we can avoid
# the rescan.
#
- global selected_commit_type commit_type HEAD MERGE_HEAD PARENT
+ global commit_type_is_amend commit_type HEAD MERGE_HEAD PARENT
global ui_comm
unlock_index
set name [_name $this]
- set selected_commit_type new
+ set commit_type_is_amend 0
if {[string match amend* $commit_type]} {
$ui_comm delete 0.0 end
$ui_comm edit reset
diff --git a/git-gui/lib/commit.tcl b/git-gui/lib/commit.tcl
index 75ea965dac..b516aa2990 100644
--- a/git-gui/lib/commit.tcl
+++ b/git-gui/lib/commit.tcl
@@ -333,7 +333,7 @@ proc commit_writetree {curHEAD msg_p} {
proc commit_committree {fd_wt curHEAD msg_p} {
global HEAD PARENT MERGE_HEAD commit_type commit_author
global current_branch
- global ui_comm selected_commit_type
+ global ui_comm commit_type_is_amend
global file_states selected_paths rescan_active
global repo_config
global env
@@ -467,8 +467,8 @@ A rescan will be automatically started now.
# -- Update in memory status
#
- set selected_commit_type new
set commit_type normal
+ set commit_type_is_amend 0
set HEAD $cmt_id
set PARENT $cmt_id
set MERGE_HEAD [list]
diff --git a/git-gui/lib/diff.tcl b/git-gui/lib/diff.tcl
index 68c4a6c736..958a0fa219 100644
--- a/git-gui/lib/diff.tcl
+++ b/git-gui/lib/diff.tcl
@@ -55,7 +55,7 @@ proc reshow_diff {{after {}}} {
proc force_diff_encoding {enc} {
global current_diff_path
-
+
if {$current_diff_path ne {}} {
force_path_encoding $current_diff_path $enc
reshow_diff
@@ -567,24 +567,31 @@ proc read_diff {fd conflict_size cont_info} {
}
}
-proc apply_hunk {x y} {
+proc apply_or_revert_hunk {x y revert} {
global current_diff_path current_diff_header current_diff_side
- global ui_diff ui_index file_states
+ global ui_diff ui_index file_states last_revert last_revert_enc
if {$current_diff_path eq {} || $current_diff_header eq {}} return
if {![lock_index apply_hunk]} return
- set apply_cmd {apply --cached --whitespace=nowarn}
+ set apply_cmd {apply --whitespace=nowarn}
set mi [lindex $file_states($current_diff_path) 0]
if {$current_diff_side eq $ui_index} {
set failed_msg [mc "Failed to unstage selected hunk."]
- lappend apply_cmd --reverse
+ lappend apply_cmd --reverse --cached
if {[string index $mi 0] ne {M}} {
unlock_index
return
}
} else {
- set failed_msg [mc "Failed to stage selected hunk."]
+ if {$revert} {
+ set failed_msg [mc "Failed to revert selected hunk."]
+ lappend apply_cmd --reverse
+ } else {
+ set failed_msg [mc "Failed to stage selected hunk."]
+ lappend apply_cmd --cached
+ }
+
if {[string index $mi 1] ne {M}} {
unlock_index
return
@@ -603,29 +610,40 @@ proc apply_hunk {x y} {
set e_lno end
}
+ set wholepatch "$current_diff_header[$ui_diff get $s_lno $e_lno]"
+
if {[catch {
set enc [get_path_encoding $current_diff_path]
set p [eval git_write $apply_cmd]
fconfigure $p -translation binary -encoding $enc
- puts -nonewline $p $current_diff_header
- puts -nonewline $p [$ui_diff get $s_lno $e_lno]
+ puts -nonewline $p $wholepatch
close $p} err]} {
error_popup "$failed_msg\n\n$err"
unlock_index
return
}
+ if {$revert} {
+ # Save a copy of this patch for undoing reverts.
+ set last_revert $wholepatch
+ set last_revert_enc $enc
+ }
+
$ui_diff conf -state normal
$ui_diff delete $s_lno $e_lno
$ui_diff conf -state disabled
+ # Check if the hunk was the last one in the file.
if {[$ui_diff get 1.0 end] eq "\n"} {
set o _
} else {
set o ?
}
- if {$current_diff_side eq $ui_index} {
+ # Update the status flags.
+ if {$revert} {
+ set mi [string index $mi 0]$o
+ } elseif {$current_diff_side eq $ui_index} {
set mi ${o}M
} elseif {[string index $mi 0] eq {_}} {
set mi M$o
@@ -640,9 +658,9 @@ proc apply_hunk {x y} {
}
}
-proc apply_range_or_line {x y} {
+proc apply_or_revert_range_or_line {x y revert} {
global current_diff_path current_diff_header current_diff_side
- global ui_diff ui_index file_states
+ global ui_diff ui_index file_states last_revert
set selected [$ui_diff tag nextrange sel 0.0]
@@ -660,19 +678,27 @@ proc apply_range_or_line {x y} {
if {$current_diff_path eq {} || $current_diff_header eq {}} return
if {![lock_index apply_hunk]} return
- set apply_cmd {apply --cached --whitespace=nowarn}
+ set apply_cmd {apply --whitespace=nowarn}
set mi [lindex $file_states($current_diff_path) 0]
if {$current_diff_side eq $ui_index} {
set failed_msg [mc "Failed to unstage selected line."]
set to_context {+}
- lappend apply_cmd --reverse
+ lappend apply_cmd --reverse --cached
if {[string index $mi 0] ne {M}} {
unlock_index
return
}
} else {
- set failed_msg [mc "Failed to stage selected line."]
- set to_context {-}
+ if {$revert} {
+ set failed_msg [mc "Failed to revert selected line."]
+ set to_context {+}
+ lappend apply_cmd --reverse
+ } else {
+ set failed_msg [mc "Failed to stage selected line."]
+ set to_context {-}
+ lappend apply_cmd --cached
+ }
+
if {[string index $mi 1] ne {M}} {
unlock_index
return
@@ -830,7 +856,47 @@ proc apply_range_or_line {x y} {
puts -nonewline $p $wholepatch
close $p} err]} {
error_popup "$failed_msg\n\n$err"
+ unlock_index
+ return
+ }
+
+ if {$revert} {
+ # Save a copy of this patch for undoing reverts.
+ set last_revert $current_diff_header$wholepatch
+ set last_revert_enc $enc
}
unlock_index
}
+
+# Undo the last line/hunk reverted. When hunks and lines are reverted, a copy
+# of the diff applied is saved. Re-apply that diff to undo the revert.
+#
+# Right now, we only use a single variable to hold the copy, and not a
+# stack/deque for simplicity, so multiple undos are not possible. Maybe this
+# can be added if the need for something like this is felt in the future.
+proc undo_last_revert {} {
+ global last_revert current_diff_path current_diff_header
+ global last_revert_enc
+
+ if {$last_revert eq {}} return
+ if {![lock_index apply_hunk]} return
+
+ set apply_cmd {apply --whitespace=nowarn}
+ set failed_msg [mc "Failed to undo last revert."]
+
+ if {[catch {
+ set enc $last_revert_enc
+ set p [eval git_write $apply_cmd]
+ fconfigure $p -translation binary -encoding $enc
+ puts -nonewline $p $last_revert
+ close $p} err]} {
+ error_popup "$failed_msg\n\n$err"
+ unlock_index
+ return
+ }
+
+ set last_revert {}
+
+ unlock_index
+}
diff --git a/git-gui/lib/index.tcl b/git-gui/lib/index.tcl
index b588db11d9..e07b7a3762 100644
--- a/git-gui/lib/index.tcl
+++ b/git-gui/lib/index.tcl
@@ -466,19 +466,19 @@ proc do_revert_selection {} {
}
proc do_select_commit_type {} {
- global commit_type selected_commit_type
+ global commit_type commit_type_is_amend
- if {$selected_commit_type eq {new}
+ if {$commit_type_is_amend == 0
&& [string match amend* $commit_type]} {
create_new_commit
- } elseif {$selected_commit_type eq {amend}
+ } elseif {$commit_type_is_amend == 1
&& ![string match amend* $commit_type]} {
load_last_commit
# The amend request was rejected...
#
if {![string match amend* $commit_type]} {
- set selected_commit_type new
+ set commit_type_is_amend 0
}
}
}
diff --git a/git-p4.py b/git-p4.py
index 3991e7d1a7..60c73b6a37 100755
--- a/git-p4.py
+++ b/git-p4.py
@@ -1160,13 +1160,11 @@ class LargeFileSystem(object):
if contentsSize <= gitConfigInt('git-p4.largeFileCompressedThreshold'):
return False
contentTempFile = self.generateTempFile(contents)
- compressedContentFile = tempfile.NamedTemporaryFile(prefix='git-p4-large-file', delete=False)
- zf = zipfile.ZipFile(compressedContentFile.name, mode='w')
- zf.write(contentTempFile, compress_type=zipfile.ZIP_DEFLATED)
- zf.close()
- compressedContentsSize = zf.infolist()[0].compress_size
+ compressedContentFile = tempfile.NamedTemporaryFile(prefix='git-p4-large-file', delete=True)
+ with zipfile.ZipFile(compressedContentFile, mode='w') as zf:
+ zf.write(contentTempFile, compress_type=zipfile.ZIP_DEFLATED)
+ compressedContentsSize = zf.infolist()[0].compress_size
os.remove(contentTempFile)
- os.remove(compressedContentFile.name)
if compressedContentsSize > gitConfigInt('git-p4.largeFileCompressedThreshold'):
return True
return False
@@ -3525,8 +3523,9 @@ class P4Sync(Command, P4UserMap):
self.updateOptionDict(details)
try:
self.commit(details, self.extractFilesFromCommit(details), self.branch)
- except IOError:
+ except IOError as err:
print("IO error with git fast-import. Is your git version recent enough?")
+ print("IO error details: {}".format(err))
print(self.gitError.read())
def openStreams(self):
diff --git a/git.c b/git.c
index c1ee7124ed..ce6ab0ece2 100644
--- a/git.c
+++ b/git.c
@@ -369,8 +369,7 @@ static int handle_alias(int *argcp, const char ***argv)
die(_("alias '%s' changes environment variables.\n"
"You can use '!git' in the alias to do this"),
alias_command);
- memmove(new_argv - option_count, new_argv,
- count * sizeof(char *));
+ MOVE_ARRAY(new_argv - option_count, new_argv, count);
new_argv -= option_count;
if (count < 1)
@@ -385,7 +384,7 @@ static int handle_alias(int *argcp, const char ***argv)
REALLOC_ARRAY(new_argv, count + *argcp);
/* insert after command name */
- memcpy(new_argv + count, *argv + 1, sizeof(char *) * *argcp);
+ COPY_ARRAY(new_argv + count, *argv + 1, *argcp);
trace2_cmd_alias(alias_command, new_argv);
trace2_cmd_list_config();
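The git.c hunks above swap open-coded memmove()/memcpy() size arithmetic for MOVE_ARRAY()/COPY_ARRAY(), which take an element count and assert at compile time that source and destination elements have the same size. A minimal sketch of the two helpers; the array contents are illustrative:

	int src[4] = { 1, 2, 3, 4 };
	int dst[4];

	COPY_ARRAY(dst, src, 4);	/* copies 4 elements, not 4 bytes */
	MOVE_ARRAY(dst + 1, dst, 3);	/* memmove semantics for overlapping ranges */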
diff --git a/gitk-git/gitk b/gitk-git/gitk
index a14d7a16b2..abe4805ade 100755
--- a/gitk-git/gitk
+++ b/gitk-git/gitk
@@ -3404,6 +3404,8 @@ set rectmask {
}
image create bitmap reficon-H -background black -foreground "#00ff00" \
-data $rectdata -maskdata $rectmask
+image create bitmap reficon-R -background black -foreground "#ffddaa" \
+ -data $rectdata -maskdata $rectmask
image create bitmap reficon-o -background black -foreground "#ddddff" \
-data $rectdata -maskdata $rectmask
@@ -7016,6 +7018,7 @@ proc commit_descriptor {p} {
# append some text to the ctext widget, and make any SHA1 ID
# that we know about be a clickable link.
+# Also look for URLs of the form "http[s]://..." and make them web links.
proc appendwithlinks {text tags} {
global ctext linknum curview
@@ -7032,6 +7035,18 @@ proc appendwithlinks {text tags} {
setlink $linkid link$linknum
incr linknum
}
+ set wlinks [regexp -indices -all -inline -line \
+ {https?://[^[:space:]]+} $text]
+ foreach l $wlinks {
+ set s2 [lindex $l 0]
+ set e2 [lindex $l 1]
+ set url [string range $text $s2 $e2]
+ incr e2
+ $ctext tag delete link$linknum
+ $ctext tag add link$linknum "$start + $s2 c" "$start + $e2 c"
+ setwlink $url link$linknum
+ incr linknum
+ }
}
proc setlink {id lk} {
@@ -7064,6 +7079,18 @@ proc setlink {id lk} {
}
}
+proc setwlink {url lk} {
+ global ctext
+ global linkfgcolor
+ global web_browser
+
+ if {$web_browser eq {}} return
+ $ctext tag conf $lk -foreground $linkfgcolor -underline 1
+ $ctext tag bind $lk <1> [list browseweb $url]
+ $ctext tag bind $lk <Enter> {linkcursor %W 1}
+ $ctext tag bind $lk <Leave> {linkcursor %W -1}
+}
+
proc appendshortlink {id {pre {}} {post {}}} {
global ctext linknum
@@ -7098,6 +7125,16 @@ proc linkcursor {w inc} {
}
}
+proc browseweb {url} {
+ global web_browser
+
+ if {$web_browser eq {}} return
+ # Use eval here in case $web_browser is a command plus some arguments
+ if {[catch {eval exec $web_browser [list $url] &} err]} {
+ error_popup "[mc "Error starting web browser:"] $err"
+ }
+}
+
proc viewnextline {dir} {
global canv linespc
@@ -8191,11 +8228,11 @@ proc parseblobdiffline {ids line} {
} else {
$ctext insert end "$line\n" filesep
}
- } elseif {![string compare -length 3 " >" $line]} {
+ } elseif {$currdiffsubmod != "" && ![string compare -length 3 " >" $line]} {
set $currdiffsubmod ""
set line [encoding convertfrom $diffencoding $line]
$ctext insert end "$line\n" dresult
- } elseif {![string compare -length 3 " <" $line]} {
+ } elseif {$currdiffsubmod != "" && ![string compare -length 3 " <" $line]} {
set $currdiffsubmod ""
set line [encoding convertfrom $diffencoding $line]
$ctext insert end "$line\n" d0
@@ -10022,6 +10059,7 @@ proc sel_reflist {w x y} {
set n [lindex $ref 0]
switch -- [lindex $ref 1] {
"H" {selbyid $headids($n)}
+ "R" {selbyid $headids($n)}
"T" {selbyid $tagids($n)}
"o" {selbyid $otherrefids($n)}
}
@@ -10051,7 +10089,11 @@ proc refill_reflist {} {
foreach n [array names headids] {
if {[string match $reflistfilter $n]} {
if {[commitinview $headids($n) $curview]} {
- lappend refs [list $n H]
+ if {[string match "remotes/*" $n]} {
+ lappend refs [list $n R]
+ } else {
+ lappend refs [list $n H]
+ }
} else {
interestedin $headids($n) {run refill_reflist}
}
@@ -11488,7 +11530,7 @@ proc create_prefs_page {w} {
proc prefspage_general {notebook} {
global NS maxwidth maxgraphpct showneartags showlocalchanges
global tabstop limitdiffs autoselect autosellen extdifftool perfile_attrs
- global hideremotes want_ttk have_ttk maxrefs
+ global hideremotes want_ttk have_ttk maxrefs web_browser
set page [create_prefs_page $notebook.general]
@@ -11539,6 +11581,13 @@ proc prefspage_general {notebook} {
pack configure $page.extdifff.l -padx 10
grid x $page.extdifff $page.extdifft -sticky ew
+ ${NS}::entry $page.webbrowser -textvariable web_browser
+ ${NS}::frame $page.webbrowserf
+ ${NS}::label $page.webbrowserf.l -text [mc "Web browser" ]
+ pack $page.webbrowserf.l -side left
+ pack configure $page.webbrowserf.l -padx 10
+ grid x $page.webbrowserf $page.webbrowser -sticky ew
+
${NS}::label $page.lgen -text [mc "General options"]
grid $page.lgen - -sticky w -pady 10
${NS}::checkbutton $page.want_ttk -variable want_ttk \
@@ -12310,6 +12359,7 @@ if {[tk windowingsystem] eq "win32"} {
set bgcolor SystemWindow
set fgcolor SystemWindowText
set selectbgcolor SystemHighlight
+ set web_browser "cmd /c start"
} else {
set uicolor grey85
set uifgcolor black
@@ -12317,6 +12367,11 @@ if {[tk windowingsystem] eq "win32"} {
set bgcolor white
set fgcolor black
set selectbgcolor gray85
+ if {[tk windowingsystem] eq "aqua"} {
+ set web_browser "open"
+ } else {
+ set web_browser "xdg-open"
+ }
}
set diffcolors {red "#00a000" blue}
set diffcontext 3
@@ -12390,6 +12445,7 @@ set config_variables {
filesepbgcolor filesepfgcolor linehoverbgcolor linehoverfgcolor
linehoveroutlinecolor mainheadcirclecolor workingfilescirclecolor
indexcirclecolor circlecolors linkfgcolor circleoutlinecolor
+ web_browser
}
foreach var $config_variables {
config_init_trace $var
diff --git a/gitk-git/po/zh_cn.po b/gitk-git/po/zh_cn.po
new file mode 100644
index 0000000000..17b7f899da
--- /dev/null
+++ b/gitk-git/po/zh_cn.po
@@ -0,0 +1,1367 @@
+# Translation of gitk to Simplified Chinese.
+#
+# Translators:
+# YanKe <imyanke@163.com>, 2017
+
+msgid ""
+msgstr ""
+"Project-Id-Version: Git Chinese Localization Project\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2017-02-28 23:11+0800\n"
+"PO-Revision-Date: 2017-03-11 02:27+0800\n"
+"Last-Translator: YanKe <imyanke@163.com>\n"
+"Language-Team: Chinese\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: zh_CN\n"
+
+#: gitk:140
+msgid "Couldn't get list of unmerged files:"
+msgstr "不能获取未合并文件列表:"
+
+#: gitk:212 gitk:2403
+msgid "Color words"
+msgstr "着色显示差异"
+
+#: gitk:217 gitk:2403 gitk:8249 gitk:8282
+msgid "Markup words"
+msgstr "标记显示差异"
+
+#: gitk:324
+msgid "Error parsing revisions:"
+msgstr "解析版本错误:"
+
+#: gitk:380
+msgid "Error executing --argscmd command:"
+msgstr "运行 --argscmd命令出错"
+
+#: gitk:393
+msgid "No files selected: --merge specified but no files are unmerged."
+msgstr "没有选中文件:--指定merge参数但没有未合并的文件。"
+
+#: gitk:396
+msgid ""
+"No files selected: --merge specified but no unmerged files are within file "
+"limit."
+msgstr "没有选中文件:--指定merge参数但没有未合并的文件在文件中"
+
+#: gitk:418 gitk:566
+msgid "Error executing git log:"
+msgstr "执行git log命令出错:"
+
+#: gitk:436 gitk:582
+msgid "Reading"
+msgstr "读取中"
+
+#: gitk:496 gitk:4549
+msgid "Reading commits..."
+msgstr "提交记录读取中..."
+
+#: gitk:499 gitk:1641 gitk:4552
+msgid "No commits selected"
+msgstr "未选中任何提交"
+
+#: gitk:1449 gitk:4069 gitk:12583
+msgid "Command line"
+msgstr "命令行"
+
+#: gitk:1515
+msgid "Can't parse git log output:"
+msgstr "不能解析git log输出:"
+
+#: gitk:1744
+msgid "No commit information available"
+msgstr "无可用提交信息"
+
+#: gitk:1907 gitk:1936 gitk:4339 gitk:9789 gitk:11388 gitk:11668
+msgid "OK"
+msgstr "确定"
+
+#: gitk:1938 gitk:4341 gitk:9225 gitk:9304 gitk:9434 gitk:9520 gitk:9791
+#: gitk:11389 gitk:11669
+msgid "Cancel"
+msgstr "取消"
+
+#: gitk:2087
+msgid "&Update"
+msgstr "更新"
+
+#: gitk:2088
+msgid "&Reload"
+msgstr "重新加载"
+
+#: gitk:2089
+msgid "Reread re&ferences"
+msgstr "重新读取引用"
+
+#: gitk:2090
+msgid "&List references"
+msgstr "列出引用(分支以及tag)"
+
+#: gitk:2092
+msgid "Start git &gui"
+msgstr "启动git gui客户端"
+
+#: gitk:2094
+msgid "&Quit"
+msgstr "退出"
+
+#: gitk:2086
+msgid "&File"
+msgstr "文件"
+
+#: gitk:2098
+msgid "&Preferences"
+msgstr "偏好设置"
+
+#: gitk:2097
+msgid "&Edit"
+msgstr "编辑"
+
+#: gitk:2102
+msgid "&New view..."
+msgstr "新视图..."
+
+#: gitk:2103
+msgid "&Edit view..."
+msgstr "编辑视图..."
+
+#: gitk:2104
+msgid "&Delete view"
+msgstr "删除视图"
+
+#: gitk:2106
+msgid "&All files"
+msgstr "所有文件"
+
+#: gitk:2101
+msgid "&View"
+msgstr "视图"
+
+#: gitk:2111 gitk:2121
+msgid "&About gitk"
+msgstr "关于gitk"
+
+#: gitk:2112 gitk:2126
+msgid "&Key bindings"
+msgstr "快捷键"
+
+#: gitk:2110 gitk:2125
+msgid "&Help"
+msgstr "帮助"
+
+#: gitk:2203 gitk:8681
+msgid "SHA1 ID:"
+msgstr "SHA1 ID:"
+
+#: gitk:2247
+msgid "Row"
+msgstr "行"
+
+#: gitk:2285
+msgid "Find"
+msgstr "查找"
+
+#: gitk:2313
+msgid "commit"
+msgstr "提交"
+
+#: gitk:2317 gitk:2319 gitk:4711 gitk:4734 gitk:4758 gitk:6779 gitk:6851
+#: gitk:6936
+msgid "containing:"
+msgstr "包含:"
+
+#: gitk:2320 gitk:3550 gitk:3555 gitk:4787
+msgid "touching paths:"
+msgstr "影响路径:"
+
+#: gitk:2321 gitk:4801
+msgid "adding/removing string:"
+msgstr "增加/删除字符串:"
+
+#: gitk:2322 gitk:4803
+msgid "changing lines matching:"
+msgstr "改变行匹配:"
+
+#: gitk:2331 gitk:2333 gitk:4790
+msgid "Exact"
+msgstr "精确匹配"
+
+#: gitk:2333 gitk:4878 gitk:6747
+msgid "IgnCase"
+msgstr "忽略大小写"
+
+#: gitk:2333 gitk:4760 gitk:4876 gitk:6743
+msgid "Regexp"
+msgstr "正则"
+
+#: gitk:2335 gitk:2336 gitk:4898 gitk:4928 gitk:4935 gitk:6872 gitk:6940
+msgid "All fields"
+msgstr "所有字段"
+
+#: gitk:2336 gitk:4895 gitk:4928 gitk:6810
+msgid "Headline"
+msgstr "标题"
+
+#: gitk:2337 gitk:4895 gitk:6810 gitk:6940 gitk:7413
+msgid "Comments"
+msgstr "提交注释"
+
+#: gitk:2337 gitk:4895 gitk:4900 gitk:4935 gitk:6810 gitk:7348 gitk:8859
+#: gitk:8874
+msgid "Author"
+msgstr "作者"
+
+#: gitk:2337 gitk:4895 gitk:6810 gitk:7350
+msgid "Committer"
+msgstr "提交者"
+
+#: gitk:2371
+msgid "Search"
+msgstr "搜索"
+
+#: gitk:2379
+msgid "Diff"
+msgstr "差异"
+
+#: gitk:2381
+msgid "Old version"
+msgstr "老版本"
+
+#: gitk:2383
+msgid "New version"
+msgstr "新版本"
+
+#: gitk:2386
+msgid "Lines of context"
+msgstr "Diff上下文显示行数"
+
+#: gitk:2396
+msgid "Ignore space change"
+msgstr "忽略空格修改"
+
+#: gitk:2400 gitk:2402 gitk:7983 gitk:8235
+msgid "Line diff"
+msgstr "按行显示差异"
+
+#: gitk:2467
+msgid "Patch"
+msgstr "补丁"
+
+#: gitk:2469
+msgid "Tree"
+msgstr "树"
+
+#: gitk:2639 gitk:2660
+msgid "Diff this -> selected"
+msgstr "比较从当前提交到选中提交的差异"
+
+#: gitk:2640 gitk:2661
+msgid "Diff selected -> this"
+msgstr "比较从选中提交到当前提交的差异"
+
+#: gitk:2641 gitk:2662
+msgid "Make patch"
+msgstr "制作补丁"
+
+#: gitk:2642 gitk:9283
+msgid "Create tag"
+msgstr "创建tag"
+
+#: gitk:2643
+msgid "Copy commit summary"
+msgstr "复制提交摘要"
+
+#: gitk:2644 gitk:9414
+msgid "Write commit to file"
+msgstr "写入提交到文件"
+
+#: gitk:2645
+msgid "Create new branch"
+msgstr "创建新分支"
+
+#: gitk:2646
+msgid "Cherry-pick this commit"
+msgstr "在此提交运用补丁(cherry-pick)命令"
+
+#: gitk:2647
+msgid "Reset HEAD branch to here"
+msgstr "将分支头(HEAD)重置到此处"
+
+#: gitk:2648
+msgid "Mark this commit"
+msgstr "标记此提交"
+
+#: gitk:2649
+msgid "Return to mark"
+msgstr "返回到标记"
+
+#: gitk:2650
+msgid "Find descendant of this and mark"
+msgstr "查找本次提交的子提交并标记"
+
+#: gitk:2651
+msgid "Compare with marked commit"
+msgstr "和已标记的提交作比较"
+
+#: gitk:2652 gitk:2663
+msgid "Diff this -> marked commit"
+msgstr "比较从当前提交到已标记提交的差异"
+
+#: gitk:2653 gitk:2664
+msgid "Diff marked commit -> this"
+msgstr "比较从已标记提交到当前提交的差异"
+
+#: gitk:2654
+msgid "Revert this commit"
+msgstr "撤销(revert)此提交"
+
+#: gitk:2670
+msgid "Check out this branch"
+msgstr "检出(checkout)此分支"
+
+#: gitk:2671
+msgid "Rename this branch"
+msgstr "重命名(Rename)此分支"
+
+#: gitk:2672
+msgid "Remove this branch"
+msgstr "删除(Remove)此分支"
+
+#: gitk:2673
+msgid "Copy branch name"
+msgstr "复制分支名称"
+
+#: gitk:2680
+msgid "Highlight this too"
+msgstr "高亮此处"
+
+#: gitk:2681
+msgid "Highlight this only"
+msgstr "只高亮此处"
+
+#: gitk:2682
+msgid "External diff"
+msgstr "外部diff"
+
+#: gitk:2683
+msgid "Blame parent commit"
+msgstr "Blame父提交"
+
+#: gitk:2684
+msgid "Copy path"
+msgstr "复制路径"
+
+#: gitk:2691
+msgid "Show origin of this line"
+msgstr "显示此行原始提交"
+
+#: gitk:2692
+msgid "Run git gui blame on this line"
+msgstr "在此行运行git gui客户端的blame"
+
+#: gitk:3036
+msgid "About gitk"
+msgstr "关于gitk"
+
+#: gitk:3038
+msgid ""
+"\n"
+"Gitk - a commit viewer for git\n"
+"\n"
+"Copyright © 2005-2016 Paul Mackerras\n"
+"\n"
+"Use and redistribute under the terms of the GNU General Public License"
+msgstr "\nGitk — 一个git的提交查看器\n\n© 2005-2016 Paul Mackerras\n\n在GNU许可证下使用以及分发"
+
+#: gitk:3046 gitk:3113 gitk:10004
+msgid "Close"
+msgstr "关闭"
+
+#: gitk:3067
+msgid "Gitk key bindings"
+msgstr "Gitk快捷键"
+
+#: gitk:3070
+msgid "Gitk key bindings:"
+msgstr "Gitk快捷键:"
+
+#: gitk:3072
+#, tcl-format
+msgid "<%s-Q>\t\tQuit"
+msgstr "<%s-Q>\t\t退出"
+
+#: gitk:3073
+#, tcl-format
+msgid "<%s-W>\t\tClose window"
+msgstr "<%s-W>\t\t关闭窗口"
+
+#: gitk:3074
+msgid "<Home>\t\tMove to first commit"
+msgstr "<Home>\t\t移动到第一次提交"
+
+#: gitk:3075
+msgid "<End>\t\tMove to last commit"
+msgstr "<End>\t\t移动到最后一次提交"
+
+#: gitk:3076
+msgid "<Up>, p, k\tMove up one commit"
+msgstr "<Up>, p, k\t移动到上一次提交"
+
+#: gitk:3077
+msgid "<Down>, n, j\tMove down one commit"
+msgstr "<Down>, n, j\t移动到下一次提交"
+
+#: gitk:3078
+msgid "<Left>, z, h\tGo back in history list"
+msgstr "<Left>, z, h\t历史列表的上一项"
+
+#: gitk:3079
+msgid "<Right>, x, l\tGo forward in history list"
+msgstr "<Right>, x, l\t历史列表的下一项"
+
+#: gitk:3080
+#, tcl-format
+msgid "<%s-n>\tGo to n-th parent of current commit in history list"
+msgstr "<%s-n>\t在历史列表中前往本次提交的第n个父提交"
+
+#: gitk:3081
+msgid "<PageUp>\tMove up one page in commit list"
+msgstr "<PageUp>\t上一页提交列表"
+
+#: gitk:3082
+msgid "<PageDown>\tMove down one page in commit list"
+msgstr "<PageDown>\t下一页提交列表"
+
+#: gitk:3083
+#, tcl-format
+msgid "<%s-Home>\tScroll to top of commit list"
+msgstr "<%s-Home>\t滚动到提交列表顶部"
+
+#: gitk:3084
+#, tcl-format
+msgid "<%s-End>\tScroll to bottom of commit list"
+msgstr "<%s-End>\t滚动到提交列表底部"
+
+#: gitk:3085
+#, tcl-format
+msgid "<%s-Up>\tScroll commit list up one line"
+msgstr "<%s-Up>\t向上滚动一行提交列表"
+
+#: gitk:3086
+#, tcl-format
+msgid "<%s-Down>\tScroll commit list down one line"
+msgstr "<%s-Down>\t向下滚动一行提交列表"
+
+#: gitk:3087
+#, tcl-format
+msgid "<%s-PageUp>\tScroll commit list up one page"
+msgstr "<%s-PageUp>\t向上滚动一页提交列表"
+
+#: gitk:3088
+#, tcl-format
+msgid "<%s-PageDown>\tScroll commit list down one page"
+msgstr "<%s-PageDown>\t向下滚动一页提交列表"
+
+#: gitk:3089
+msgid "<Shift-Up>\tFind backwards (upwards, later commits)"
+msgstr "<Shift-Up>\t向后查找(向上的,更晚的提交)"
+
+#: gitk:3090
+msgid "<Shift-Down>\tFind forwards (downwards, earlier commits)"
+msgstr "<Shift-Down>\t向前查找(向下的,更早的提交)"
+
+#: gitk:3091
+msgid "<Delete>, b\tScroll diff view up one page"
+msgstr "<Delete>, b\t向上滚动diff视图一页"
+
+#: gitk:3092
+msgid "<Backspace>\tScroll diff view up one page"
+msgstr "<Backspace>\t向上滚动diff视图一页"
+
+#: gitk:3093
+msgid "<Space>\t\tScroll diff view down one page"
+msgstr "<Space>\t\t向下滚动diff视图一页"
+
+#: gitk:3094
+msgid "u\t\tScroll diff view up 18 lines"
+msgstr "u\t\t向上滚动diff视图18行"
+
+#: gitk:3095
+msgid "d\t\tScroll diff view down 18 lines"
+msgstr "d\t\t向下滚动diff视图18行"
+
+#: gitk:3096
+#, tcl-format
+msgid "<%s-F>\t\tFind"
+msgstr "<%s-F>\t\t查找"
+
+#: gitk:3097
+#, tcl-format
+msgid "<%s-G>\t\tMove to next find hit"
+msgstr "<%s-G>\t\t移动到下一次查找命中"
+
+#: gitk:3098
+msgid "<Return>\tMove to next find hit"
+msgstr "<Return>\t\t移动到下一次查找命中"
+
+#: gitk:3099
+msgid "g\t\tGo to commit"
+msgstr "g\t\t转到提交"
+
+#: gitk:3100
+msgid "/\t\tFocus the search box"
+msgstr "/\t\t选中搜索框"
+
+#: gitk:3101
+msgid "?\t\tMove to previous find hit"
+msgstr "?\t\t移动到上一次查找命中"
+
+#: gitk:3102
+msgid "f\t\tScroll diff view to next file"
+msgstr "f\t\t滚动diff视图到下一个文件"
+
+#: gitk:3103
+#, tcl-format
+msgid "<%s-S>\t\tSearch for next hit in diff view"
+msgstr "<%s-S>\t\t在diff视图中查找下一此命中"
+
+#: gitk:3104
+#, tcl-format
+msgid "<%s-R>\t\tSearch for previous hit in diff view"
+msgstr "<%s-R>\t\t在diff视图中查找上一次命中"
+
+#: gitk:3105
+#, tcl-format
+msgid "<%s-KP+>\tIncrease font size"
+msgstr "<%s-KP+>\t增大字体大小"
+
+#: gitk:3106
+#, tcl-format
+msgid "<%s-plus>\tIncrease font size"
+msgstr "<%s-plus>\t增大字体大小"
+
+#: gitk:3107
+#, tcl-format
+msgid "<%s-KP->\tDecrease font size"
+msgstr "<%s-KP->\t减小字体大小"
+
+#: gitk:3108
+#, tcl-format
+msgid "<%s-minus>\tDecrease font size"
+msgstr "<%s-minus>\t减小字体大小"
+
+#: gitk:3109
+msgid "<F5>\t\tUpdate"
+msgstr "<F5>\t\t更新"
+
+#: gitk:3574 gitk:3583
+#, tcl-format
+msgid "Error creating temporary directory %s:"
+msgstr "创建临时目录出错%s:"
+
+#: gitk:3596
+#, tcl-format
+msgid "Error getting \"%s\" from %s:"
+msgstr "从%s获取\"%s\"出错:"
+
+#: gitk:3659
+msgid "command failed:"
+msgstr "执行命令失败:"
+
+#: gitk:3808
+msgid "No such commit"
+msgstr "无此提交"
+
+#: gitk:3822
+msgid "git gui blame: command failed:"
+msgstr "git gui blame:执行命令失败:"
+
+#: gitk:3853
+#, tcl-format
+msgid "Couldn't read merge head: %s"
+msgstr "不能读取合并头(merge head):%s"
+
+#: gitk:3861
+#, tcl-format
+msgid "Error reading index: %s"
+msgstr "读取索引出错:%s"
+
+#: gitk:3886
+#, tcl-format
+msgid "Couldn't start git blame: %s"
+msgstr "不能执行git blame:%s"
+
+#: gitk:3889 gitk:6778
+msgid "Searching"
+msgstr "搜索中"
+
+#: gitk:3921
+#, tcl-format
+msgid "Error running git blame: %s"
+msgstr "运行git blame出错:%s"
+
+#: gitk:3949
+#, tcl-format
+msgid "That line comes from commit %s, which is not in this view"
+msgstr "此行来自提交%s,不在此视图中"
+
+#: gitk:3963
+msgid "External diff viewer failed:"
+msgstr "外部diff查看器失败:"
+
+#: gitk:4067
+msgid "All files"
+msgstr "所有文件"
+
+#: gitk:4091
+msgid "View"
+msgstr "视图"
+
+#: gitk:4094
+msgid "Gitk view definition"
+msgstr "Gitk视图定义"
+
+#: gitk:4098
+msgid "Remember this view"
+msgstr "记住此视图"
+
+#: gitk:4099
+msgid "References (space separated list):"
+msgstr "引用(空格切分的列表):"
+
+#: gitk:4100
+msgid "Branches & tags:"
+msgstr "分支和tags"
+
+#: gitk:4101
+msgid "All refs"
+msgstr "所有引用"
+
+#: gitk:4102
+msgid "All (local) branches"
+msgstr "所有(本地)分支"
+
+#: gitk:4103
+msgid "All tags"
+msgstr "所有tag"
+
+#: gitk:4104
+msgid "All remote-tracking branches"
+msgstr "所有远程跟踪分支"
+
+#: gitk:4105
+msgid "Commit Info (regular expressions):"
+msgstr "提交信息 (正则表达式):"
+
+#: gitk:4106
+msgid "Author:"
+msgstr "作者:"
+
+#: gitk:4107
+msgid "Committer:"
+msgstr "提交者:"
+
+#: gitk:4108
+msgid "Commit Message:"
+msgstr "提交信息:"
+
+#: gitk:4109
+msgid "Matches all Commit Info criteria"
+msgstr "匹配所有提交信息标准"
+
+#: gitk:4110
+msgid "Matches no Commit Info criteria"
+msgstr "匹配无提交信息标准"
+
+#: gitk:4111
+msgid "Changes to Files:"
+msgstr "文件修改列表:"
+
+#: gitk:4112
+msgid "Fixed String"
+msgstr "固定字符串"
+
+#: gitk:4113
+msgid "Regular Expression"
+msgstr "正则表达式:"
+
+#: gitk:4114
+msgid "Search string:"
+msgstr "搜索字符串:"
+
+#: gitk:4115
+msgid ""
+"Commit Dates (\"2 weeks ago\", \"2009-03-17 15:27:38\", \"March 17, 2009 "
+"15:27:38\"):"
+msgstr "提交日期 (\"2星期之前\", \"2009-03-17 15:27:38\", \"5月 17, 2009 15:27:38\"):"
+
+#: gitk:4116
+msgid "Since:"
+msgstr "自:"
+
+#: gitk:4117
+msgid "Until:"
+msgstr "到:"
+
+#: gitk:4118
+msgid "Limit and/or skip a number of revisions (positive integer):"
+msgstr "限制 且/或 跳过一定数量的版本(正整数):"
+
+#: gitk:4119
+msgid "Number to show:"
+msgstr "显示数量:"
+
+#: gitk:4120
+msgid "Number to skip:"
+msgstr "跳过数量:"
+
+#: gitk:4121
+msgid "Miscellaneous options:"
+msgstr "其他选项:"
+
+#: gitk:4122
+msgid "Strictly sort by date"
+msgstr "严格按日期整理"
+
+#: gitk:4123
+msgid "Mark branch sides"
+msgstr "标记分支边界"
+
+#: gitk:4124
+msgid "Limit to first parent"
+msgstr "限制到第一个父提交"
+
+#: gitk:4125
+msgid "Simple history"
+msgstr "简易历史"
+
+#: gitk:4126
+msgid "Additional arguments to git log:"
+msgstr "git log命令的额外参数:"
+
+#: gitk:4127
+msgid "Enter files and directories to include, one per line:"
+msgstr "输入文件和文件夹来引用,每行一个:"
+
+#: gitk:4128
+msgid "Command to generate more commits to include:"
+msgstr "命令产生更多的提交来引用:"
+
+#: gitk:4252
+msgid "Gitk: edit view"
+msgstr "Gitk: 编辑视图"
+
+#: gitk:4260
+msgid "-- criteria for selecting revisions"
+msgstr "-- 用来选择版本的规则"
+
+#: gitk:4265
+msgid "View Name"
+msgstr "视图名称"
+
+#: gitk:4340
+msgid "Apply (F5)"
+msgstr "应用(F5)"
+
+#: gitk:4378
+msgid "Error in commit selection arguments:"
+msgstr "提交选择参数错误:"
+
+#: gitk:4433 gitk:4486 gitk:4948 gitk:4962 gitk:6232 gitk:12524 gitk:12525
+msgid "None"
+msgstr "无"
+
+#: gitk:5045 gitk:5050
+msgid "Descendant"
+msgstr "子提交"
+
+#: gitk:5046
+msgid "Not descendant"
+msgstr "非子提交"
+
+#: gitk:5053 gitk:5058
+msgid "Ancestor"
+msgstr "父提交"
+
+#: gitk:5054
+msgid "Not ancestor"
+msgstr "非父提交"
+
+#: gitk:5348
+msgid "Local changes checked in to index but not committed"
+msgstr "已添加到索引但未提交的修改"
+
+#: gitk:5384
+msgid "Local uncommitted changes, not checked in to index"
+msgstr "未添加到索引且未提交的修改"
+
+#: gitk:7158
+msgid "and many more"
+msgstr "更多"
+
+#: gitk:7161
+msgid "many"
+msgstr "很多"
+
+#: gitk:7352
+msgid "Tags:"
+msgstr "Tags:"
+
+#: gitk:7369 gitk:7375 gitk:8854
+msgid "Parent"
+msgstr "父节点"
+
+#: gitk:7380
+msgid "Child"
+msgstr "子节点"
+
+#: gitk:7389
+msgid "Branch"
+msgstr "分支"
+
+#: gitk:7392
+msgid "Follows"
+msgstr "之后的tag"
+
+#: gitk:7395
+msgid "Precedes"
+msgstr "之前的tag"
+
+#: gitk:7990
+#, tcl-format
+msgid "Error getting diffs: %s"
+msgstr "获取差异错误:%s"
+
+#: gitk:8679
+msgid "Goto:"
+msgstr "转到:"
+
+#: gitk:8700
+#, tcl-format
+msgid "Short SHA1 id %s is ambiguous"
+msgstr "短格式的SHA1提交号%s不明确、有歧义"
+
+#: gitk:8707
+#, tcl-format
+msgid "Revision %s is not known"
+msgstr "版本%s未知"
+
+#: gitk:8717
+#, tcl-format
+msgid "SHA1 id %s is not known"
+msgstr "提交号(SHA1 id)%s未知"
+
+#: gitk:8719
+#, tcl-format
+msgid "Revision %s is not in the current view"
+msgstr "版本%s不在当前视图中"
+
+#: gitk:8861 gitk:8876
+msgid "Date"
+msgstr "日期"
+
+#: gitk:8864
+msgid "Children"
+msgstr "子节点"
+
+#: gitk:8927
+#, tcl-format
+msgid "Reset %s branch to here"
+msgstr "重置分支%s到此处"
+
+#: gitk:8929
+msgid "Detached head: can't reset"
+msgstr "分离的头(head):不能重置(reset)"
+
+#: gitk:9034 gitk:9040
+msgid "Skipping merge commit "
+msgstr "跳过合并提交"
+
+#: gitk:9049 gitk:9054
+msgid "Error getting patch ID for "
+msgstr "获取补丁ID出错"
+
+#: gitk:9050 gitk:9055
+msgid " - stopping\n"
+msgstr " — 停止中\n"
+
+#: gitk:9060 gitk:9063 gitk:9071 gitk:9085 gitk:9094
+msgid "Commit "
+msgstr "提交"
+
+#: gitk:9064
+msgid ""
+" is the same patch as\n"
+" "
+msgstr " 是相同的补丁(patch)\n "
+
+#: gitk:9072
+msgid ""
+" differs from\n"
+" "
+msgstr " 差异来自\n "
+
+#: gitk:9074
+msgid ""
+"Diff of commits:\n"
+"\n"
+msgstr "提交的差异(Diff):\n\n"
+
+#: gitk:9086 gitk:9095
+#, tcl-format
+msgid " has %s children - stopping\n"
+msgstr "有%s子节点 — 停止中\n"
+
+#: gitk:9114
+#, tcl-format
+msgid "Error writing commit to file: %s"
+msgstr "写入提交到文件出错:%s"
+
+#: gitk:9120
+#, tcl-format
+msgid "Error diffing commits: %s"
+msgstr "比较提交差异出错:%s"
+
+#: gitk:9166
+msgid "Top"
+msgstr "顶部"
+
+#: gitk:9167
+msgid "From"
+msgstr "从"
+
+#: gitk:9172
+msgid "To"
+msgstr "到"
+
+#: gitk:9196
+msgid "Generate patch"
+msgstr "生成补丁(patch)"
+
+#: gitk:9198
+msgid "From:"
+msgstr "从:"
+
+#: gitk:9207
+msgid "To:"
+msgstr "到:"
+
+#: gitk:9216
+msgid "Reverse"
+msgstr "反向(Reverse)"
+
+#: gitk:9218 gitk:9428
+msgid "Output file:"
+msgstr "输出文件:"
+
+#: gitk:9224
+msgid "Generate"
+msgstr "生成"
+
+#: gitk:9262
+msgid "Error creating patch:"
+msgstr "创建补丁(patch)出错:"
+
+#: gitk:9285 gitk:9416 gitk:9504
+msgid "ID:"
+msgstr "ID:"
+
+#: gitk:9294
+msgid "Tag name:"
+msgstr "Tag名称:"
+
+#: gitk:9297
+msgid "Tag message is optional"
+msgstr "Tag信息是可选的"
+
+#: gitk:9299
+msgid "Tag message:"
+msgstr "Tag信息:"
+
+#: gitk:9303 gitk:9474
+msgid "Create"
+msgstr "创建"
+
+#: gitk:9321
+msgid "No tag name specified"
+msgstr "未指定tag名称"
+
+#: gitk:9325
+#, tcl-format
+msgid "Tag \"%s\" already exists"
+msgstr "Tag\"%s\"已经存在"
+
+#: gitk:9335
+msgid "Error creating tag:"
+msgstr "创建tag出错:"
+
+#: gitk:9425
+msgid "Command:"
+msgstr "命令:"
+
+#: gitk:9433
+msgid "Write"
+msgstr "写入"
+
+#: gitk:9451
+msgid "Error writing commit:"
+msgstr "写入提交出错:"
+
+#: gitk:9473
+msgid "Create branch"
+msgstr "创建分支"
+
+#: gitk:9489
+#, tcl-format
+msgid "Rename branch %s"
+msgstr "重命名分支%s"
+
+#: gitk:9490
+msgid "Rename"
+msgstr "重命名"
+
+#: gitk:9514
+msgid "Name:"
+msgstr "名称:"
+
+#: gitk:9538
+msgid "Please specify a name for the new branch"
+msgstr "请指定新分支的名称"
+
+#: gitk:9543
+#, tcl-format
+msgid "Branch '%s' already exists. Overwrite?"
+msgstr "分支\"%s\"已经存在。覆盖它?"
+
+#: gitk:9587
+msgid "Please specify a new name for the branch"
+msgstr "请重新指定新分支的名称"
+
+#: gitk:9650
+#, tcl-format
+msgid "Commit %s is already included in branch %s -- really re-apply it?"
+msgstr "提交%s已经存在于分支%s。确定重新应用它?"
+
+#: gitk:9655
+msgid "Cherry-picking"
+msgstr "打补丁中(Cherry-picking)"
+
+#: gitk:9664
+#, tcl-format
+msgid ""
+"Cherry-pick failed because of local changes to file '%s'.\n"
+"Please commit, reset or stash your changes and try again."
+msgstr "打补丁(Cherry-pick)失败,因为本地修改了文件\"%s\"。\n请提交(commit)、重置(reset)或暂存(stash)修改后重试。"
+
+#: gitk:9670
+msgid ""
+"Cherry-pick failed because of merge conflict.\n"
+"Do you wish to run git citool to resolve it?"
+msgstr "打补丁(Cherry-pick)失败因为合并冲突。\n你是否希望运行git citool 来解决冲突?"
+
+#: gitk:9686 gitk:9744
+msgid "No changes committed"
+msgstr "无已经提交的修改"
+
+#: gitk:9713
+#, tcl-format
+msgid "Commit %s is not included in branch %s -- really revert it?"
+msgstr "提交%s不包含在分支%s中,确认回滚(revert)它?"
+
+#: gitk:9718
+msgid "Reverting"
+msgstr "回滚中(Reverting)"
+
+#: gitk:9726
+#, tcl-format
+msgid ""
+"Revert failed because of local changes to the following files:%s Please "
+"commit, reset or stash your changes and try again."
+msgstr "回滚(revert)失败,因为如下的本地文件修改:%s\n请提交(commit)、重置(reset)或者暂存(stash)改变后重试。"
+
+#: gitk:9730
+msgid ""
+"Revert failed because of merge conflict.\n"
+" Do you wish to run git citool to resolve it?"
+msgstr "回滚(revert)失败,因为合并冲突。\n你是否希望运行git citool来解决冲突?"
+
+#: gitk:9773
+msgid "Confirm reset"
+msgstr "确认重置(reset)"
+
+#: gitk:9775
+#, tcl-format
+msgid "Reset branch %s to %s?"
+msgstr "重置(reset)分支%s到%s?"
+
+#: gitk:9777
+msgid "Reset type:"
+msgstr "重置(reset)类型:"
+
+#: gitk:9780
+msgid "Soft: Leave working tree and index untouched"
+msgstr "软性:离开工作树,索引未改变"
+
+#: gitk:9783
+msgid "Mixed: Leave working tree untouched, reset index"
+msgstr "混合:离开工作树(未改变),索引重置"
+
+#: gitk:9786
+msgid ""
+"Hard: Reset working tree and index\n"
+"(discard ALL local changes)"
+msgstr "硬性:重置工作树和索引\n(丢弃所有的本地修改)"
+
+#: gitk:9803
+msgid "Resetting"
+msgstr "重置中(Resetting)"
+
+#: gitk:9876
+#, tcl-format
+msgid "A local branch named %s exists already"
+msgstr "本地分支%s已经存在"
+
+#: gitk:9884
+msgid "Checking out"
+msgstr "检出中(Checking out)"
+
+#: gitk:9943
+msgid "Cannot delete the currently checked-out branch"
+msgstr "不能删除当前检出(checkout)分支"
+
+#: gitk:9949
+#, tcl-format
+msgid ""
+"The commits on branch %s aren't on any other branch.\n"
+"Really delete branch %s?"
+msgstr "在分支%s上的提交不在其他任何分支上。\n确认删除分支%s?"
+
+#: gitk:9980
+#, tcl-format
+msgid "Tags and heads: %s"
+msgstr "Tags和头指针(heads):%s"
+
+#: gitk:9997
+msgid "Filter"
+msgstr "过滤器"
+
+#: gitk:10293
+msgid ""
+"Error reading commit topology information; branch and preceding/following "
+"tag information will be incomplete."
+msgstr "读取提交拓扑信息出错;分支和之前/之后的tag信息将不能完成。"
+
+#: gitk:11270
+msgid "Tag"
+msgstr "标签(Tag)"
+
+#: gitk:11274
+msgid "Id"
+msgstr "Id"
+
+#: gitk:11357
+msgid "Gitk font chooser"
+msgstr "Gitk字体选择"
+
+#: gitk:11374
+msgid "B"
+msgstr "粗体"
+
+#: gitk:11377
+msgid "I"
+msgstr "斜体"
+
+#: gitk:11495
+msgid "Commit list display options"
+msgstr "提交列表展示选项"
+
+#: gitk:11498
+msgid "Maximum graph width (lines)"
+msgstr "最大图宽度(行数)"
+
+#: gitk:11502
+#, no-tcl-format
+msgid "Maximum graph width (% of pane)"
+msgstr "最大图宽度(%窗口百分比)"
+
+#: gitk:11505
+msgid "Show local changes"
+msgstr "显示本地修改"
+
+#: gitk:11508
+msgid "Auto-select SHA1 (length)"
+msgstr "自动选择SHA1(长度)"
+
+#: gitk:11512
+msgid "Hide remote refs"
+msgstr "隐藏远程引用"
+
+#: gitk:11516
+msgid "Diff display options"
+msgstr "差异(Diff)展示选项"
+
+#: gitk:11518
+msgid "Tab spacing"
+msgstr "制表符宽度"
+
+#: gitk:11521
+msgid "Display nearby tags/heads"
+msgstr "显示临近的tags/heads"
+
+#: gitk:11524
+msgid "Maximum # tags/heads to show"
+msgstr "最大tags/heads展示数量"
+
+#: gitk:11527
+msgid "Limit diffs to listed paths"
+msgstr "diff中列出文件限制"
+
+#: gitk:11530
+msgid "Support per-file encodings"
+msgstr "单独文件编码支持"
+
+#: gitk:11536 gitk:11683
+msgid "External diff tool"
+msgstr "外部差异(diff)工具"
+
+#: gitk:11537
+msgid "Choose..."
+msgstr "选择..."
+
+#: gitk:11542
+msgid "General options"
+msgstr "常规选项"
+
+#: gitk:11545
+msgid "Use themed widgets"
+msgstr "使用主题小部件"
+
+#: gitk:11547
+msgid "(change requires restart)"
+msgstr "(需重启生效)"
+
+#: gitk:11549
+msgid "(currently unavailable)"
+msgstr "(当前不可用)"
+
+#: gitk:11560
+msgid "Colors: press to choose"
+msgstr "颜色:点击来选择"
+
+#: gitk:11563
+msgid "Interface"
+msgstr "界面"
+
+#: gitk:11564
+msgid "interface"
+msgstr "界面"
+
+#: gitk:11567
+msgid "Background"
+msgstr "背景"
+
+#: gitk:11568 gitk:11598
+msgid "background"
+msgstr "背景"
+
+#: gitk:11571
+msgid "Foreground"
+msgstr "前景"
+
+#: gitk:11572
+msgid "foreground"
+msgstr "前景"
+
+#: gitk:11575
+msgid "Diff: old lines"
+msgstr "差异(Diff):老代码行"
+
+#: gitk:11576
+msgid "diff old lines"
+msgstr "差异(diff)老代码行"
+
+#: gitk:11580
+msgid "Diff: new lines"
+msgstr "差异(Diff):新代码行"
+
+#: gitk:11581
+msgid "diff new lines"
+msgstr "差异(diff)新代码行"
+
+#: gitk:11585
+msgid "Diff: hunk header"
+msgstr "差异(Diff):补丁片段头信息"
+
+#: gitk:11587
+msgid "diff hunk header"
+msgstr "差异(diff)补丁片段头信息"
+
+#: gitk:11591
+msgid "Marked line bg"
+msgstr "已标记代码行背景"
+
+#: gitk:11593
+msgid "marked line background"
+msgstr "已标记代码行背景"
+
+#: gitk:11597
+msgid "Select bg"
+msgstr "选择背景"
+
+#: gitk:11606
+msgid "Fonts: press to choose"
+msgstr "字体:点击来选择"
+
+#: gitk:11608
+msgid "Main font"
+msgstr "主字体"
+
+#: gitk:11609
+msgid "Diff display font"
+msgstr "差异(Diff)显示字体"
+
+#: gitk:11610
+msgid "User interface font"
+msgstr "用户界面字体"
+
+#: gitk:11632
+msgid "Gitk preferences"
+msgstr "Gitk偏好设置"
+
+#: gitk:11641
+msgid "General"
+msgstr "常规"
+
+#: gitk:11642
+msgid "Colors"
+msgstr "颜色"
+
+#: gitk:11643
+msgid "Fonts"
+msgstr "字体"
+
+#: gitk:11693
+#, tcl-format
+msgid "Gitk: choose color for %s"
+msgstr "Gitk:选择颜色用于%s"
+
+#: gitk:12206
+msgid ""
+"Sorry, gitk cannot run with this version of Tcl/Tk.\n"
+" Gitk requires at least Tcl/Tk 8.4."
+msgstr "对不起,gitk不能运行在当前版本的Tcl/Tk中。\nGitk运行需要最低版本为Tcl/Tk8.4。"
+
+#: gitk:12416
+msgid "Cannot find a git repository here."
+msgstr "在此位置未发现git仓库。"
+
+#: gitk:12463
+#, tcl-format
+msgid "Ambiguous argument '%s': both revision and filename"
+msgstr "不明确有歧义的参数\"%s\":版本和文件名称"
+
+#: gitk:12475
+msgid "Bad arguments to gitk:"
+msgstr "运行gitk参数错误:"
diff --git a/grep.c b/grep.c
index cd952ef5d3..0bb4cbd3d8 100644
--- a/grep.c
+++ b/grep.c
@@ -368,31 +368,20 @@ static int is_fixed(const char *s, size_t len)
return 1;
}
-static int has_null(const char *s, size_t len)
-{
- /*
- * regcomp cannot accept patterns with NULs so when using it
- * we consider any pattern containing a NUL fixed.
- */
- if (memchr(s, 0, len))
- return 1;
-
- return 0;
-}
-
#ifdef USE_LIBPCRE1
static void compile_pcre1_regexp(struct grep_pat *p, const struct grep_opt *opt)
{
const char *error;
int erroffset;
int options = PCRE_MULTILINE;
+ int study_options = 0;
if (opt->ignore_case) {
- if (has_non_ascii(p->pattern))
+ if (!opt->ignore_locale && has_non_ascii(p->pattern))
p->pcre1_tables = pcre_maketables();
options |= PCRE_CASELESS;
}
- if (is_utf8_locale() && has_non_ascii(p->pattern))
+ if (!opt->ignore_locale && is_utf8_locale() && has_non_ascii(p->pattern))
options |= PCRE_UTF8;
p->pcre1_regexp = pcre_compile(p->pattern, options, &error, &erroffset,
@@ -400,44 +389,31 @@ static void compile_pcre1_regexp(struct grep_pat *p, const struct grep_opt *opt)
if (!p->pcre1_regexp)
compile_regexp_failed(p, error);
- p->pcre1_extra_info = pcre_study(p->pcre1_regexp, GIT_PCRE_STUDY_JIT_COMPILE, &error);
- if (!p->pcre1_extra_info && error)
- die("%s", error);
-
-#ifdef GIT_PCRE1_USE_JIT
+#if defined(PCRE_CONFIG_JIT) && !defined(NO_LIBPCRE1_JIT)
pcre_config(PCRE_CONFIG_JIT, &p->pcre1_jit_on);
- if (p->pcre1_jit_on == 1) {
- p->pcre1_jit_stack = pcre_jit_stack_alloc(1, 1024 * 1024);
- if (!p->pcre1_jit_stack)
- die("Couldn't allocate PCRE JIT stack");
- pcre_assign_jit_stack(p->pcre1_extra_info, NULL, p->pcre1_jit_stack);
- } else if (p->pcre1_jit_on != 0) {
- BUG("The pcre1_jit_on variable should be 0 or 1, not %d",
- p->pcre1_jit_on);
- }
+ if (opt->debug)
+ fprintf(stderr, "pcre1_jit_on=%d\n", p->pcre1_jit_on);
+
+ if (p->pcre1_jit_on)
+ study_options = PCRE_STUDY_JIT_COMPILE;
#endif
+
+ p->pcre1_extra_info = pcre_study(p->pcre1_regexp, study_options, &error);
+ if (!p->pcre1_extra_info && error)
+ die("%s", error);
}
static int pcre1match(struct grep_pat *p, const char *line, const char *eol,
regmatch_t *match, int eflags)
{
- int ovector[30], ret, flags = 0;
+ int ovector[30], ret, flags = PCRE_NO_UTF8_CHECK;
if (eflags & REG_NOTBOL)
flags |= PCRE_NOTBOL;
-#ifdef GIT_PCRE1_USE_JIT
- if (p->pcre1_jit_on) {
- ret = pcre_jit_exec(p->pcre1_regexp, p->pcre1_extra_info, line,
- eol - line, 0, flags, ovector,
- ARRAY_SIZE(ovector), p->pcre1_jit_stack);
- } else
-#endif
- {
- ret = pcre_exec(p->pcre1_regexp, p->pcre1_extra_info, line,
- eol - line, 0, flags, ovector,
- ARRAY_SIZE(ovector));
- }
+ ret = pcre_exec(p->pcre1_regexp, p->pcre1_extra_info, line,
+ eol - line, 0, flags, ovector,
+ ARRAY_SIZE(ovector));
if (ret < 0 && ret != PCRE_ERROR_NOMATCH)
die("pcre_exec failed with error code %d", ret);
@@ -453,15 +429,12 @@ static int pcre1match(struct grep_pat *p, const char *line, const char *eol,
static void free_pcre1_regexp(struct grep_pat *p)
{
pcre_free(p->pcre1_regexp);
-#ifdef GIT_PCRE1_USE_JIT
- if (p->pcre1_jit_on) {
+#ifdef PCRE_CONFIG_JIT
+ if (p->pcre1_jit_on)
pcre_free_study(p->pcre1_extra_info);
- pcre_jit_stack_free(p->pcre1_jit_stack);
- } else
+ else
#endif
- {
pcre_free(p->pcre1_extra_info);
- }
pcre_free((void *)p->pcre1_tables);
}
#else /* !USE_LIBPCRE1 */
@@ -498,14 +471,15 @@ static void compile_pcre2_pattern(struct grep_pat *p, const struct grep_opt *opt
p->pcre2_compile_context = NULL;
if (opt->ignore_case) {
- if (has_non_ascii(p->pattern)) {
+ if (!opt->ignore_locale && has_non_ascii(p->pattern)) {
character_tables = pcre2_maketables(NULL);
p->pcre2_compile_context = pcre2_compile_context_create(NULL);
pcre2_set_character_tables(p->pcre2_compile_context, character_tables);
}
options |= PCRE2_CASELESS;
}
- if (is_utf8_locale() && has_non_ascii(p->pattern))
+ if (!opt->ignore_locale && is_utf8_locale() && has_non_ascii(p->pattern) &&
+ !(!opt->ignore_case && (p->fixed || p->is_fixed)))
options |= PCRE2_UTF;
p->pcre2_pattern = pcre2_compile((PCRE2_SPTR)p->pattern,
@@ -522,7 +496,9 @@ static void compile_pcre2_pattern(struct grep_pat *p, const struct grep_opt *opt
}
pcre2_config(PCRE2_CONFIG_JIT, &p->pcre2_jit_on);
- if (p->pcre2_jit_on == 1) {
+ if (opt->debug)
+ fprintf(stderr, "pcre2_jit_on=%d\n", p->pcre2_jit_on);
+ if (p->pcre2_jit_on) {
jitret = pcre2_jit_compile(p->pcre2_pattern, PCRE2_JIT_COMPLETE);
if (jitret)
die("Couldn't JIT the PCRE2 pattern '%s', got '%d'\n", p->pattern, jitret);
@@ -547,19 +523,11 @@ static void compile_pcre2_pattern(struct grep_pat *p, const struct grep_opt *opt
BUG("pcre2_pattern_info() failed: %d", patinforet);
if (jitsizearg == 0) {
p->pcre2_jit_on = 0;
+ if (opt->debug)
+ fprintf(stderr, "pcre2_jit_on=%d: (*NO_JIT) in regex\n",
+ p->pcre2_jit_on);
return;
}
-
- p->pcre2_jit_stack = pcre2_jit_stack_create(1, 1024 * 1024, NULL);
- if (!p->pcre2_jit_stack)
- die("Couldn't allocate PCRE2 JIT stack");
- p->pcre2_match_context = pcre2_match_context_create(NULL);
- if (!p->pcre2_match_context)
- die("Couldn't allocate PCRE2 match context");
- pcre2_jit_stack_assign(p->pcre2_match_context, NULL, p->pcre2_jit_stack);
- } else if (p->pcre2_jit_on != 0) {
- BUG("The pcre2_jit_on variable should be 0 or 1, not %d",
- p->pcre2_jit_on);
}
}
@@ -603,8 +571,6 @@ static void free_pcre2_pattern(struct grep_pat *p)
pcre2_compile_context_free(p->pcre2_compile_context);
pcre2_code_free(p->pcre2_pattern);
pcre2_match_data_free(p->pcre2_match_data);
- pcre2_jit_stack_free(p->pcre2_jit_stack);
- pcre2_match_context_free(p->pcre2_match_context);
}
#else /* !USE_LIBPCRE2 */
static void compile_pcre2_pattern(struct grep_pat *p, const struct grep_opt *opt)
@@ -626,7 +592,6 @@ static int pcre2match(struct grep_pat *p, const char *line, const char *eol,
static void free_pcre2_pattern(struct grep_pat *p)
{
}
-#endif /* !USE_LIBPCRE2 */
static void compile_fixed_regexp(struct grep_pat *p, struct grep_opt *opt)
{
@@ -647,46 +612,66 @@ static void compile_fixed_regexp(struct grep_pat *p, struct grep_opt *opt)
compile_regexp_failed(p, errbuf);
}
}
+#endif /* !USE_LIBPCRE2 */
static void compile_regexp(struct grep_pat *p, struct grep_opt *opt)
{
- int ascii_only;
int err;
int regflags = REG_NEWLINE;
p->word_regexp = opt->word_regexp;
p->ignore_case = opt->ignore_case;
- ascii_only = !has_non_ascii(p->pattern);
+ p->fixed = opt->fixed;
- /*
- * Even when -F (fixed) asks us to do a non-regexp search, we
- * may not be able to correctly case-fold when -i
- * (ignore-case) is asked (in which case, we'll synthesize a
- * regexp to match the pattern that matches regexp special
- * characters literally, while ignoring case differences). On
- * the other hand, even without -F, if the pattern does not
- * have any regexp special characters and there is no need for
- * case-folding search, we can internally turn it into a
- * simple string match using kws. p->fixed tells us if we
- * want to use kws.
- */
- if (opt->fixed ||
- has_null(p->pattern, p->patternlen) ||
- is_fixed(p->pattern, p->patternlen))
- p->fixed = !p->ignore_case || ascii_only;
-
- if (p->fixed) {
- p->kws = kwsalloc(p->ignore_case ? tolower_trans_tbl : NULL);
- kwsincr(p->kws, p->pattern, p->patternlen);
- kwsprep(p->kws);
- return;
- } else if (opt->fixed) {
- /*
- * We come here when the pattern has the non-ascii
- * characters we cannot case-fold, and asked to
- * ignore-case.
- */
+ if (memchr(p->pattern, 0, p->patternlen) && !opt->pcre2)
+ die(_("given pattern contains NULL byte (via -f <file>). This is only supported with -P under PCRE v2"));
+
+ p->is_fixed = is_fixed(p->pattern, p->patternlen);
+#ifdef USE_LIBPCRE2
+ if (!p->fixed && !p->is_fixed) {
+ const char *no_jit = "(*NO_JIT)";
+ const int no_jit_len = strlen(no_jit);
+ if (starts_with(p->pattern, no_jit) &&
+ is_fixed(p->pattern + no_jit_len,
+ p->patternlen - no_jit_len))
+ p->is_fixed = 1;
+ }
+#endif
+ if (p->fixed || p->is_fixed) {
+#ifdef USE_LIBPCRE2
+ opt->pcre2 = 1;
+ if (p->is_fixed) {
+ compile_pcre2_pattern(p, opt);
+ } else {
+ /*
+ * E.g. t7811-grep-open.sh relies on the
+ * pattern being restored.
+ */
+ char *old_pattern = p->pattern;
+ size_t old_patternlen = p->patternlen;
+ struct strbuf sb = STRBUF_INIT;
+
+ /*
+ * There is the PCRE2_LITERAL flag, but it's
+ * only in PCRE v2 10.30 and later. Needing to
+ * ifdef our way around that and dealing with
+ * it + PCRE2_MULTILINE being an error is more
+ * complex than just quoting this ourselves.
+ */
+ strbuf_add(&sb, "\\Q", 2);
+ strbuf_add(&sb, p->pattern, p->patternlen);
+ strbuf_add(&sb, "\\E", 2);
+
+ p->pattern = sb.buf;
+ p->patternlen = sb.len;
+ compile_pcre2_pattern(p, opt);
+ p->pattern = old_pattern;
+ p->patternlen = old_patternlen;
+ strbuf_release(&sb);
+ }
+#else /* !USE_LIBPCRE2 */
compile_fixed_regexp(p, opt);
+#endif /* !USE_LIBPCRE2 */
return;
}
@@ -1053,9 +1038,7 @@ void free_grep_patterns(struct grep_opt *opt)
case GREP_PATTERN: /* atom */
case GREP_PATTERN_HEAD:
case GREP_PATTERN_BODY:
- if (p->kws)
- kwsfree(p->kws);
- else if (p->pcre1_regexp)
+ if (p->pcre1_regexp)
free_pcre1_regexp(p);
else if (p->pcre2_pattern)
free_pcre2_pattern(p);
@@ -1115,29 +1098,12 @@ static void show_name(struct grep_opt *opt, const char *name)
opt->output(opt, opt->null_following_name ? "\0" : "\n", 1);
}
-static int fixmatch(struct grep_pat *p, char *line, char *eol,
- regmatch_t *match)
-{
- struct kwsmatch kwsm;
- size_t offset = kwsexec(p->kws, line, eol - line, &kwsm);
- if (offset == -1) {
- match->rm_so = match->rm_eo = -1;
- return REG_NOMATCH;
- } else {
- match->rm_so = offset;
- match->rm_eo = match->rm_so + kwsm.size[0];
- return 0;
- }
-}
-
static int patmatch(struct grep_pat *p, char *line, char *eol,
regmatch_t *match, int eflags)
{
int hit;
- if (p->fixed)
- hit = !fixmatch(p, line, eol, match);
- else if (p->pcre1_regexp)
+ if (p->pcre1_regexp)
hit = !pcre1match(p, line, eol, match, eflags);
else if (p->pcre2_pattern)
hit = !pcre2match(p, line, eol, match, eflags);
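
The quoting comment in the compile_regexp() hunk above deserves a concrete illustration: fixed-string patterns are now routed through PCRE2 by wrapping them in \Q...\E, since PCRE2_LITERAL only exists from PCRE2 10.30 onwards. Below is a hedged, standalone sketch of just that quoting step in plain C rather than git's strbuf API; quote_fixed_pattern() and the sample pattern are invented for illustration, and the real code additionally restores the original pattern afterwards.

	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>

	/*
	 * Wrap a literal pattern in \Q ... \E so a PCRE-style engine matches it
	 * verbatim.  grep.c does the same with strbuf_add(); this shortcut is
	 * only taken for patterns that is_fixed() already found free of regex
	 * metacharacters, so no stray "\E" can end the quoting early.
	 */
	static char *quote_fixed_pattern(const char *pattern, size_t len)
	{
		char *quoted = malloc(len + 5);	/* "\Q" + pattern + "\E" + NUL */

		if (!quoted)
			return NULL;
		memcpy(quoted, "\\Q", 2);
		memcpy(quoted + 2, pattern, len);
		memcpy(quoted + 2 + len, "\\E", 2);
		quoted[len + 4] = '\0';
		return quoted;
	}

	int main(void)
	{
		char *q = quote_fixed_pattern("a+b", 3);

		if (q) {
			printf("%s\n", q);	/* prints \Qa+b\E */
			free(q);
		}
		return 0;
	}
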
diff --git a/grep.h b/grep.h
index 1875880f37..05dc1bb98e 100644
--- a/grep.h
+++ b/grep.h
@@ -3,24 +3,12 @@
#include "color.h"
#ifdef USE_LIBPCRE1
#include <pcre.h>
-#ifdef PCRE_CONFIG_JIT
-#if PCRE_MAJOR >= 8 && PCRE_MINOR >= 32
-#ifndef NO_LIBPCRE1_JIT
-#define GIT_PCRE1_USE_JIT
-#define GIT_PCRE_STUDY_JIT_COMPILE PCRE_STUDY_JIT_COMPILE
-#endif
-#endif
-#endif
-#ifndef GIT_PCRE_STUDY_JIT_COMPILE
-#define GIT_PCRE_STUDY_JIT_COMPILE 0
-#endif
-#if PCRE_MAJOR <= 8 && PCRE_MINOR < 20
-typedef int pcre_jit_stack;
+#ifndef PCRE_NO_UTF8_CHECK
+#define PCRE_NO_UTF8_CHECK 0
#endif
#else
typedef int pcre;
typedef int pcre_extra;
-typedef int pcre_jit_stack;
#endif
#ifdef USE_LIBPCRE2
#define PCRE2_CODE_UNIT_WIDTH 8
@@ -29,10 +17,7 @@ typedef int pcre_jit_stack;
typedef int pcre2_code;
typedef int pcre2_match_data;
typedef int pcre2_compile_context;
-typedef int pcre2_match_context;
-typedef int pcre2_jit_stack;
#endif
-#include "kwset.h"
#include "thread-utils.h"
#include "userdiff.h"
@@ -88,17 +73,14 @@ struct grep_pat {
regex_t regexp;
pcre *pcre1_regexp;
pcre_extra *pcre1_extra_info;
- pcre_jit_stack *pcre1_jit_stack;
const unsigned char *pcre1_tables;
int pcre1_jit_on;
pcre2_code *pcre2_pattern;
pcre2_match_data *pcre2_match_data;
pcre2_compile_context *pcre2_compile_context;
- pcre2_match_context *pcre2_match_context;
- pcre2_jit_stack *pcre2_jit_stack;
uint32_t pcre2_jit_on;
- kwset_t kws;
unsigned fixed:1;
+ unsigned is_fixed:1;
unsigned ignore_case:1;
unsigned word_regexp:1;
};
@@ -173,6 +155,7 @@ struct grep_opt {
int funcbody;
int extended_regexp_option;
int pattern_type_option;
+ int ignore_locale;
char colors[NR_GREP_COLORS][COLOR_MAXLEN];
unsigned pre_context;
unsigned post_context;
diff --git a/hashmap.c b/hashmap.c
index d42f01ff5a..39c13110bc 100644
--- a/hashmap.c
+++ b/hashmap.c
@@ -140,8 +140,8 @@ static inline struct hashmap_entry **find_entry_ptr(const struct hashmap *map,
}
static int always_equal(const void *unused_cmp_data,
- const void *unused1,
- const void *unused2,
+ const struct hashmap_entry *unused1,
+ const struct hashmap_entry *unused2,
const void *unused_keydata)
{
return 0;
@@ -171,41 +171,49 @@ void hashmap_init(struct hashmap *map, hashmap_cmp_fn equals_function,
map->do_count_items = 1;
}
-void hashmap_free(struct hashmap *map, int free_entries)
+void hashmap_free_(struct hashmap *map, ssize_t entry_offset)
{
if (!map || !map->table)
return;
- if (free_entries) {
+ if (entry_offset >= 0) { /* called by hashmap_free_entries */
struct hashmap_iter iter;
struct hashmap_entry *e;
+
hashmap_iter_init(map, &iter);
while ((e = hashmap_iter_next(&iter)))
- free(e);
+ /*
+ * like container_of, but using caller-calculated
+ * offset (caller being hashmap_free_entries)
+ */
+ free((char *)e - entry_offset);
}
free(map->table);
memset(map, 0, sizeof(*map));
}
-void *hashmap_get(const struct hashmap *map, const void *key, const void *keydata)
+struct hashmap_entry *hashmap_get(const struct hashmap *map,
+ const struct hashmap_entry *key,
+ const void *keydata)
{
return *find_entry_ptr(map, key, keydata);
}
-void *hashmap_get_next(const struct hashmap *map, const void *entry)
+struct hashmap_entry *hashmap_get_next(const struct hashmap *map,
+ const struct hashmap_entry *entry)
{
- struct hashmap_entry *e = ((struct hashmap_entry *) entry)->next;
+ struct hashmap_entry *e = entry->next;
for (; e; e = e->next)
if (entry_equals(map, entry, e, NULL))
return e;
return NULL;
}
-void hashmap_add(struct hashmap *map, void *entry)
+void hashmap_add(struct hashmap *map, struct hashmap_entry *entry)
{
unsigned int b = bucket(map, entry);
/* add entry */
- ((struct hashmap_entry *) entry)->next = map->table[b];
+ entry->next = map->table[b];
map->table[b] = entry;
/* fix size and rehash if appropriate */
@@ -216,7 +224,9 @@ void hashmap_add(struct hashmap *map, void *entry)
}
}
-void *hashmap_remove(struct hashmap *map, const void *key, const void *keydata)
+struct hashmap_entry *hashmap_remove(struct hashmap *map,
+ const struct hashmap_entry *key,
+ const void *keydata)
{
struct hashmap_entry *old;
struct hashmap_entry **e = find_entry_ptr(map, key, keydata);
@@ -238,7 +248,8 @@ void *hashmap_remove(struct hashmap *map, const void *key, const void *keydata)
return old;
}
-void *hashmap_put(struct hashmap *map, void *entry)
+struct hashmap_entry *hashmap_put(struct hashmap *map,
+ struct hashmap_entry *entry)
{
struct hashmap_entry *old = hashmap_remove(map, entry, NULL);
hashmap_add(map, entry);
@@ -252,7 +263,7 @@ void hashmap_iter_init(struct hashmap *map, struct hashmap_iter *iter)
iter->next = NULL;
}
-void *hashmap_iter_next(struct hashmap_iter *iter)
+struct hashmap_entry *hashmap_iter_next(struct hashmap_iter *iter)
{
struct hashmap_entry *current = iter->next;
for (;;) {
@@ -275,10 +286,15 @@ struct pool_entry {
};
static int pool_entry_cmp(const void *unused_cmp_data,
- const struct pool_entry *e1,
- const struct pool_entry *e2,
- const unsigned char *keydata)
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
+ const void *keydata)
{
+ const struct pool_entry *e1, *e2;
+
+ e1 = container_of(eptr, const struct pool_entry, ent);
+ e2 = container_of(entry_or_key, const struct pool_entry, ent);
+
return e1->data != keydata &&
(e1->len != e2->len || memcmp(e1->data, keydata, e1->len));
}
@@ -290,18 +306,18 @@ const void *memintern(const void *data, size_t len)
/* initialize string pool hashmap */
if (!map.tablesize)
- hashmap_init(&map, (hashmap_cmp_fn) pool_entry_cmp, NULL, 0);
+ hashmap_init(&map, pool_entry_cmp, NULL, 0);
/* lookup interned string in pool */
- hashmap_entry_init(&key, memhash(data, len));
+ hashmap_entry_init(&key.ent, memhash(data, len));
key.len = len;
- e = hashmap_get(&map, &key, data);
+ e = hashmap_get_entry(&map, &key, ent, data);
if (!e) {
/* not found: create it */
FLEX_ALLOC_MEM(e, data, data, len);
- hashmap_entry_init(e, key.ent.hash);
+ hashmap_entry_init(&e->ent, key.ent.hash);
e->len = len;
- hashmap_add(&map, e);
+ hashmap_add(&map, &e->ent);
}
return e->data;
}
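
The rewritten pool_entry_cmp() above is the template for the new hashmap calling convention: comparison callbacks receive struct hashmap_entry pointers and recover their enclosing structs via container_of, instead of assuming the entry sits at offset 0. A hedged, self-contained sketch of that pattern follows; struct item, item_cmp and the stand-in definitions are invented and only mimic the shape of git's real declarations.

	#include <stddef.h>
	#include <string.h>

	/* stand-ins that mimic the shape of git's hashmap types */
	struct hashmap_entry { struct hashmap_entry *next; unsigned int hash; };

	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct item {
		char key[16];
		struct hashmap_entry ent;	/* no longer has to be the first member */
	};

	/* new-style comparison callback: returns 0 when the entries are equal */
	static int item_cmp(const void *cmp_data,
			    const struct hashmap_entry *eptr,
			    const struct hashmap_entry *entry_or_key,
			    const void *keydata)
	{
		const struct item *a = container_of(eptr, const struct item, ent);
		const struct item *b = container_of(entry_or_key, const struct item, ent);

		return strcmp(a->key, keydata ? (const char *)keydata : b->key);
	}

	int main(void)
	{
		struct item a = { "alpha", { NULL, 0 } };
		struct item b = { "alpha", { NULL, 0 } };

		/* 0: the two entries compare equal */
		return item_cmp(NULL, &a.ent, &b.ent, NULL);
	}
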
diff --git a/hashmap.h b/hashmap.h
index 8424911566..bd2701549f 100644
--- a/hashmap.h
+++ b/hashmap.h
@@ -13,7 +13,7 @@
*
* struct hashmap map;
* struct long2string {
- * struct hashmap_entry ent; // must be the first member!
+ * struct hashmap_entry ent;
* long key;
* char value[FLEX_ARRAY]; // be careful with allocating on stack!
* };
@@ -21,12 +21,16 @@
* #define COMPARE_VALUE 1
*
* static int long2string_cmp(const void *hashmap_cmp_fn_data,
- * const struct long2string *e1,
- * const struct long2string *e2,
+ * const struct hashmap_entry *eptr,
+ * const struct hashmap_entry *entry_or_key,
* const void *keydata)
* {
* const char *string = keydata;
* unsigned flags = *(unsigned *)hashmap_cmp_fn_data;
+ * const struct long2string *e1, *e2;
+ *
+ * e1 = container_of(eptr, const struct long2string, ent);
+ * e2 = container_of(entry_or_key, const struct long2string, ent);
*
* if (flags & COMPARE_VALUE)
* return e1->key != e2->key ||
@@ -41,54 +45,58 @@
* char value[255], action[32];
* unsigned flags = 0;
*
- * hashmap_init(&map, (hashmap_cmp_fn) long2string_cmp, &flags, 0);
+ * hashmap_init(&map, long2string_cmp, &flags, 0);
*
* while (scanf("%s %ld %s", action, &key, value)) {
*
* if (!strcmp("add", action)) {
* struct long2string *e;
* FLEX_ALLOC_STR(e, value, value);
- * hashmap_entry_init(e, memhash(&key, sizeof(long)));
+ * hashmap_entry_init(&e->ent, memhash(&key, sizeof(long)));
* e->key = key;
- * hashmap_add(&map, e);
+ * hashmap_add(&map, &e->ent);
* }
*
* if (!strcmp("print_all_by_key", action)) {
* struct long2string k, *e;
- * hashmap_entry_init(&k, memhash(&key, sizeof(long)));
+ * hashmap_entry_init(&k.ent, memhash(&key, sizeof(long)));
* k.key = key;
*
* flags &= ~COMPARE_VALUE;
- * e = hashmap_get(&map, &k, NULL);
+ * e = hashmap_get_entry(&map, &k, ent, NULL);
* if (e) {
* printf("first: %ld %s\n", e->key, e->value);
- * while ((e = hashmap_get_next(&map, e)))
+ * while ((e = hashmap_get_next_entry(&map, e,
+ * ent))) {
* printf("found more: %ld %s\n", e->key, e->value);
+ * }
* }
* }
*
* if (!strcmp("has_exact_match", action)) {
* struct long2string *e;
* FLEX_ALLOC_STR(e, value, value);
- * hashmap_entry_init(e, memhash(&key, sizeof(long)));
+ * hashmap_entry_init(&e->ent, memhash(&key, sizeof(long)));
* e->key = key;
*
* flags |= COMPARE_VALUE;
- * printf("%sfound\n", hashmap_get(&map, e, NULL) ? "" : "not ");
+ * printf("%sfound\n",
+ * hashmap_get(&map, &e->ent, NULL) ? "" : "not ");
* free(e);
* }
*
* if (!strcmp("has_exact_match_no_heap_alloc", action)) {
* struct long2string k;
- * hashmap_entry_init(&k, memhash(&key, sizeof(long)));
+ * hashmap_entry_init(&k.ent, memhash(&key, sizeof(long)));
* k.key = key;
*
* flags |= COMPARE_VALUE;
- * printf("%sfound\n", hashmap_get(&map, &k, value) ? "" : "not ");
+ * printf("%sfound\n",
+ * hashmap_get(&map, &k.ent, value) ? "" : "not ");
* }
*
* if (!strcmp("end", action)) {
- * hashmap_free(&map, 1);
+ * hashmap_free_entries(&map, struct long2string, ent);
* break;
* }
* }
@@ -133,7 +141,7 @@ static inline unsigned int oidhash(const struct object_id *oid)
/*
* struct hashmap_entry is an opaque structure representing an entry in the
- * hash table, which must be used as first member of user data structures.
+ * hash table.
* Ideally it should be followed by an int-sized member to prevent unused
* memory on 64-bit systems due to alignment.
*/
@@ -168,7 +176,8 @@ struct hashmap_entry {
* The `hashmap_cmp_fn_data` entry is the pointer given in the init function.
*/
typedef int (*hashmap_cmp_fn)(const void *hashmap_cmp_fn_data,
- const void *entry, const void *entry_or_key,
+ const struct hashmap_entry *entry,
+ const struct hashmap_entry *entry_or_key,
const void *keydata);
/*
@@ -223,13 +232,20 @@ void hashmap_init(struct hashmap *map,
const void *equals_function_data,
size_t initial_size);
+/* internal function for freeing hashmap */
+void hashmap_free_(struct hashmap *map, ssize_t offset);
+
/*
- * Frees a hashmap structure and allocated memory.
- *
- * If `free_entries` is true, each hashmap_entry in the map is freed as well
- * using stdlibs free().
+ * Frees a hashmap structure and allocated memory, leaves entries undisturbed
*/
-void hashmap_free(struct hashmap *map, int free_entries);
+#define hashmap_free(map) hashmap_free_(map, -1)
+
+/*
+ * Frees @map and all entries. @type is the struct type of the entry
+ * where @member is the hashmap_entry struct used to associate with @map
+ */
+#define hashmap_free_entries(map, type, member) \
+ hashmap_free_(map, offsetof(type, member));
/* hashmap_entry functions */
@@ -244,9 +260,9 @@ void hashmap_free(struct hashmap *map, int free_entries);
* your structure was allocated with xmalloc(), you can just free(3) it,
* and if it is on stack, you can just let it go out of scope).
*/
-static inline void hashmap_entry_init(void *entry, unsigned int hash)
+static inline void hashmap_entry_init(struct hashmap_entry *e,
+ unsigned int hash)
{
- struct hashmap_entry *e = entry;
e->hash = hash;
e->next = NULL;
}
@@ -286,8 +302,9 @@ static inline unsigned int hashmap_get_size(struct hashmap *map)
* If an entry with matching hash code is found, `key` and `keydata` are passed
* to `hashmap_cmp_fn` to decide whether the entry matches the key.
*/
-void *hashmap_get(const struct hashmap *map, const void *key,
- const void *keydata);
+struct hashmap_entry *hashmap_get(const struct hashmap *map,
+ const struct hashmap_entry *key,
+ const void *keydata);
/*
* Returns the hashmap entry for the specified hash code and key data,
@@ -301,9 +318,10 @@ void *hashmap_get(const struct hashmap *map, const void *key,
* `entry_or_key` parameter of `hashmap_cmp_fn` points to a hashmap_entry
* structure that should not be used in the comparison.
*/
-static inline void *hashmap_get_from_hash(const struct hashmap *map,
- unsigned int hash,
- const void *keydata)
+static inline struct hashmap_entry *hashmap_get_from_hash(
+ const struct hashmap *map,
+ unsigned int hash,
+ const void *keydata)
{
struct hashmap_entry key;
hashmap_entry_init(&key, hash);
@@ -318,7 +336,8 @@ static inline void *hashmap_get_from_hash(const struct hashmap *map,
* `entry` is the hashmap_entry to start the search from, obtained via a previous
* call to `hashmap_get` or `hashmap_get_next`.
*/
-void *hashmap_get_next(const struct hashmap *map, const void *entry);
+struct hashmap_entry *hashmap_get_next(const struct hashmap *map,
+ const struct hashmap_entry *entry);
/*
* Adds a hashmap entry. This allows to add duplicate entries (i.e.
@@ -327,7 +346,7 @@ void *hashmap_get_next(const struct hashmap *map, const void *entry);
* `map` is the hashmap structure.
* `entry` is the entry to add.
*/
-void hashmap_add(struct hashmap *map, void *entry);
+void hashmap_add(struct hashmap *map, struct hashmap_entry *entry);
/*
* Adds or replaces a hashmap entry. If the hashmap contains duplicate
@@ -337,7 +356,20 @@ void hashmap_add(struct hashmap *map, void *entry);
* `entry` is the entry to add or replace.
* Returns the replaced entry, or NULL if not found (i.e. the entry was added).
*/
-void *hashmap_put(struct hashmap *map, void *entry);
+struct hashmap_entry *hashmap_put(struct hashmap *map,
+ struct hashmap_entry *entry);
+
+/*
+ * Adds or replaces a hashmap entry contained within @keyvar,
+ * where @keyvar is a pointer to a struct containing a
+ * "struct hashmap_entry" @member.
+ *
+ * Returns the replaced pointer which is of the same type as @keyvar,
+ * or NULL if not found.
+ */
+#define hashmap_put_entry(map, keyvar, member) \
+ container_of_or_null_offset(hashmap_put(map, &(keyvar)->member), \
+ OFFSETOF_VAR(keyvar, member))
/*
* Removes a hashmap entry matching the specified key. If the hashmap contains
@@ -346,8 +378,24 @@ void *hashmap_put(struct hashmap *map, void *entry);
*
* Argument explanation is the same as in `hashmap_get`.
*/
-void *hashmap_remove(struct hashmap *map, const void *key,
- const void *keydata);
+struct hashmap_entry *hashmap_remove(struct hashmap *map,
+ const struct hashmap_entry *key,
+ const void *keydata);
+
+/*
+ * Removes a hashmap entry contained within @keyvar,
+ * where @keyvar is a pointer to a struct containing a
+ * "struct hashmap_entry" @member.
+ *
+ * See `hashmap_get` for an explanation of @keydata
+ *
+ * Returns the replaced pointer which is of the same type as @keyvar,
+ * or NULL if not found.
+ */
+#define hashmap_remove_entry(map, keyvar, member, keydata) \
+ container_of_or_null_offset( \
+ hashmap_remove(map, &(keyvar)->member, keydata), \
+ OFFSETOF_VAR(keyvar, member))
/*
* Returns the `bucket` an entry is stored in.
@@ -370,10 +418,10 @@ struct hashmap_iter {
void hashmap_iter_init(struct hashmap *map, struct hashmap_iter *iter);
/* Returns the next hashmap_entry, or NULL if there are no more entries. */
-void *hashmap_iter_next(struct hashmap_iter *iter);
+struct hashmap_entry *hashmap_iter_next(struct hashmap_iter *iter);
/* Initializes the iterator and returns the first entry, if any. */
-static inline void *hashmap_iter_first(struct hashmap *map,
+static inline struct hashmap_entry *hashmap_iter_first(struct hashmap *map,
struct hashmap_iter *iter)
{
hashmap_iter_init(map, iter);
@@ -381,6 +429,64 @@ static inline void *hashmap_iter_first(struct hashmap *map,
}
/*
+ * returns the first entry in @map using @iter, where the entry is of
+ * @type (e.g. "struct foo") and @member is the name of the
+ * "struct hashmap_entry" in @type
+ */
+#define hashmap_iter_first_entry(map, iter, type, member) \
+ container_of_or_null(hashmap_iter_first(map, iter), type, member)
+
+/* internal macro for hashmap_for_each_entry */
+#define hashmap_iter_next_entry_offset(iter, offset) \
+ container_of_or_null_offset(hashmap_iter_next(iter), offset)
+
+/* internal macro for hashmap_for_each_entry */
+#define hashmap_iter_first_entry_offset(map, iter, offset) \
+ container_of_or_null_offset(hashmap_iter_first(map, iter), offset)
+
+/*
+ * iterate through @map using @iter, @var is a pointer to a type
+ * containing a @member which is a "struct hashmap_entry"
+ */
+#define hashmap_for_each_entry(map, iter, var, member) \
+ for (var = hashmap_iter_first_entry_offset(map, iter, \
+ OFFSETOF_VAR(var, member)); \
+ var; \
+ var = hashmap_iter_next_entry_offset(iter, \
+ OFFSETOF_VAR(var, member)))
+
+/*
+ * returns a pointer of type matching @keyvar, or NULL if nothing found.
+ * @keyvar is a pointer to a struct containing a
+ * "struct hashmap_entry" @member.
+ */
+#define hashmap_get_entry(map, keyvar, member, keydata) \
+ container_of_or_null_offset( \
+ hashmap_get(map, &(keyvar)->member, keydata), \
+ OFFSETOF_VAR(keyvar, member))
+
+#define hashmap_get_entry_from_hash(map, hash, keydata, type, member) \
+ container_of_or_null(hashmap_get_from_hash(map, hash, keydata), \
+ type, member)
+/*
+ * returns the next equal pointer to @var, or NULL if not found.
+ * @var is a pointer of any type containing "struct hashmap_entry"
+ * @member is the name of the "struct hashmap_entry" field
+ */
+#define hashmap_get_next_entry(map, var, member) \
+ container_of_or_null_offset(hashmap_get_next(map, &(var)->member), \
+ OFFSETOF_VAR(var, member))
+
+/*
+ * iterate @map starting from @var, where @var is a pointer of @type
+ * and @member is the name of the "struct hashmap_entry" field in @type
+ */
+#define hashmap_for_each_entry_from(map, var, member) \
+ for (; \
+ var; \
+ var = hashmap_get_next_entry(map, var, member))
+
+/*
* Disable item counting and automatic rehashing when adding/removing items.
*
* Normally, the hashmap keeps track of the number of items in the map
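
Taken together, the macros above replace the old void-pointer workflow end to end. The following is a hedged usage sketch under the new API; struct widget and demo() are invented names, and it assumes compilation inside git's tree so that git-compat-util.h supplies xmalloc(), container_of and OFFSETOF_VAR as used elsewhere in this series.

	#include "git-compat-util.h"
	#include "hashmap.h"

	struct widget {
		struct hashmap_entry ent;	/* may live anywhere in the struct */
		int id;
	};

	static int widget_cmp(const void *cmp_data,
			      const struct hashmap_entry *eptr,
			      const struct hashmap_entry *entry_or_key,
			      const void *keydata)
	{
		const struct widget *a = container_of(eptr, const struct widget, ent);
		const struct widget *b = container_of(entry_or_key, const struct widget, ent);

		return a->id != b->id;	/* 0 means "equal" */
	}

	static void demo(void)
	{
		struct hashmap map;
		struct hashmap_iter iter;
		struct widget *w;
		int i;

		hashmap_init(&map, widget_cmp, NULL, 0);

		for (i = 0; i < 3; i++) {
			w = xmalloc(sizeof(*w));
			w->id = i;
			hashmap_entry_init(&w->ent, memhash(&w->id, sizeof(w->id)));
			hashmap_add(&map, &w->ent);
		}

		/* the macro applies container_of, so no casts at the call site */
		hashmap_for_each_entry(&map, &iter, w, ent)
			printf("widget %d\n", w->id);

		/* frees the table and every enclosing struct widget */
		hashmap_free_entries(&map, struct widget, ent);
	}
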
diff --git a/help.c b/help.c
index 5261d83ecf..9ff2be6b18 100644
--- a/help.c
+++ b/help.c
@@ -774,7 +774,8 @@ static struct string_list guess_refs(const char *ref)
return similar_refs;
}
-void help_unknown_ref(const char *ref, const char *cmd, const char *error)
+NORETURN void help_unknown_ref(const char *ref, const char *cmd,
+ const char *error)
{
int i;
struct string_list suggested_refs = guess_refs(ref);
diff --git a/help.h b/help.h
index b8780fbd0f..7a455beeb7 100644
--- a/help.h
+++ b/help.h
@@ -43,7 +43,7 @@ void list_commands(unsigned int colopts, struct cmdnames *main_cmds, struct cmdn
* call this to die(), when it is suspected that the user mistyped a
* ref to the command, to give suggested "correct" refs.
*/
-void help_unknown_ref(const char *ref, const char *cmd, const char *error);
+NORETURN void help_unknown_ref(const char *ref, const char *cmd, const char *error);
static inline void list_config_item(struct string_list *list,
const char *prefix,
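
The NORETURN annotation is the entire point of the help.h change: help_unknown_ref() always ends in die(), so declaring that lets compilers treat code after the call as unreachable. A hedged sketch of the kind of caller that benefits, modeled loosely on how 'git merge' reports an unknown ref (the wrapper name is invented and the message simplified):

	#include "cache.h"
	#include "commit.h"
	#include "help.h"

	static struct commit *lookup_merge_parent(const char *name)
	{
		struct commit *c = lookup_commit_reference_by_name(name);

		if (!c)
			help_unknown_ref(name, "merge",
					 _("not something we can merge"));
		/* with NORETURN the compiler knows c is non-NULL here */
		return c;
	}
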
diff --git a/http.c b/http.c
index 27aa0a3192..027a86d75d 100644
--- a/http.c
+++ b/http.c
@@ -513,9 +513,11 @@ static void set_proxyauth_name_password(CURL *result)
#else
struct strbuf s = STRBUF_INIT;
- strbuf_addstr_urlencode(&s, proxy_auth.username, 1);
+ strbuf_addstr_urlencode(&s, proxy_auth.username,
+ is_rfc3986_unreserved);
strbuf_addch(&s, ':');
- strbuf_addstr_urlencode(&s, proxy_auth.password, 1);
+ strbuf_addstr_urlencode(&s, proxy_auth.password,
+ is_rfc3986_unreserved);
curl_proxyuserpwd = strbuf_detach(&s, NULL);
curl_easy_setopt(result, CURLOPT_PROXYUSERPWD, curl_proxyuserpwd);
#endif
@@ -1073,6 +1075,7 @@ void http_init(struct remote *remote, const char *url, int proactive_auth)
git_config(urlmatch_config_entry, &config);
free(normalized_url);
+ string_list_clear(&config.vars, 1);
#if LIBCURL_VERSION_NUM >= 0x073800
if (http_ssl_backend) {
diff --git a/http.h b/http.h
index b429f1cf04..5e0ad724f9 100644
--- a/http.h
+++ b/http.h
@@ -25,8 +25,12 @@
#if LIBCURL_VERSION_NUM < 0x070704
#define curl_global_cleanup() do { /* nothing */ } while (0)
#endif
+
#if LIBCURL_VERSION_NUM < 0x070800
#define curl_global_init(a) do { /* nothing */ } while (0)
+#elif LIBCURL_VERSION_NUM >= 0x070c00
+#define curl_global_init(a) curl_global_init_mem(a, xmalloc, free, \
+ xrealloc, xstrdup, xcalloc)
#endif
#if (LIBCURL_VERSION_NUM < 0x070c04) || (LIBCURL_VERSION_NUM == 0x071000)
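
The new middle branch of the version ladder above routes libcurl's internal allocations through git's xmalloc() family whenever libcurl is 7.12.0 or newer, so allocation failures inside curl surface through git's usual error path. The same libcurl entry point can be exercised outside git's tree; below is a hedged sketch with toy wrappers standing in for git's helpers (log_malloc() and friends are invented for illustration).

	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>
	#include <curl/curl.h>

	/* toy stand-ins for git's xmalloc() family, just to show the hook points */
	static void *log_malloc(size_t n)
	{
		fprintf(stderr, "curl malloc %lu\n", (unsigned long)n);
		return malloc(n);
	}
	static void *log_realloc(void *p, size_t n) { return realloc(p, n); }
	static char *log_strdup(const char *s) { return strdup(s); }
	static void *log_calloc(size_t n, size_t m) { return calloc(n, m); }

	int main(void)
	{
		/* same call shape the patched macro produces, with git's helpers
		 * swapped for the local wrappers above */
		if (curl_global_init_mem(CURL_GLOBAL_ALL, log_malloc, free,
					 log_realloc, log_strdup, log_calloc))
			return 1;
		curl_global_cleanup();
		return 0;
	}
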
diff --git a/line-log.c b/line-log.c
index 3aff1849e7..9010e00950 100644
--- a/line-log.c
+++ b/line-log.c
@@ -737,6 +737,38 @@ static struct line_log_data *lookup_line_range(struct rev_info *revs,
return ret;
}
+static int same_paths_in_pathspec_and_range(struct pathspec *pathspec,
+ struct line_log_data *range)
+{
+ int i;
+ struct line_log_data *r;
+
+ for (i = 0, r = range; i < pathspec->nr && r; i++, r = r->next)
+ if (strcmp(pathspec->items[i].match, r->path))
+ return 0;
+ if (i < pathspec->nr || r)
+ /* different number of pathspec items and ranges */
+ return 0;
+
+ return 1;
+}
+
+static void parse_pathspec_from_ranges(struct pathspec *pathspec,
+ struct line_log_data *range)
+{
+ struct line_log_data *r;
+ struct argv_array array = ARGV_ARRAY_INIT;
+ const char **paths;
+
+ for (r = range; r; r = r->next)
+ argv_array_push(&array, r->path);
+ paths = argv_array_detach(&array);
+
+ parse_pathspec(pathspec, 0, PATHSPEC_PREFER_FULL, "", paths);
+ /* strings are now owned by pathspec */
+ free(paths);
+}
+
void line_log_init(struct rev_info *rev, const char *prefix, struct string_list *args)
{
struct commit *commit = NULL;
@@ -746,20 +778,7 @@ void line_log_init(struct rev_info *rev, const char *prefix, struct string_list
range = parse_lines(rev->diffopt.repo, commit, prefix, args);
add_line_range(rev, commit, range);
- if (!rev->diffopt.detect_rename) {
- struct line_log_data *r;
- struct argv_array array = ARGV_ARRAY_INIT;
- const char **paths;
-
- for (r = range; r; r = r->next)
- argv_array_push(&array, r->path);
- paths = argv_array_detach(&array);
-
- parse_pathspec(&rev->diffopt.pathspec, 0,
- PATHSPEC_PREFER_FULL, "", paths);
- /* strings are now owned by pathspec */
- free(paths);
- }
+ parse_pathspec_from_ranges(&rev->diffopt.pathspec, range);
}
static void move_diff_queue(struct diff_queue_struct *dst,
@@ -817,15 +836,29 @@ static void queue_diffs(struct line_log_data *range,
struct diff_queue_struct *queue,
struct commit *commit, struct commit *parent)
{
+ struct object_id *tree_oid, *parent_tree_oid;
+
assert(commit);
+ tree_oid = get_commit_tree_oid(commit);
+ parent_tree_oid = parent ? get_commit_tree_oid(parent) : NULL;
+
+ if (opt->detect_rename &&
+ !same_paths_in_pathspec_and_range(&opt->pathspec, range)) {
+ clear_pathspec(&opt->pathspec);
+ parse_pathspec_from_ranges(&opt->pathspec, range);
+ }
DIFF_QUEUE_CLEAR(&diff_queued_diff);
- diff_tree_oid(parent ? get_commit_tree_oid(parent) : NULL,
- get_commit_tree_oid(commit), "", opt);
- if (opt->detect_rename) {
+ diff_tree_oid(parent_tree_oid, tree_oid, "", opt);
+ if (opt->detect_rename && diff_might_be_rename()) {
+ /* must look at the full tree diff to detect renames */
+ clear_pathspec(&opt->pathspec);
+ DIFF_QUEUE_CLEAR(&diff_queued_diff);
+
+ diff_tree_oid(parent_tree_oid, tree_oid, "", opt);
+
filter_diffs_for_paths(range, 1);
- if (diff_might_be_rename())
- diffcore_std(opt);
+ diffcore_std(opt);
filter_diffs_for_paths(range, 0);
}
move_diff_queue(queue, &diff_queued_diff);
diff --git a/list-objects-filter-options.c b/list-objects-filter-options.c
index 1cb20c659c..256bcfbdfe 100644
--- a/list-objects-filter-options.c
+++ b/list-objects-filter-options.c
@@ -6,6 +6,14 @@
#include "list-objects.h"
#include "list-objects-filter.h"
#include "list-objects-filter-options.h"
+#include "promisor-remote.h"
+#include "trace.h"
+#include "url.h"
+
+static int parse_combine_filter(
+ struct list_objects_filter_options *filter_options,
+ const char *arg,
+ struct strbuf *errbuf);
/*
* Parse value of the argument to the "filter" keyword.
@@ -29,16 +37,11 @@ static int gently_parse_list_objects_filter(
{
const char *v0;
- if (filter_options->choice) {
- if (errbuf) {
- strbuf_addstr(
- errbuf,
- _("multiple filter-specs cannot be combined"));
- }
- return 1;
- }
+ if (!arg)
+ return 0;
- filter_options->filter_spec = strdup(arg);
+ if (filter_options->choice)
+ BUG("filter_options already populated");
if (!strcmp(arg, "blob:none")) {
filter_options->choice = LOFC_BLOB_NONE;
@@ -52,28 +55,14 @@ static int gently_parse_list_objects_filter(
} else if (skip_prefix(arg, "tree:", &v0)) {
if (!git_parse_ulong(v0, &filter_options->tree_exclude_depth)) {
- if (errbuf) {
- strbuf_addstr(
- errbuf,
- _("expected 'tree:<depth>'"));
- }
+ strbuf_addstr(errbuf, _("expected 'tree:<depth>'"));
return 1;
}
filter_options->choice = LOFC_TREE_DEPTH;
return 0;
} else if (skip_prefix(arg, "sparse:oid=", &v0)) {
- struct object_context oc;
- struct object_id sparse_oid;
-
- /*
- * Try to parse <oid-expression> into an OID for the current
- * command, but DO NOT complain if we don't have the blob or
- * ref locally.
- */
- if (!get_oid_with_context(the_repository, v0, GET_OID_BLOB,
- &sparse_oid, &oc))
- filter_options->sparse_oid_value = oiddup(&sparse_oid);
+ filter_options->sparse_oid_name = xstrdup(v0);
filter_options->choice = LOFC_SPARSE_OID;
return 0;
@@ -84,103 +73,298 @@ static int gently_parse_list_objects_filter(
_("sparse:path filters support has been dropped"));
}
return 1;
+
+ } else if (skip_prefix(arg, "combine:", &v0)) {
+ return parse_combine_filter(filter_options, v0, errbuf);
+
}
/*
* Please update _git_fetch() in git-completion.bash when you
* add new filters
*/
- if (errbuf)
- strbuf_addf(errbuf, _("invalid filter-spec '%s'"), arg);
+ strbuf_addf(errbuf, _("invalid filter-spec '%s'"), arg);
memset(filter_options, 0, sizeof(*filter_options));
return 1;
}
-int parse_list_objects_filter(struct list_objects_filter_options *filter_options,
- const char *arg)
+static const char *RESERVED_NON_WS = "~`!@#$^&*()[]{}\\;'\",<>?";
+
+static int has_reserved_character(
+ struct strbuf *sub_spec, struct strbuf *errbuf)
{
- struct strbuf buf = STRBUF_INIT;
- if (gently_parse_list_objects_filter(filter_options, arg, &buf))
- die("%s", buf.buf);
+ const char *c = sub_spec->buf;
+ while (*c) {
+ if (*c <= ' ' || strchr(RESERVED_NON_WS, *c)) {
+ strbuf_addf(
+ errbuf,
+ _("must escape char in sub-filter-spec: '%c'"),
+ *c);
+ return 1;
+ }
+ c++;
+ }
+
return 0;
}
+static int parse_combine_subfilter(
+ struct list_objects_filter_options *filter_options,
+ struct strbuf *subspec,
+ struct strbuf *errbuf)
+{
+ size_t new_index = filter_options->sub_nr;
+ char *decoded;
+ int result;
+
+ ALLOC_GROW_BY(filter_options->sub, filter_options->sub_nr, 1,
+ filter_options->sub_alloc);
+
+ decoded = url_percent_decode(subspec->buf);
+
+ result = has_reserved_character(subspec, errbuf) ||
+ gently_parse_list_objects_filter(
+ &filter_options->sub[new_index], decoded, errbuf);
+
+ free(decoded);
+ return result;
+}
+
+static int parse_combine_filter(
+ struct list_objects_filter_options *filter_options,
+ const char *arg,
+ struct strbuf *errbuf)
+{
+ struct strbuf **subspecs = strbuf_split_str(arg, '+', 0);
+ size_t sub;
+ int result = 0;
+
+ if (!subspecs[0]) {
+ strbuf_addstr(errbuf, _("expected something after combine:"));
+ result = 1;
+ goto cleanup;
+ }
+
+ for (sub = 0; subspecs[sub] && !result; sub++) {
+ if (subspecs[sub + 1]) {
+ /*
+ * This is not the last subspec. Remove trailing "+" so
+ * we can parse it.
+ */
+ size_t last = subspecs[sub]->len - 1;
+ assert(subspecs[sub]->buf[last] == '+');
+ strbuf_remove(subspecs[sub], last, 1);
+ }
+ result = parse_combine_subfilter(
+ filter_options, subspecs[sub], errbuf);
+ }
+
+ filter_options->choice = LOFC_COMBINE;
+
+cleanup:
+ strbuf_list_free(subspecs);
+ if (result) {
+ list_objects_filter_release(filter_options);
+ memset(filter_options, 0, sizeof(*filter_options));
+ }
+ return result;
+}
+
+static int allow_unencoded(char ch)
+{
+ if (ch <= ' ' || ch == '%' || ch == '+')
+ return 0;
+ return !strchr(RESERVED_NON_WS, ch);
+}
+
+static void filter_spec_append_urlencode(
+ struct list_objects_filter_options *filter, const char *raw)
+{
+ struct strbuf buf = STRBUF_INIT;
+ strbuf_addstr_urlencode(&buf, raw, allow_unencoded);
+ trace_printf("Add to combine filter-spec: %s\n", buf.buf);
+ string_list_append(&filter->filter_spec, strbuf_detach(&buf, NULL));
+}
+
+/*
+ * Changes filter_options into an equivalent LOFC_COMBINE filter options
+ * instance. Does not do anything if filter_options is already LOFC_COMBINE.
+ */
+static void transform_to_combine_type(
+ struct list_objects_filter_options *filter_options)
+{
+ assert(filter_options->choice);
+ if (filter_options->choice == LOFC_COMBINE)
+ return;
+ {
+ const int initial_sub_alloc = 2;
+ struct list_objects_filter_options *sub_array =
+ xcalloc(initial_sub_alloc, sizeof(*sub_array));
+ sub_array[0] = *filter_options;
+ memset(filter_options, 0, sizeof(*filter_options));
+ filter_options->sub = sub_array;
+ filter_options->sub_alloc = initial_sub_alloc;
+ }
+ filter_options->sub_nr = 1;
+ filter_options->choice = LOFC_COMBINE;
+ string_list_append(&filter_options->filter_spec, xstrdup("combine:"));
+ filter_spec_append_urlencode(
+ filter_options,
+ list_objects_filter_spec(&filter_options->sub[0]));
+ /*
+ * We don't need the filter_spec strings for subfilter specs, only the
+ * top level.
+ */
+ string_list_clear(&filter_options->sub[0].filter_spec, /*free_util=*/0);
+}
+
+void list_objects_filter_die_if_populated(
+ struct list_objects_filter_options *filter_options)
+{
+ if (filter_options->choice)
+ die(_("multiple filter-specs cannot be combined"));
+}
+
+void parse_list_objects_filter(
+ struct list_objects_filter_options *filter_options,
+ const char *arg)
+{
+ struct strbuf errbuf = STRBUF_INIT;
+ int parse_error;
+
+ if (!filter_options->choice) {
+ string_list_append(&filter_options->filter_spec, xstrdup(arg));
+
+ parse_error = gently_parse_list_objects_filter(
+ filter_options, arg, &errbuf);
+ } else {
+ /*
+ * Make filter_options an LOFC_COMBINE spec so we can trivially
+ * add subspecs to it.
+ */
+ transform_to_combine_type(filter_options);
+
+ string_list_append(&filter_options->filter_spec, xstrdup("+"));
+ filter_spec_append_urlencode(filter_options, arg);
+ ALLOC_GROW_BY(filter_options->sub, filter_options->sub_nr, 1,
+ filter_options->sub_alloc);
+
+ parse_error = gently_parse_list_objects_filter(
+ &filter_options->sub[filter_options->sub_nr - 1], arg,
+ &errbuf);
+ }
+ if (parse_error)
+ die("%s", errbuf.buf);
+}
+
int opt_parse_list_objects_filter(const struct option *opt,
const char *arg, int unset)
{
struct list_objects_filter_options *filter_options = opt->value;
- if (unset || !arg) {
+ if (unset || !arg)
list_objects_filter_set_no_filter(filter_options);
- return 0;
+ else
+ parse_list_objects_filter(filter_options, arg);
+ return 0;
+}
+
+const char *list_objects_filter_spec(struct list_objects_filter_options *filter)
+{
+ if (!filter->filter_spec.nr)
+ BUG("no filter_spec available for this filter");
+ if (filter->filter_spec.nr != 1) {
+ struct strbuf concatted = STRBUF_INIT;
+ strbuf_add_separated_string_list(
+ &concatted, "", &filter->filter_spec);
+ string_list_clear(&filter->filter_spec, /*free_util=*/0);
+ string_list_append(
+ &filter->filter_spec, strbuf_detach(&concatted, NULL));
}
- return parse_list_objects_filter(filter_options, arg);
+ return filter->filter_spec.items[0].string;
}
-void expand_list_objects_filter_spec(
- const struct list_objects_filter_options *filter,
- struct strbuf *expanded_spec)
+const char *expand_list_objects_filter_spec(
+ struct list_objects_filter_options *filter)
{
- strbuf_init(expanded_spec, strlen(filter->filter_spec));
- if (filter->choice == LOFC_BLOB_LIMIT)
- strbuf_addf(expanded_spec, "blob:limit=%lu",
+ if (filter->choice == LOFC_BLOB_LIMIT) {
+ struct strbuf expanded_spec = STRBUF_INIT;
+ strbuf_addf(&expanded_spec, "blob:limit=%lu",
filter->blob_limit_value);
- else if (filter->choice == LOFC_TREE_DEPTH)
- strbuf_addf(expanded_spec, "tree:%lu",
- filter->tree_exclude_depth);
- else
- strbuf_addstr(expanded_spec, filter->filter_spec);
+ string_list_clear(&filter->filter_spec, /*free_util=*/0);
+ string_list_append(
+ &filter->filter_spec,
+ strbuf_detach(&expanded_spec, NULL));
+ }
+
+ return list_objects_filter_spec(filter);
}
void list_objects_filter_release(
struct list_objects_filter_options *filter_options)
{
- free(filter_options->filter_spec);
- free(filter_options->sparse_oid_value);
+ size_t sub;
+
+ if (!filter_options)
+ return;
+ string_list_clear(&filter_options->filter_spec, /*free_util=*/0);
+ free(filter_options->sparse_oid_name);
+ for (sub = 0; sub < filter_options->sub_nr; sub++)
+ list_objects_filter_release(&filter_options->sub[sub]);
+ free(filter_options->sub);
memset(filter_options, 0, sizeof(*filter_options));
}
void partial_clone_register(
const char *remote,
- const struct list_objects_filter_options *filter_options)
+ struct list_objects_filter_options *filter_options)
{
- /*
- * Record the name of the partial clone remote in the
- * config and in the global variable -- the latter is
- * used throughout to indicate that partial clone is
- * enabled and to expect missing objects.
- */
- if (repository_format_partial_clone &&
- *repository_format_partial_clone &&
- strcmp(remote, repository_format_partial_clone))
- die(_("cannot change partial clone promisor remote"));
+ char *cfg_name;
+ char *filter_name;
- git_config_set("core.repositoryformatversion", "1");
- git_config_set("extensions.partialclone", remote);
+ /* Check if it is already registered */
+ if (!promisor_remote_find(remote)) {
+ git_config_set("core.repositoryformatversion", "1");
- repository_format_partial_clone = xstrdup(remote);
+ /* Add promisor config for the remote */
+ cfg_name = xstrfmt("remote.%s.promisor", remote);
+ git_config_set(cfg_name, "true");
+ free(cfg_name);
+ }
/*
* Record the initial filter-spec in the config as
* the default for subsequent fetches from this remote.
*/
- core_partial_clone_filter_default =
- xstrdup(filter_options->filter_spec);
- git_config_set("core.partialclonefilter",
- core_partial_clone_filter_default);
+ filter_name = xstrfmt("remote.%s.partialclonefilter", remote);
+ /* NEEDSWORK: 'expand' result leaking??? */
+ git_config_set(filter_name,
+ expand_list_objects_filter_spec(filter_options));
+ free(filter_name);
+
+ /* Make sure the config info is reset */
+ promisor_remote_reinit();
}
void partial_clone_get_default_filter_spec(
- struct list_objects_filter_options *filter_options)
+ struct list_objects_filter_options *filter_options,
+ const char *remote)
{
+ struct promisor_remote *promisor = promisor_remote_find(remote);
+ struct strbuf errbuf = STRBUF_INIT;
+
/*
* Parse default value, but silently ignore it if it is invalid.
*/
- if (!core_partial_clone_filter_default)
+ if (!promisor)
return;
+
+ string_list_append(&filter_options->filter_spec,
+ promisor->partial_clone_filter);
gently_parse_list_objects_filter(filter_options,
- core_partial_clone_filter_default,
- NULL);
+ promisor->partial_clone_filter,
+ &errbuf);
+ strbuf_release(&errbuf);
}
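
[Editor's note] The hunks above split a "combine:" spec on '+' and percent-decode each sub-spec before parsing it. As a rough illustration of that decoding step only, here is a minimal standalone sketch; it uses plain libc calls, and percent_decode() is a hypothetical stand-in for url_percent_decode(), not the function used by the patch.

#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for url_percent_decode(): expands %XX sequences. */
static char *percent_decode(const char *s)
{
	char *out = malloc(strlen(s) + 1);
	size_t j = 0;

	while (*s) {
		if (s[0] == '%' && isxdigit((unsigned char)s[1]) &&
		    isxdigit((unsigned char)s[2])) {
			char hex[3] = { s[1], s[2], '\0' };

			out[j++] = (char)strtol(hex, NULL, 16);
			s += 3;
		} else {
			out[j++] = *s++;
		}
	}
	out[j] = '\0';
	return out;
}

int main(void)
{
	/* The "combine:" prefix has already been stripped by the caller. */
	char spec[] = "blob:none+tree:3+sparse:oid=master%3Apathspec";
	char *sub = strtok(spec, "+");

	while (sub) {
		char *decoded = percent_decode(sub);

		printf("sub-filter: %s\n", decoded);
		free(decoded);
		sub = strtok(NULL, "+");
	}
	return 0;
}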
diff --git a/list-objects-filter-options.h b/list-objects-filter-options.h
index c54f0000fb..2ffb39222c 100644
--- a/list-objects-filter-options.h
+++ b/list-objects-filter-options.h
@@ -2,7 +2,7 @@
#define LIST_OBJECTS_FILTER_OPTIONS_H
#include "parse-options.h"
-#include "strbuf.h"
+#include "string-list.h"
/*
* The list of defined filters for list-objects.
@@ -13,6 +13,7 @@ enum list_objects_filter_choice {
LOFC_BLOB_LIMIT,
LOFC_TREE_DEPTH,
LOFC_SPARSE_OID,
+ LOFC_COMBINE,
LOFC__COUNT /* must be last */
};
@@ -23,8 +24,10 @@ struct list_objects_filter_options {
* commands that launch filtering sub-processes, or for communication
* over the network, don't use this value; use the result of
* expand_list_objects_filter_spec() instead.
+ * To get the raw filter spec given by the user, use the result of
+ * list_objects_filter_spec().
*/
- char *filter_spec;
+ struct string_list filter_spec;
/*
* 'choice' is determined by parsing the filter-spec. This indicates
@@ -38,19 +41,40 @@ struct list_objects_filter_options {
unsigned int no_filter : 1;
/*
- * Parsed values (fields) from within the filter-spec. These are
- * choice-specific; not all values will be defined for any given
- * choice.
+ * BEGIN choice-specific parsed values from within the filter-spec. Only
+ * some values will be defined for any given choice.
*/
- struct object_id *sparse_oid_value;
+
+ char *sparse_oid_name;
unsigned long blob_limit_value;
unsigned long tree_exclude_depth;
+
+ /* LOFC_COMBINE values */
+
+ /* This array contains all the subfilters which this filter combines. */
+ size_t sub_nr, sub_alloc;
+ struct list_objects_filter_options *sub;
+
+ /*
+ * END choice-specific parsed values.
+ */
};
/* Normalized command line arguments */
#define CL_ARG__FILTER "filter"
-int parse_list_objects_filter(
+void list_objects_filter_die_if_populated(
+ struct list_objects_filter_options *filter_options);
+
+/*
+ * Parses the filter spec string given by arg and either (1) simply places the
+ * result in filter_options if it is not yet populated or (2) combines it with
+ * the filter already in filter_options if it is already populated. In the case
+ * of (2), the filter specs are combined as if specified with 'combine:'.
+ *
+ * Dies and prints a user-facing message if an error occurs.
+ */
+void parse_list_objects_filter(
struct list_objects_filter_options *filter_options,
const char *arg);
@@ -65,13 +89,22 @@ int opt_parse_list_objects_filter(const struct option *opt,
/*
* Translates abbreviated numbers in the filter's filter_spec into their
* fully-expanded forms (e.g., "limit:blob=1k" becomes "limit:blob=1024").
+ * Returns a string owned by the list_objects_filter_options object.
*
- * This form should be used instead of the raw filter_spec field when
- * communicating with a remote process or subprocess.
+ * This form should be used instead of the raw list_objects_filter_spec()
+ * value when communicating with a remote process or subprocess.
+ */
+const char *expand_list_objects_filter_spec(
+ struct list_objects_filter_options *filter);
+
+/*
+ * Returns the filter spec string more or less as the user entered it.
+ * This form of the filter_spec can be used in user-facing
+ * messages. Returns a string owned by the list_objects_filter_options
+ * object.
*/
-void expand_list_objects_filter_spec(
- const struct list_objects_filter_options *filter,
- struct strbuf *expanded_spec);
+const char *list_objects_filter_spec(
+ struct list_objects_filter_options *filter);
void list_objects_filter_release(
struct list_objects_filter_options *filter_options);
@@ -85,8 +118,9 @@ static inline void list_objects_filter_set_no_filter(
void partial_clone_register(
const char *remote,
- const struct list_objects_filter_options *filter_options);
-void partial_clone_get_default_filter_spec(
struct list_objects_filter_options *filter_options);
+void partial_clone_get_default_filter_spec(
+ struct list_objects_filter_options *filter_options,
+ const char *remote);
#endif /* LIST_OBJECTS_FILTER_OPTIONS_H */
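
[Editor's note] The new parse_list_objects_filter() above upgrades an already-populated filter to LOFC_COMBINE when a second spec arrives, moving the existing filter into sub[0] and appending the new one. The following self-contained sketch mimics only that "upgrade on second spec" shape with hypothetical types and names (filter_opts, add_filter); it is not the git code.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical, simplified model of the "upgrade to combine" step. */
enum choice { CHOICE_NONE = 0, CHOICE_SINGLE, CHOICE_COMBINE };

struct filter_opts {
	enum choice choice;
	const char *spec;        /* only used for CHOICE_SINGLE */
	struct filter_opts *sub; /* only used for CHOICE_COMBINE */
	size_t sub_nr, sub_alloc;
};

static void add_filter(struct filter_opts *fo, const char *spec)
{
	if (fo->choice == CHOICE_NONE) {
		fo->choice = CHOICE_SINGLE;
		fo->spec = spec;
		return;
	}
	if (fo->choice == CHOICE_SINGLE) {
		/* Second spec arrives: move the existing filter into sub[0]. */
		struct filter_opts first = *fo;

		memset(fo, 0, sizeof(*fo));
		fo->choice = CHOICE_COMBINE;
		fo->sub_alloc = 2;
		fo->sub = calloc(fo->sub_alloc, sizeof(*fo->sub));
		fo->sub[fo->sub_nr++] = first;
	}
	if (fo->sub_nr == fo->sub_alloc) {
		fo->sub_alloc *= 2;
		fo->sub = realloc(fo->sub, fo->sub_alloc * sizeof(*fo->sub));
	}
	fo->sub[fo->sub_nr].choice = CHOICE_SINGLE;
	fo->sub[fo->sub_nr].spec = spec;
	fo->sub_nr++;
}

int main(void)
{
	struct filter_opts fo = { CHOICE_NONE };
	size_t i;

	add_filter(&fo, "blob:none");
	add_filter(&fo, "tree:2");	/* triggers the upgrade to a combine */

	for (i = 0; i < fo.sub_nr; i++)
		printf("sub[%zu] = %s\n", i, fo.sub[i].spec);

	free(fo.sub);
	return 0;
}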
diff --git a/list-objects-filter.c b/list-objects-filter.c
index 36e1f774bc..1e8d4e763d 100644
--- a/list-objects-filter.c
+++ b/list-objects-filter.c
@@ -26,11 +26,46 @@
*/
#define FILTER_SHOWN_BUT_REVISIT (1<<21)
-/*
- * A filter for list-objects to omit ALL blobs from the traversal.
- * And to OPTIONALLY collect a list of the omitted OIDs.
- */
-struct filter_blobs_none_data {
+struct subfilter {
+ struct filter *filter;
+ struct oidset seen;
+ struct oidset omits;
+ struct object_id skip_tree;
+ unsigned is_skipping_tree : 1;
+};
+
+struct filter {
+ enum list_objects_filter_result (*filter_object_fn)(
+ struct repository *r,
+ enum list_objects_filter_situation filter_situation,
+ struct object *obj,
+ const char *pathname,
+ const char *filename,
+ struct oidset *omits,
+ void *filter_data);
+
+ /*
+ * Optional. If this function is supplied and the filter needs
+ * to collect omits, then this function is called once before
+ * free_fn is called.
+ *
+ * This is required because the following two conditions hold:
+ *
+ * a. A tree filter can add and remove objects as an object
+ * graph is traversed.
+ * b. A combine filter's omit set is the union of all its
+ * subfilters, which may include tree: filters.
+ *
+ * As such, the omits sets must be separate sets, and can only
+ * be unioned after the traversal is completed.
+ */
+ void (*finalize_omits_fn)(struct oidset *omits, void *filter_data);
+
+ void (*free_fn)(void *filter_data);
+
+ void *filter_data;
+
+ /* If non-NULL, the filter collects a list of the omitted OIDs here. */
struct oidset *omits;
};
@@ -40,10 +75,9 @@ static enum list_objects_filter_result filter_blobs_none(
struct object *obj,
const char *pathname,
const char *filename,
+ struct oidset *omits,
void *filter_data_)
{
- struct filter_blobs_none_data *filter_data = filter_data_;
-
switch (filter_situation) {
default:
BUG("unknown filter_situation: %d", filter_situation);
@@ -61,24 +95,18 @@ static enum list_objects_filter_result filter_blobs_none(
assert(obj->type == OBJ_BLOB);
assert((obj->flags & SEEN) == 0);
- if (filter_data->omits)
- oidset_insert(filter_data->omits, &obj->oid);
+ if (omits)
+ oidset_insert(omits, &obj->oid);
return LOFR_MARK_SEEN; /* but not LOFR_DO_SHOW (hard omit) */
}
}
-static void *filter_blobs_none__init(
- struct oidset *omitted,
+static void filter_blobs_none__init(
struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn)
+ struct filter *filter)
{
- struct filter_blobs_none_data *d = xcalloc(1, sizeof(*d));
- d->omits = omitted;
-
- *filter_fn = filter_blobs_none;
- *filter_free_fn = free;
- return d;
+ filter->filter_object_fn = filter_blobs_none;
+ filter->free_fn = free;
}
/*
@@ -86,8 +114,6 @@ static void *filter_blobs_none__init(
* Can OPTIONALLY collect a list of the omitted OIDs.
*/
struct filter_trees_depth_data {
- struct oidset *omits;
-
/*
* Maps trees to the minimum depth at which they were seen. It is not
* necessary to re-traverse a tree at deeper or equal depths than it has
@@ -110,16 +136,16 @@ struct seen_map_entry {
/* Returns 1 if the oid was in the omits set before it was invoked. */
static int filter_trees_update_omits(
struct object *obj,
- struct filter_trees_depth_data *filter_data,
+ struct oidset *omits,
int include_it)
{
- if (!filter_data->omits)
+ if (!omits)
return 0;
if (include_it)
- return oidset_remove(filter_data->omits, &obj->oid);
+ return oidset_remove(omits, &obj->oid);
else
- return oidset_insert(filter_data->omits, &obj->oid);
+ return oidset_insert(omits, &obj->oid);
}
static enum list_objects_filter_result filter_trees_depth(
@@ -128,6 +154,7 @@ static enum list_objects_filter_result filter_trees_depth(
struct object *obj,
const char *pathname,
const char *filename,
+ struct oidset *omits,
void *filter_data_)
{
struct filter_trees_depth_data *filter_data = filter_data_;
@@ -152,7 +179,7 @@ static enum list_objects_filter_result filter_trees_depth(
return LOFR_ZERO;
case LOFS_BLOB:
- filter_trees_update_omits(obj, filter_data, include_it);
+ filter_trees_update_omits(obj, omits, include_it);
return include_it ? LOFR_MARK_SEEN | LOFR_DO_SHOW : LOFR_ZERO;
case LOFS_BEGIN_TREE:
@@ -173,12 +200,12 @@ static enum list_objects_filter_result filter_trees_depth(
filter_res = LOFR_SKIP_TREE;
} else {
int been_omitted = filter_trees_update_omits(
- obj, filter_data, include_it);
+ obj, omits, include_it);
seen_info->depth = filter_data->current_depth;
if (include_it)
filter_res = LOFR_DO_SHOW;
- else if (filter_data->omits && !been_omitted)
+ else if (omits && !been_omitted)
/*
* Must update omit information of children
* recursively; they have not been omitted yet.
@@ -201,21 +228,18 @@ static void filter_trees_free(void *filter_data) {
free(d);
}
-static void *filter_trees_depth__init(
- struct oidset *omitted,
+static void filter_trees_depth__init(
struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn)
+ struct filter *filter)
{
struct filter_trees_depth_data *d = xcalloc(1, sizeof(*d));
- d->omits = omitted;
oidmap_init(&d->seen_at_depth, 0);
d->exclude_depth = filter_options->tree_exclude_depth;
d->current_depth = 0;
- *filter_fn = filter_trees_depth;
- *filter_free_fn = filter_trees_free;
- return d;
+ filter->filter_data = d;
+ filter->filter_object_fn = filter_trees_depth;
+ filter->free_fn = filter_trees_free;
}
/*
@@ -223,7 +247,6 @@ static void *filter_trees_depth__init(
* And to OPTIONALLY collect a list of the omitted OIDs.
*/
struct filter_blobs_limit_data {
- struct oidset *omits;
unsigned long max_bytes;
};
@@ -233,6 +256,7 @@ static enum list_objects_filter_result filter_blobs_limit(
struct object *obj,
const char *pathname,
const char *filename,
+ struct oidset *omits,
void *filter_data_)
{
struct filter_blobs_limit_data *filter_data = filter_data_;
@@ -270,30 +294,27 @@ static enum list_objects_filter_result filter_blobs_limit(
if (object_length < filter_data->max_bytes)
goto include_it;
- if (filter_data->omits)
- oidset_insert(filter_data->omits, &obj->oid);
+ if (omits)
+ oidset_insert(omits, &obj->oid);
return LOFR_MARK_SEEN; /* but not LOFR_DO_SHOW (hard omit) */
}
include_it:
- if (filter_data->omits)
- oidset_remove(filter_data->omits, &obj->oid);
+ if (omits)
+ oidset_remove(omits, &obj->oid);
return LOFR_MARK_SEEN | LOFR_DO_SHOW;
}
-static void *filter_blobs_limit__init(
- struct oidset *omitted,
+static void filter_blobs_limit__init(
struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn)
+ struct filter *filter)
{
struct filter_blobs_limit_data *d = xcalloc(1, sizeof(*d));
- d->omits = omitted;
d->max_bytes = filter_options->blob_limit_value;
- *filter_fn = filter_blobs_limit;
- *filter_free_fn = free;
- return d;
+ filter->filter_data = d;
+ filter->filter_object_fn = filter_blobs_limit;
+ filter->free_fn = free;
}
/*
@@ -307,12 +328,12 @@ static void *filter_blobs_limit__init(
*/
struct frame {
/*
- * defval is the usual default include/exclude value that
+ * default_match is the usual default include/exclude value that
* should be inherited as we recurse into directories based
* upon pattern matching of the directory itself or of a
* containing directory.
*/
- int defval;
+ enum pattern_match_result default_match;
/*
* 1 if the directory (recursively) contains any provisionally
@@ -326,8 +347,7 @@ struct frame {
};
struct filter_sparse_data {
- struct oidset *omits;
- struct exclude_list el;
+ struct pattern_list pl;
size_t nr, alloc;
struct frame *array_frame;
@@ -339,11 +359,13 @@ static enum list_objects_filter_result filter_sparse(
struct object *obj,
const char *pathname,
const char *filename,
+ struct oidset *omits,
void *filter_data_)
{
struct filter_sparse_data *filter_data = filter_data_;
- int val, dtype;
+ int dtype;
struct frame *frame;
+ enum pattern_match_result match;
switch (filter_situation) {
default:
@@ -352,15 +374,15 @@ static enum list_objects_filter_result filter_sparse(
case LOFS_BEGIN_TREE:
assert(obj->type == OBJ_TREE);
dtype = DT_DIR;
- val = is_excluded_from_list(pathname, strlen(pathname),
- filename, &dtype, &filter_data->el,
- r->index);
- if (val < 0)
- val = filter_data->array_frame[filter_data->nr - 1].defval;
+ match = path_matches_pattern_list(pathname, strlen(pathname),
+ filename, &dtype, &filter_data->pl,
+ r->index);
+ if (match == UNDECIDED)
+ match = filter_data->array_frame[filter_data->nr - 1].default_match;
ALLOC_GROW(filter_data->array_frame, filter_data->nr + 1,
filter_data->alloc);
- filter_data->array_frame[filter_data->nr].defval = val;
+ filter_data->array_frame[filter_data->nr].default_match = match;
filter_data->array_frame[filter_data->nr].child_prov_omit = 0;
filter_data->nr++;
@@ -414,14 +436,14 @@ static enum list_objects_filter_result filter_sparse(
frame = &filter_data->array_frame[filter_data->nr - 1];
dtype = DT_REG;
- val = is_excluded_from_list(pathname, strlen(pathname),
- filename, &dtype, &filter_data->el,
+ match = path_matches_pattern_list(pathname, strlen(pathname),
+ filename, &dtype, &filter_data->pl,
r->index);
- if (val < 0)
- val = frame->defval;
- if (val > 0) {
- if (filter_data->omits)
- oidset_remove(filter_data->omits, &obj->oid);
+ if (match == UNDECIDED)
+ match = frame->default_match;
+ if (match == MATCHED) {
+ if (omits)
+ oidset_remove(omits, &obj->oid);
return LOFR_MARK_SEEN | LOFR_DO_SHOW;
}
@@ -435,8 +457,8 @@ static enum list_objects_filter_result filter_sparse(
* Leave the LOFR_ bits unset so that if the blob appears
* again in the traversal, we will be asked again.
*/
- if (filter_data->omits)
- oidset_insert(filter_data->omits, &obj->oid);
+ if (omits)
+ oidset_insert(omits, &obj->oid);
/*
* Remember that at least 1 blob in this tree was
@@ -456,33 +478,169 @@ static void filter_sparse_free(void *filter_data)
free(d);
}
-static void *filter_sparse_oid__init(
- struct oidset *omitted,
+static void filter_sparse_oid__init(
struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn)
+ struct filter *filter)
{
struct filter_sparse_data *d = xcalloc(1, sizeof(*d));
- d->omits = omitted;
- if (add_excludes_from_blob_to_list(filter_options->sparse_oid_value,
- NULL, 0, &d->el) < 0)
- die("could not load filter specification");
+ struct object_context oc;
+ struct object_id sparse_oid;
+
+ if (get_oid_with_context(the_repository,
+ filter_options->sparse_oid_name,
+ GET_OID_BLOB, &sparse_oid, &oc))
+ die(_("unable to access sparse blob in '%s'"),
+ filter_options->sparse_oid_name);
+ if (add_patterns_from_blob_to_list(&sparse_oid, "", 0, &d->pl) < 0)
+ die(_("unable to parse sparse filter data in %s"),
+ oid_to_hex(&sparse_oid));
ALLOC_GROW(d->array_frame, d->nr + 1, d->alloc);
- d->array_frame[d->nr].defval = 0; /* default to include */
+ d->array_frame[d->nr].default_match = 0; /* default to include */
d->array_frame[d->nr].child_prov_omit = 0;
d->nr++;
- *filter_fn = filter_sparse;
- *filter_free_fn = filter_sparse_free;
- return d;
+ filter->filter_data = d;
+ filter->filter_object_fn = filter_sparse;
+ filter->free_fn = filter_sparse_free;
}
-typedef void *(*filter_init_fn)(
- struct oidset *omitted,
+/* A filter which only shows objects shown by all sub-filters. */
+struct combine_filter_data {
+ struct subfilter *sub;
+ size_t nr;
+};
+
+static enum list_objects_filter_result process_subfilter(
+ struct repository *r,
+ enum list_objects_filter_situation filter_situation,
+ struct object *obj,
+ const char *pathname,
+ const char *filename,
+ struct subfilter *sub)
+{
+ enum list_objects_filter_result result;
+
+ /*
+ * Check and update is_skipping_tree before oidset_contains so
+ * that is_skipping_tree gets unset even when the object is
+ * marked as seen. As of this writing, no filter that uses
+ * LOFR_SKIP_TREE also uses LOFR_MARK_SEEN on trees, so the
+ * ordering is only theoretically important. Be cautious if more
+ * filters have been added and you change the order of the
+ * checks below!
+ */
+ if (sub->is_skipping_tree) {
+ if (filter_situation == LOFS_END_TREE &&
+ oideq(&obj->oid, &sub->skip_tree))
+ sub->is_skipping_tree = 0;
+ else
+ return LOFR_ZERO;
+ }
+ if (oidset_contains(&sub->seen, &obj->oid))
+ return LOFR_ZERO;
+
+ result = list_objects_filter__filter_object(
+ r, filter_situation, obj, pathname, filename, sub->filter);
+
+ if (result & LOFR_MARK_SEEN)
+ oidset_insert(&sub->seen, &obj->oid);
+
+ if (result & LOFR_SKIP_TREE) {
+ sub->is_skipping_tree = 1;
+ sub->skip_tree = obj->oid;
+ }
+
+ return result;
+}
+
+static enum list_objects_filter_result filter_combine(
+ struct repository *r,
+ enum list_objects_filter_situation filter_situation,
+ struct object *obj,
+ const char *pathname,
+ const char *filename,
+ struct oidset *omits,
+ void *filter_data)
+{
+ struct combine_filter_data *d = filter_data;
+ enum list_objects_filter_result combined_result =
+ LOFR_DO_SHOW | LOFR_MARK_SEEN | LOFR_SKIP_TREE;
+ size_t sub;
+
+ for (sub = 0; sub < d->nr; sub++) {
+ enum list_objects_filter_result sub_result = process_subfilter(
+ r, filter_situation, obj, pathname, filename,
+ &d->sub[sub]);
+ if (!(sub_result & LOFR_DO_SHOW))
+ combined_result &= ~LOFR_DO_SHOW;
+ if (!(sub_result & LOFR_MARK_SEEN))
+ combined_result &= ~LOFR_MARK_SEEN;
+ if (!d->sub[sub].is_skipping_tree)
+ combined_result &= ~LOFR_SKIP_TREE;
+ }
+
+ return combined_result;
+}
+
+static void filter_combine__free(void *filter_data)
+{
+ struct combine_filter_data *d = filter_data;
+ size_t sub;
+ for (sub = 0; sub < d->nr; sub++) {
+ list_objects_filter__free(d->sub[sub].filter);
+ oidset_clear(&d->sub[sub].seen);
+ if (d->sub[sub].omits.set.size)
+ BUG("expected oidset to be cleared already");
+ }
+ free(d->sub);
+}
+
+static void add_all(struct oidset *dest, struct oidset *src)
+{
+ struct oidset_iter iter;
+ struct object_id *src_oid;
+
+ oidset_iter_init(src, &iter);
+ while ((src_oid = oidset_iter_next(&iter)) != NULL)
+ oidset_insert(dest, src_oid);
+}
+
+static void filter_combine__finalize_omits(
+ struct oidset *omits,
+ void *filter_data)
+{
+ struct combine_filter_data *d = filter_data;
+ size_t sub;
+
+ for (sub = 0; sub < d->nr; sub++) {
+ add_all(omits, &d->sub[sub].omits);
+ oidset_clear(&d->sub[sub].omits);
+ }
+}
+
+static void filter_combine__init(
struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn);
+ struct filter *filter)
+{
+ struct combine_filter_data *d = xcalloc(1, sizeof(*d));
+ size_t sub;
+
+ d->nr = filter_options->sub_nr;
+ d->sub = xcalloc(d->nr, sizeof(*d->sub));
+ for (sub = 0; sub < d->nr; sub++)
+ d->sub[sub].filter = list_objects_filter__init(
+ filter->omits ? &d->sub[sub].omits : NULL,
+ &filter_options->sub[sub]);
+
+ filter->filter_data = d;
+ filter->filter_object_fn = filter_combine;
+ filter->free_fn = filter_combine__free;
+ filter->finalize_omits_fn = filter_combine__finalize_omits;
+}
+
+typedef void (*filter_init_fn)(
+ struct list_objects_filter_options *filter_options,
+ struct filter *filter);
/*
* Must match "enum list_objects_filter_choice".
@@ -493,14 +651,14 @@ static filter_init_fn s_filters[] = {
filter_blobs_limit__init,
filter_trees_depth__init,
filter_sparse_oid__init,
+ filter_combine__init,
};
-void *list_objects_filter__init(
+struct filter *list_objects_filter__init(
struct oidset *omitted,
- struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn)
+ struct list_objects_filter_options *filter_options)
{
+ struct filter *filter;
filter_init_fn init_fn;
assert((sizeof(s_filters) / sizeof(s_filters[0])) == LOFC__COUNT);
@@ -510,10 +668,44 @@ void *list_objects_filter__init(
filter_options->choice);
init_fn = s_filters[filter_options->choice];
- if (init_fn)
- return init_fn(omitted, filter_options,
- filter_fn, filter_free_fn);
- *filter_fn = NULL;
- *filter_free_fn = NULL;
- return NULL;
+ if (!init_fn)
+ return NULL;
+
+ filter = xcalloc(1, sizeof(*filter));
+ filter->omits = omitted;
+ init_fn(filter_options, filter);
+ return filter;
+}
+
+enum list_objects_filter_result list_objects_filter__filter_object(
+ struct repository *r,
+ enum list_objects_filter_situation filter_situation,
+ struct object *obj,
+ const char *pathname,
+ const char *filename,
+ struct filter *filter)
+{
+ if (filter && (obj->flags & NOT_USER_GIVEN))
+ return filter->filter_object_fn(r, filter_situation, obj,
+ pathname, filename,
+ filter->omits,
+ filter->filter_data);
+ /*
+ * No filter is active or user gave object explicitly. In this case,
+ * always show the object (except when LOFS_END_TREE, since this tree
+ * had already been shown when LOFS_BEGIN_TREE).
+ */
+ if (filter_situation == LOFS_END_TREE)
+ return 0;
+ return LOFR_MARK_SEEN | LOFR_DO_SHOW;
+}
+
+void list_objects_filter__free(struct filter *filter)
+{
+ if (!filter)
+ return;
+ if (filter->finalize_omits_fn && filter->omits)
+ filter->finalize_omits_fn(filter->omits, filter->filter_data);
+ filter->free_fn(filter->filter_data);
+ free(filter);
}
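
[Editor's note] filter_combine() above starts from the full result mask and clears LOFR_DO_SHOW, LOFR_MARK_SEEN and LOFR_SKIP_TREE as soon as any subfilter withholds them, i.e. an object is shown only when every subfilter shows it. The sketch below reproduces just that bit-combining idea with made-up flag names; the skip-tree bookkeeping via is_skipping_tree is simplified away.

#include <stdio.h>

/* Hypothetical flag values mirroring the LOFR_* semantics shown above. */
#define DO_SHOW   (1 << 0)
#define MARK_SEEN (1 << 1)
#define SKIP_TREE (1 << 2)

static unsigned combine(const unsigned *sub_results, int n)
{
	/* Start optimistic; drop a bit as soon as one subfilter withholds it. */
	unsigned combined = DO_SHOW | MARK_SEEN | SKIP_TREE;
	int i;

	for (i = 0; i < n; i++) {
		if (!(sub_results[i] & DO_SHOW))
			combined &= ~DO_SHOW;
		if (!(sub_results[i] & MARK_SEEN))
			combined &= ~MARK_SEEN;
		if (!(sub_results[i] & SKIP_TREE))
			combined &= ~SKIP_TREE;
	}
	return combined;
}

int main(void)
{
	unsigned subs[] = { DO_SHOW | MARK_SEEN, DO_SHOW };
	unsigned r = combine(subs, 2);

	printf("show=%d seen=%d skip=%d\n",
	       !!(r & DO_SHOW), !!(r & MARK_SEEN), !!(r & SKIP_TREE));
	return 0;
}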
diff --git a/list-objects-filter.h b/list-objects-filter.h
index 1d45a4ad57..cfd784e203 100644
--- a/list-objects-filter.h
+++ b/list-objects-filter.h
@@ -60,30 +60,36 @@ enum list_objects_filter_situation {
LOFS_BLOB
};
-typedef enum list_objects_filter_result (*filter_object_fn)(
+struct filter;
+
+/*
+ * Constructor for the set of defined list-objects filters.
+ * The `omitted` set is optional. It is populated with objects that the
+ * filter excludes. This set should not be considered finalized until
+ * after list_objects_filter__free is called on the returned `struct
+ * filter *`.
+ */
+struct filter *list_objects_filter__init(
+ struct oidset *omitted,
+ struct list_objects_filter_options *filter_options);
+
+/*
+ * Lets `filter` decide how to handle the `obj`. If `filter` is NULL, this
+ * function behaves as expected if no filter is configured: all objects are
+ * included.
+ */
+enum list_objects_filter_result list_objects_filter__filter_object(
struct repository *r,
enum list_objects_filter_situation filter_situation,
struct object *obj,
const char *pathname,
const char *filename,
- void *filter_data);
-
-typedef void (*filter_free_fn)(void *filter_data);
+ struct filter *filter);
/*
- * Constructor for the set of defined list-objects filters.
- * Returns a generic "void *filter_data".
- *
- * The returned "filter_fn" will be used by traverse_commit_list()
- * to filter the results.
- *
- * The returned "filter_free_fn" is a destructor for the
- * filter_data.
+ * Destroys `filter` and finalizes the `omitted` set, if present. Does
+ * nothing if `filter` is null.
*/
-void *list_objects_filter__init(
- struct oidset *omitted,
- struct list_objects_filter_options *filter_options,
- filter_object_fn *filter_fn,
- filter_free_fn *filter_free_fn);
+void list_objects_filter__free(struct filter *filter);
#endif /* LIST_OBJECTS_FILTER_H */
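
[Editor's note] The header above replaces the old triple of out-parameters (filter_data, filter_fn, filter_free_fn) with an opaque struct filter handle that bundles the callback, its private data and its destructor, and that treats a NULL handle as "include everything". The following is a rough, hypothetical model of that lifecycle; names such as handle_init/handle_decide/handle_free are invented and are not git API.

#include <stdio.h>
#include <stdlib.h>

struct handle {
	int (*decide)(int value, void *data);
	void (*free_fn)(void *data);
	void *data;
};

static int keep_small(int value, void *data)
{
	int *limit = data;

	return value < *limit;
}

/* Constructor: returns one opaque handle instead of three out-parameters. */
static struct handle *handle_init(int limit)
{
	struct handle *h = calloc(1, sizeof(*h));
	int *d = malloc(sizeof(*d));

	*d = limit;
	h->decide = keep_small;
	h->free_fn = free;
	h->data = d;
	return h;
}

static int handle_decide(struct handle *h, int value)
{
	if (!h)
		return 1; /* no handle configured: accept everything */
	return h->decide(value, h->data);
}

static void handle_free(struct handle *h)
{
	if (!h)
		return;
	h->free_fn(h->data);
	free(h);
}

int main(void)
{
	struct handle *h = handle_init(10);

	printf("%d %d\n", handle_decide(h, 3), handle_decide(h, 42));
	handle_free(h);
	return 0;
}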
diff --git a/list-objects.c b/list-objects.c
index b5651ddd5b..e19589baa0 100644
--- a/list-objects.c
+++ b/list-objects.c
@@ -18,8 +18,7 @@ struct traversal_context {
show_object_fn show_object;
show_commit_fn show_commit;
void *show_data;
- filter_object_fn filter_fn;
- void *filter_data;
+ struct filter *filter;
};
static void process_blob(struct traversal_context *ctx,
@@ -29,7 +28,7 @@ static void process_blob(struct traversal_context *ctx,
{
struct object *obj = &blob->object;
size_t pathlen;
- enum list_objects_filter_result r = LOFR_MARK_SEEN | LOFR_DO_SHOW;
+ enum list_objects_filter_result r;
if (!ctx->revs->blob_objects)
return;
@@ -54,11 +53,10 @@ static void process_blob(struct traversal_context *ctx,
pathlen = path->len;
strbuf_addstr(path, name);
- if ((obj->flags & NOT_USER_GIVEN) && ctx->filter_fn)
- r = ctx->filter_fn(ctx->revs->repo,
- LOFS_BLOB, obj,
- path->buf, &path->buf[pathlen],
- ctx->filter_data);
+ r = list_objects_filter__filter_object(ctx->revs->repo,
+ LOFS_BLOB, obj,
+ path->buf, &path->buf[pathlen],
+ ctx->filter);
if (r & LOFR_MARK_SEEN)
obj->flags |= SEEN;
if (r & LOFR_DO_SHOW)
@@ -157,7 +155,7 @@ static void process_tree(struct traversal_context *ctx,
struct object *obj = &tree->object;
struct rev_info *revs = ctx->revs;
int baselen = base->len;
- enum list_objects_filter_result r = LOFR_MARK_SEEN | LOFR_DO_SHOW;
+ enum list_objects_filter_result r;
int failed_parse;
if (!revs->tree_objects)
@@ -186,11 +184,10 @@ static void process_tree(struct traversal_context *ctx,
}
strbuf_addstr(base, name);
- if ((obj->flags & NOT_USER_GIVEN) && ctx->filter_fn)
- r = ctx->filter_fn(ctx->revs->repo,
- LOFS_BEGIN_TREE, obj,
- base->buf, &base->buf[baselen],
- ctx->filter_data);
+ r = list_objects_filter__filter_object(ctx->revs->repo,
+ LOFS_BEGIN_TREE, obj,
+ base->buf, &base->buf[baselen],
+ ctx->filter);
if (r & LOFR_MARK_SEEN)
obj->flags |= SEEN;
if (r & LOFR_DO_SHOW)
@@ -203,16 +200,14 @@ static void process_tree(struct traversal_context *ctx,
else if (!failed_parse)
process_tree_contents(ctx, tree, base);
- if ((obj->flags & NOT_USER_GIVEN) && ctx->filter_fn) {
- r = ctx->filter_fn(ctx->revs->repo,
- LOFS_END_TREE, obj,
- base->buf, &base->buf[baselen],
- ctx->filter_data);
- if (r & LOFR_MARK_SEEN)
- obj->flags |= SEEN;
- if (r & LOFR_DO_SHOW)
- ctx->show_object(obj, base->buf, ctx->show_data);
- }
+ r = list_objects_filter__filter_object(ctx->revs->repo,
+ LOFS_END_TREE, obj,
+ base->buf, &base->buf[baselen],
+ ctx->filter);
+ if (r & LOFR_MARK_SEEN)
+ obj->flags |= SEEN;
+ if (r & LOFR_DO_SHOW)
+ ctx->show_object(obj, base->buf, ctx->show_data);
strbuf_setlen(base, baselen);
free_tree_buffer(tree);
@@ -370,7 +365,9 @@ static void do_traverse(struct traversal_context *ctx)
* an uninteresting boundary commit may not have its tree
* parsed yet, but we are not going to show them anyway
*/
- if (get_commit_tree(commit)) {
+ if (!ctx->revs->tree_objects)
+ ; /* do not bother loading tree */
+ else if (get_commit_tree(commit)) {
struct tree *tree = get_commit_tree(commit);
tree->object.flags |= NOT_USER_GIVEN;
add_pending_tree(ctx->revs, tree);
@@ -402,8 +399,7 @@ void traverse_commit_list(struct rev_info *revs,
ctx.show_commit = show_commit;
ctx.show_object = show_object;
ctx.show_data = show_data;
- ctx.filter_fn = NULL;
- ctx.filter_data = NULL;
+ ctx.filter = NULL;
do_traverse(&ctx);
}
@@ -416,17 +412,12 @@ void traverse_commit_list_filtered(
struct oidset *omitted)
{
struct traversal_context ctx;
- filter_free_fn filter_free_fn = NULL;
ctx.revs = revs;
ctx.show_object = show_object;
ctx.show_commit = show_commit;
ctx.show_data = show_data;
- ctx.filter_fn = NULL;
-
- ctx.filter_data = list_objects_filter__init(omitted, filter_options,
- &ctx.filter_fn, &filter_free_fn);
+ ctx.filter = list_objects_filter__init(omitted, filter_options);
do_traverse(&ctx);
- if (ctx.filter_data && filter_free_fn)
- filter_free_fn(ctx.filter_data);
+ list_objects_filter__free(ctx.filter);
}
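
[Editor's note] traverse_commit_list_filtered() above now tears the filter down with list_objects_filter__free() after the walk, which is also the point where a combine filter's finalize_omits_fn unions the per-subfilter omit sets into the caller's set. Below is a small standalone sketch of that "merge only at teardown" idea, with invented names and plain int IDs standing in for oidsets.

#include <stdio.h>

#define MAX_OMITS 16

/* Hypothetical per-subfilter collector; plain ints stand in for OIDs. */
struct collector {
	int omits[MAX_OMITS];
	int nr;
};

static int contains(const int *set, int nr, int id)
{
	int i;

	for (i = 0; i < nr; i++)
		if (set[i] == id)
			return 1;
	return 0;
}

/* Union one collector's omits into the final set, as a finalize step. */
static void finalize_into(int *final, int *final_nr, const struct collector *c)
{
	int i;

	for (i = 0; i < c->nr; i++)
		if (!contains(final, *final_nr, c->omits[i]))
			final[(*final_nr)++] = c->omits[i];
}

int main(void)
{
	struct collector subs[2] = {
		{ { 101, 102 }, 2 },
		{ { 102, 103 }, 2 },
	};
	int final[2 * MAX_OMITS];
	int final_nr = 0, i;

	/* ...the traversal would run here, filling each sub's omits... */

	for (i = 0; i < 2; i++)
		finalize_into(final, &final_nr, &subs[i]);

	for (i = 0; i < final_nr; i++)
		printf("omitted: %d\n", final[i]);
	return 0;
}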
diff --git a/ll-merge.c b/ll-merge.c
index 5b8d46aede..d65a8971db 100644
--- a/ll-merge.c
+++ b/ll-merge.c
@@ -32,6 +32,20 @@ struct ll_merge_driver {
char *cmdline;
};
+static struct attr_check *merge_attributes;
+static struct attr_check *load_merge_attributes(void)
+{
+ if (!merge_attributes)
+ merge_attributes = attr_check_initl("merge", "conflict-marker-size", NULL);
+ return merge_attributes;
+}
+
+void reset_merge_attributes(void)
+{
+ attr_check_free(merge_attributes);
+ merge_attributes = NULL;
+}
+
/*
* Built-in low-levels
*/
@@ -354,7 +368,7 @@ int ll_merge(mmbuffer_t *result_buf,
struct index_state *istate,
const struct ll_merge_options *opts)
{
- static struct attr_check *check;
+ struct attr_check *check = load_merge_attributes();
static const struct ll_merge_options default_opts;
const char *ll_driver_name = NULL;
int marker_size = DEFAULT_CONFLICT_MARKER_SIZE;
@@ -369,9 +383,6 @@ int ll_merge(mmbuffer_t *result_buf,
normalize_file(theirs, path, istate);
}
- if (!check)
- check = attr_check_initl("merge", "conflict-marker-size", NULL);
-
git_check_attr(istate, path, check);
ll_driver_name = check->items[0].value;
if (check->items[1].value) {
diff --git a/ll-merge.h b/ll-merge.h
index b9e2af1c88..e78973dd55 100644
--- a/ll-merge.h
+++ b/ll-merge.h
@@ -26,5 +26,6 @@ int ll_merge(mmbuffer_t *result_buf,
const struct ll_merge_options *opts);
int ll_merge_marker_size(struct index_state *istate, const char *path);
+void reset_merge_attributes(void);
#endif
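
[Editor's note] The ll-merge change above caches the attr_check across ll_merge() calls and adds reset_merge_attributes() so callers can drop the cache when attribute data may have changed. Here is a minimal, hypothetical sketch of that lazy-load-plus-explicit-reset pattern; load_config/reset_config are invented names, not git functions.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *cached_config;

static const char *load_config(void)
{
	if (!cached_config) {
		const char *attrs = "merge=union conflict-marker-size=32";

		/* stands in for the expensive attr_check_initl(...) call */
		cached_config = malloc(strlen(attrs) + 1);
		strcpy(cached_config, attrs);
		printf("(loaded)\n");
	}
	return cached_config;
}

static void reset_config(void)
{
	free(cached_config);
	cached_config = NULL;
}

int main(void)
{
	printf("%s\n", load_config());	/* loads */
	printf("%s\n", load_config());	/* reuses the cache */
	reset_config();			/* e.g. after the attribute data changes */
	printf("%s\n", load_config());	/* loads again */
	return 0;
}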
diff --git a/log-tree.c b/log-tree.c
index 1e56df62a7..923a299e70 100644
--- a/log-tree.c
+++ b/log-tree.c
@@ -77,6 +77,7 @@ void add_name_decoration(enum decoration_type type, const char *name, struct obj
const struct name_decoration *get_name_decoration(const struct object *obj)
{
+ load_ref_decorations(NULL, DECORATE_SHORT_REFS);
return lookup_decoration(&name_decoration, obj);
}
@@ -677,9 +678,7 @@ void show_log(struct rev_info *opt)
raw = (opt->commit_format == CMIT_FMT_USERFORMAT);
format_display_notes(&commit->object.oid, &notebuf,
get_log_output_encoding(), raw);
- ctx.notes_message = notebuf.len
- ? strbuf_detach(&notebuf, NULL)
- : xcalloc(1, 1);
+ ctx.notes_message = strbuf_detach(&notebuf, NULL);
}
/*
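
[Editor's note] get_name_decoration() above now calls load_ref_decorations() itself, so callers no longer have to remember to preload decorations; this relies on the loader being idempotent. A tiny sketch of the same lazy, idempotent accessor pattern follows; all names are invented.

#include <stdio.h>

static int decorations_loaded;

/* Idempotent loader: safe to call on every lookup. */
static void load_decorations(void)
{
	if (decorations_loaded)
		return;
	decorations_loaded = 1;
	printf("(decorations loaded once)\n");
}

static const char *get_decoration(const char *commit)
{
	load_decorations();
	return commit[0] == 'a' ? "HEAD -> master" : NULL;
}

int main(void)
{
	printf("%s\n", get_decoration("abc123") ? "decorated" : "plain");
	printf("%s\n", get_decoration("def456") ? "decorated" : "plain");
	return 0;
}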
diff --git a/merge-recursive.c b/merge-recursive.c
index 6b812d67e3..42be7c9960 100644
--- a/merge-recursive.c
+++ b/merge-recursive.c
@@ -4,30 +4,40 @@
* The thieves were Alex Riesen and Johannes Schindelin, in June/July 2006
*/
#include "cache.h"
-#include "config.h"
+#include "merge-recursive.h"
+
#include "advice.h"
-#include "lockfile.h"
-#include "cache-tree.h"
-#include "object-store.h"
-#include "repository.h"
-#include "commit.h"
+#include "alloc.h"
+#include "attr.h"
#include "blob.h"
#include "builtin.h"
-#include "tree-walk.h"
+#include "cache-tree.h"
+#include "commit.h"
+#include "commit-reach.h"
+#include "config.h"
#include "diff.h"
#include "diffcore.h"
+#include "dir.h"
+#include "ll-merge.h"
+#include "lockfile.h"
+#include "object-store.h"
+#include "repository.h"
+#include "revision.h"
+#include "string-list.h"
+#include "submodule.h"
#include "tag.h"
-#include "alloc.h"
+#include "tree-walk.h"
#include "unpack-trees.h"
-#include "string-list.h"
#include "xdiff-interface.h"
-#include "ll-merge.h"
-#include "attr.h"
-#include "merge-recursive.h"
-#include "dir.h"
-#include "submodule.h"
-#include "revision.h"
-#include "commit-reach.h"
+
+struct merge_options_internal {
+ int call_depth;
+ int needed_rename_limit;
+ struct hashmap current_file_dir_set;
+ struct string_list df_conflict_file_set;
+ struct unpack_trees_options unpack_opts;
+ struct index_state orig_index;
+};
struct path_hashmap_entry {
struct hashmap_entry e;
@@ -35,14 +45,16 @@ struct path_hashmap_entry {
};
static int path_hashmap_cmp(const void *cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
- const struct path_hashmap_entry *a = entry;
- const struct path_hashmap_entry *b = entry_or_key;
+ const struct path_hashmap_entry *a, *b;
const char *key = keydata;
+ a = container_of(eptr, const struct path_hashmap_entry, e);
+ b = container_of(entry_or_key, const struct path_hashmap_entry, e);
+
if (ignore_case)
return strcasecmp(a->path, key ? key : b->path);
else
@@ -54,6 +66,24 @@ static unsigned int path_hash(const char *path)
return ignore_case ? strihash(path) : strhash(path);
}
+/*
+ * For dir_rename_entry, directory names are stored as a full path from the
+ * toplevel of the repository and do not include a trailing '/'. Also:
+ *
+ * dir: original name of directory being renamed
+ * non_unique_new_dir: if true, could not determine new_dir
+ * new_dir: final name of directory being renamed
+ * possible_new_dirs: temporary field used to help determine new_dir; see comments
+ * in get_directory_renames() for details
+ */
+struct dir_rename_entry {
+ struct hashmap_entry ent;
+ char *dir;
+ unsigned non_unique_new_dir:1;
+ struct strbuf new_dir;
+ struct string_list possible_new_dirs;
+};
+
static struct dir_rename_entry *dir_rename_find_entry(struct hashmap *hashmap,
char *dir)
{
@@ -61,18 +91,20 @@ static struct dir_rename_entry *dir_rename_find_entry(struct hashmap *hashmap,
if (dir == NULL)
return NULL;
- hashmap_entry_init(&key, strhash(dir));
+ hashmap_entry_init(&key.ent, strhash(dir));
key.dir = dir;
- return hashmap_get(hashmap, &key, NULL);
+ return hashmap_get_entry(hashmap, &key, ent, NULL);
}
static int dir_rename_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct dir_rename_entry *e1 = entry;
- const struct dir_rename_entry *e2 = entry_or_key;
+ const struct dir_rename_entry *e1, *e2;
+
+ e1 = container_of(eptr, const struct dir_rename_entry, ent);
+ e2 = container_of(entry_or_key, const struct dir_rename_entry, ent);
return strcmp(e1->dir, e2->dir);
}
@@ -85,34 +117,46 @@ static void dir_rename_init(struct hashmap *map)
static void dir_rename_entry_init(struct dir_rename_entry *entry,
char *directory)
{
- hashmap_entry_init(entry, strhash(directory));
+ hashmap_entry_init(&entry->ent, strhash(directory));
entry->dir = directory;
entry->non_unique_new_dir = 0;
strbuf_init(&entry->new_dir, 0);
string_list_init(&entry->possible_new_dirs, 0);
}
+struct collision_entry {
+ struct hashmap_entry ent;
+ char *target_file;
+ struct string_list source_files;
+ unsigned reported_already:1;
+};
+
static struct collision_entry *collision_find_entry(struct hashmap *hashmap,
char *target_file)
{
struct collision_entry key;
- hashmap_entry_init(&key, strhash(target_file));
+ hashmap_entry_init(&key.ent, strhash(target_file));
key.target_file = target_file;
- return hashmap_get(hashmap, &key, NULL);
+ return hashmap_get_entry(hashmap, &key, ent, NULL);
}
-static int collision_cmp(void *unused_cmp_data,
- const struct collision_entry *e1,
- const struct collision_entry *e2,
+static int collision_cmp(const void *unused_cmp_data,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
+ const struct collision_entry *e1, *e2;
+
+ e1 = container_of(eptr, const struct collision_entry, ent);
+ e2 = container_of(entry_or_key, const struct collision_entry, ent);
+
return strcmp(e1->target_file, e2->target_file);
}
static void collision_init(struct hashmap *map)
{
- hashmap_init(map, (hashmap_cmp_fn) collision_cmp, NULL, 0);
+ hashmap_init(map, collision_cmp, NULL, 0);
}
static void flush_output(struct merge_options *opt)
@@ -284,7 +328,8 @@ static inline void setup_rename_conflict_info(enum rename_type rename_type,
static int show(struct merge_options *opt, int v)
{
- return (!opt->call_depth && opt->verbosity >= v) || opt->verbosity >= 5;
+ return (!opt->priv->call_depth && opt->verbosity >= v) ||
+ opt->verbosity >= 5;
}
__attribute__((format (printf, 3, 4)))
@@ -295,7 +340,7 @@ static void output(struct merge_options *opt, int v, const char *fmt, ...)
if (!show(opt, v))
return;
- strbuf_addchars(&opt->obuf, ' ', opt->call_depth * 2);
+ strbuf_addchars(&opt->obuf, ' ', opt->priv->call_depth * 2);
va_start(ap, fmt);
strbuf_vaddf(&opt->obuf, fmt, ap);
@@ -310,7 +355,7 @@ static void output_commit_title(struct merge_options *opt, struct commit *commit
{
struct merge_remote_desc *desc;
- strbuf_addchars(&opt->obuf, ' ', opt->call_depth * 2);
+ strbuf_addchars(&opt->obuf, ' ', opt->priv->call_depth * 2);
desc = merge_remote_util(commit);
if (desc)
strbuf_addf(&opt->obuf, "virtual %s\n", desc->name);
@@ -358,6 +403,11 @@ static int add_cacheinfo(struct merge_options *opt,
return ret;
}
+static inline int merge_detect_rename(struct merge_options *opt)
+{
+ return (opt->detect_renames >= 0) ? opt->detect_renames : 1;
+}
+
static void init_tree_desc_from_tree(struct tree_desc *desc, struct tree *tree)
{
parse_tree(tree);
@@ -373,74 +423,43 @@ static int unpack_trees_start(struct merge_options *opt,
struct tree_desc t[3];
struct index_state tmp_index = { NULL };
- memset(&opt->unpack_opts, 0, sizeof(opt->unpack_opts));
- if (opt->call_depth)
- opt->unpack_opts.index_only = 1;
+ memset(&opt->priv->unpack_opts, 0, sizeof(opt->priv->unpack_opts));
+ if (opt->priv->call_depth)
+ opt->priv->unpack_opts.index_only = 1;
else
- opt->unpack_opts.update = 1;
- opt->unpack_opts.merge = 1;
- opt->unpack_opts.head_idx = 2;
- opt->unpack_opts.fn = threeway_merge;
- opt->unpack_opts.src_index = opt->repo->index;
- opt->unpack_opts.dst_index = &tmp_index;
- opt->unpack_opts.aggressive = !merge_detect_rename(opt);
- setup_unpack_trees_porcelain(&opt->unpack_opts, "merge");
+ opt->priv->unpack_opts.update = 1;
+ opt->priv->unpack_opts.merge = 1;
+ opt->priv->unpack_opts.head_idx = 2;
+ opt->priv->unpack_opts.fn = threeway_merge;
+ opt->priv->unpack_opts.src_index = opt->repo->index;
+ opt->priv->unpack_opts.dst_index = &tmp_index;
+ opt->priv->unpack_opts.aggressive = !merge_detect_rename(opt);
+ setup_unpack_trees_porcelain(&opt->priv->unpack_opts, "merge");
init_tree_desc_from_tree(t+0, common);
init_tree_desc_from_tree(t+1, head);
init_tree_desc_from_tree(t+2, merge);
- rc = unpack_trees(3, t, &opt->unpack_opts);
+ rc = unpack_trees(3, t, &opt->priv->unpack_opts);
cache_tree_free(&opt->repo->index->cache_tree);
/*
- * Update opt->repo->index to match the new results, AFTER saving a copy
- * in opt->orig_index. Update src_index to point to the saved copy.
- * (verify_uptodate() checks src_index, and the original index is
- * the one that had the necessary modification timestamps.)
+ * Update opt->repo->index to match the new results, AFTER saving a
+ * copy in opt->priv->orig_index. Update src_index to point to the
+ * saved copy. (verify_uptodate() checks src_index, and the original
+ * index is the one that had the necessary modification timestamps.)
*/
- opt->orig_index = *opt->repo->index;
+ opt->priv->orig_index = *opt->repo->index;
*opt->repo->index = tmp_index;
- opt->unpack_opts.src_index = &opt->orig_index;
+ opt->priv->unpack_opts.src_index = &opt->priv->orig_index;
return rc;
}
static void unpack_trees_finish(struct merge_options *opt)
{
- discard_index(&opt->orig_index);
- clear_unpack_trees_porcelain(&opt->unpack_opts);
-}
-
-struct tree *write_tree_from_memory(struct merge_options *opt)
-{
- struct tree *result = NULL;
- struct index_state *istate = opt->repo->index;
-
- if (unmerged_index(istate)) {
- int i;
- fprintf(stderr, "BUG: There are unmerged index entries:\n");
- for (i = 0; i < istate->cache_nr; i++) {
- const struct cache_entry *ce = istate->cache[i];
- if (ce_stage(ce))
- fprintf(stderr, "BUG: %d %.*s\n", ce_stage(ce),
- (int)ce_namelen(ce), ce->name);
- }
- BUG("unmerged index entries in merge-recursive.c");
- }
-
- if (!istate->cache_tree)
- istate->cache_tree = cache_tree();
-
- if (!cache_tree_fully_valid(istate->cache_tree) &&
- cache_tree_update(istate, 0) < 0) {
- err(opt, _("error building trees"));
- return NULL;
- }
-
- result = lookup_tree(opt->repo, &istate->cache_tree->oid);
-
- return result;
+ discard_index(&opt->priv->orig_index);
+ clear_unpack_trees_porcelain(&opt->priv->unpack_opts);
}
static int save_files_dirs(const struct object_id *oid,
@@ -454,8 +473,8 @@ static int save_files_dirs(const struct object_id *oid,
strbuf_addstr(base, path);
FLEX_ALLOC_MEM(entry, path, base->buf, base->len);
- hashmap_entry_init(entry, path_hash(entry->path));
- hashmap_add(&opt->current_file_dir_set, entry);
+ hashmap_entry_init(&entry->e, path_hash(entry->path));
+ hashmap_add(&opt->priv->current_file_dir_set, &entry->e);
strbuf_setlen(base, baselen);
return (S_ISDIR(mode) ? READ_TREE_RECURSIVE : 0);
@@ -586,7 +605,7 @@ static void record_df_conflict_files(struct merge_options *opt,
* If we're merging merge-bases, we don't want to bother with
* any working directory changes.
*/
- if (opt->call_depth)
+ if (opt->priv->call_depth)
return;
/* Ensure D/F conflicts are adjacent in the entries list. */
@@ -598,7 +617,7 @@ static void record_df_conflict_files(struct merge_options *opt,
df_sorted_entries.cmp = string_list_df_name_compare;
string_list_sort(&df_sorted_entries);
- string_list_clear(&opt->df_conflict_file_set, 1);
+ string_list_clear(&opt->priv->df_conflict_file_set, 1);
for (i = 0; i < df_sorted_entries.nr; i++) {
const char *path = df_sorted_entries.items[i].string;
int len = strlen(path);
@@ -614,7 +633,7 @@ static void record_df_conflict_files(struct merge_options *opt,
len > last_len &&
memcmp(path, last_file, last_len) == 0 &&
path[last_len] == '/') {
- string_list_insert(&opt->df_conflict_file_set, last_file);
+ string_list_insert(&opt->priv->df_conflict_file_set, last_file);
}
/*
@@ -681,8 +700,8 @@ static void update_entry(struct stage_data *entry,
static int remove_file(struct merge_options *opt, int clean,
const char *path, int no_wd)
{
- int update_cache = opt->call_depth || clean;
- int update_working_directory = !opt->call_depth && !no_wd;
+ int update_cache = opt->priv->call_depth || clean;
+ int update_working_directory = !opt->priv->call_depth && !no_wd;
if (update_cache) {
if (remove_file_from_index(opt->repo->index, path))
@@ -712,7 +731,9 @@ static void add_flattened_path(struct strbuf *out, const char *s)
out->buf[i] = '_';
}
-static char *unique_path(struct merge_options *opt, const char *path, const char *branch)
+static char *unique_path(struct merge_options *opt,
+ const char *path,
+ const char *branch)
{
struct path_hashmap_entry *entry;
struct strbuf newpath = STRBUF_INIT;
@@ -723,16 +744,16 @@ static char *unique_path(struct merge_options *opt, const char *path, const char
add_flattened_path(&newpath, branch);
base_len = newpath.len;
- while (hashmap_get_from_hash(&opt->current_file_dir_set,
+ while (hashmap_get_from_hash(&opt->priv->current_file_dir_set,
path_hash(newpath.buf), newpath.buf) ||
- (!opt->call_depth && file_exists(newpath.buf))) {
+ (!opt->priv->call_depth && file_exists(newpath.buf))) {
strbuf_setlen(&newpath, base_len);
strbuf_addf(&newpath, "_%d", suffix++);
}
FLEX_ALLOC_MEM(entry, path, newpath.buf, newpath.len);
- hashmap_entry_init(entry, path_hash(entry->path));
- hashmap_add(&opt->current_file_dir_set, entry);
+ hashmap_entry_init(&entry->e, path_hash(entry->path));
+ hashmap_add(&opt->priv->current_file_dir_set, &entry->e);
return strbuf_detach(&newpath, NULL);
}
@@ -764,7 +785,8 @@ static int dir_in_way(struct index_state *istate, const char *path,
strbuf_release(&dirpath);
return check_working_copy && !lstat(path, &st) && S_ISDIR(st.st_mode) &&
- !(empty_ok && is_empty_dir(path));
+ !(empty_ok && is_empty_dir(path)) &&
+ !has_symlink_leading_path(path, strlen(path));
}
/*
@@ -774,7 +796,7 @@ static int dir_in_way(struct index_state *istate, const char *path,
static int was_tracked_and_matches(struct merge_options *opt, const char *path,
const struct diff_filespec *blob)
{
- int pos = index_name_pos(&opt->orig_index, path, strlen(path));
+ int pos = index_name_pos(&opt->priv->orig_index, path, strlen(path));
struct cache_entry *ce;
if (0 > pos)
@@ -782,7 +804,7 @@ static int was_tracked_and_matches(struct merge_options *opt, const char *path,
return 0;
/* See if the file we were tracking before matches */
- ce = opt->orig_index.cache[pos];
+ ce = opt->priv->orig_index.cache[pos];
return (oid_eq(&ce->oid, &blob->oid) && ce->ce_mode == blob->mode);
}
@@ -791,7 +813,7 @@ static int was_tracked_and_matches(struct merge_options *opt, const char *path,
*/
static int was_tracked(struct merge_options *opt, const char *path)
{
- int pos = index_name_pos(&opt->orig_index, path, strlen(path));
+ int pos = index_name_pos(&opt->priv->orig_index, path, strlen(path));
if (0 <= pos)
/* we were tracking this path before the merge */
@@ -848,12 +870,12 @@ static int was_dirty(struct merge_options *opt, const char *path)
struct cache_entry *ce;
int dirty = 1;
- if (opt->call_depth || !was_tracked(opt, path))
+ if (opt->priv->call_depth || !was_tracked(opt, path))
return !dirty;
- ce = index_file_exists(opt->unpack_opts.src_index,
+ ce = index_file_exists(opt->priv->unpack_opts.src_index,
path, strlen(path), ignore_case);
- dirty = verify_uptodate(ce, &opt->unpack_opts) != 0;
+ dirty = verify_uptodate(ce, &opt->priv->unpack_opts) != 0;
return dirty;
}
@@ -863,8 +885,8 @@ static int make_room_for_path(struct merge_options *opt, const char *path)
const char *msg = _("failed to create path '%s'%s");
/* Unlink any D/F conflict files that are in the way */
- for (i = 0; i < opt->df_conflict_file_set.nr; i++) {
- const char *df_path = opt->df_conflict_file_set.items[i].string;
+ for (i = 0; i < opt->priv->df_conflict_file_set.nr; i++) {
+ const char *df_path = opt->priv->df_conflict_file_set.items[i].string;
size_t pathlen = strlen(path);
size_t df_pathlen = strlen(df_path);
if (df_pathlen < pathlen &&
@@ -874,7 +896,7 @@ static int make_room_for_path(struct merge_options *opt, const char *path)
_("Removing %s to make room for subdirectory\n"),
df_path);
unlink(df_path);
- unsorted_string_list_delete_item(&opt->df_conflict_file_set,
+ unsorted_string_list_delete_item(&opt->priv->df_conflict_file_set,
i, 0);
break;
}
@@ -915,7 +937,7 @@ static int update_file_flags(struct merge_options *opt,
{
int ret = 0;
- if (opt->call_depth)
+ if (opt->priv->call_depth)
update_wd = 0;
if (update_wd) {
@@ -934,9 +956,11 @@ static int update_file_flags(struct merge_options *opt,
}
buf = read_object_file(&contents->oid, &type, &size);
- if (!buf)
- return err(opt, _("cannot read object %s '%s'"),
- oid_to_hex(&contents->oid), path);
+ if (!buf) {
+ ret = err(opt, _("cannot read object %s '%s'"),
+ oid_to_hex(&contents->oid), path);
+ goto free_buf;
+ }
if (type != OBJ_BLOB) {
ret = err(opt, _("blob expected for %s '%s'"),
oid_to_hex(&contents->oid), path);
@@ -944,7 +968,8 @@ static int update_file_flags(struct merge_options *opt,
}
if (S_ISREG(contents->mode)) {
struct strbuf strbuf = STRBUF_INIT;
- if (convert_to_working_tree(opt->repo->index, path, buf, size, &strbuf)) {
+ if (convert_to_working_tree(opt->repo->index,
+ path, buf, size, &strbuf)) {
free(buf);
size = strbuf.len;
buf = strbuf_detach(&strbuf, NULL);
@@ -997,7 +1022,7 @@ static int update_file(struct merge_options *opt,
const char *path)
{
return update_file_flags(opt, contents, path,
- opt->call_depth || clean, !opt->call_depth);
+ opt->priv->call_depth || clean, !opt->priv->call_depth);
}
/* Low level file merging, update and removal */
@@ -1019,22 +1044,22 @@ static int merge_3way(struct merge_options *opt,
{
mmfile_t orig, src1, src2;
struct ll_merge_options ll_opts = {0};
- char *base_name, *name1, *name2;
+ char *base, *name1, *name2;
int merge_status;
ll_opts.renormalize = opt->renormalize;
ll_opts.extra_marker_size = extra_marker_size;
ll_opts.xdl_opts = opt->xdl_opts;
- if (opt->call_depth) {
+ if (opt->priv->call_depth) {
ll_opts.virtual_ancestor = 1;
ll_opts.variant = 0;
} else {
switch (opt->recursive_variant) {
- case MERGE_RECURSIVE_OURS:
+ case MERGE_VARIANT_OURS:
ll_opts.variant = XDL_MERGE_FAVOR_OURS;
break;
- case MERGE_RECURSIVE_THEIRS:
+ case MERGE_VARIANT_THEIRS:
ll_opts.variant = XDL_MERGE_FAVOR_THEIRS;
break;
default:
@@ -1043,16 +1068,13 @@ static int merge_3way(struct merge_options *opt,
}
}
- assert(a->path && b->path);
- if (strcmp(a->path, b->path) ||
- (opt->ancestor != NULL && strcmp(a->path, o->path) != 0)) {
- base_name = opt->ancestor == NULL ? NULL :
- mkpathdup("%s:%s", opt->ancestor, o->path);
+ assert(a->path && b->path && o->path && opt->ancestor);
+ if (strcmp(a->path, b->path) || strcmp(a->path, o->path) != 0) {
+ base = mkpathdup("%s:%s", opt->ancestor, o->path);
name1 = mkpathdup("%s:%s", branch1, a->path);
name2 = mkpathdup("%s:%s", branch2, b->path);
} else {
- base_name = opt->ancestor == NULL ? NULL :
- mkpathdup("%s", opt->ancestor);
+ base = mkpathdup("%s", opt->ancestor);
name1 = mkpathdup("%s", branch1);
name2 = mkpathdup("%s", branch2);
}
@@ -1061,11 +1083,11 @@ static int merge_3way(struct merge_options *opt,
read_mmblob(&src1, &a->oid);
read_mmblob(&src2, &b->oid);
- merge_status = ll_merge(result_buf, a->path, &orig, base_name,
+ merge_status = ll_merge(result_buf, a->path, &orig, base,
&src1, name1, &src2, name2,
opt->repo->index, &ll_opts);
- free(base_name);
+ free(base);
free(name1);
free(name2);
free(orig.ptr);
@@ -1160,7 +1182,7 @@ static int merge_submodule(struct merge_options *opt,
struct object_array merges;
int i;
- int search = !opt->call_depth;
+ int search = !opt->priv->call_depth;
/* store a in result in case we fail */
oidcpy(result, a);
@@ -1344,15 +1366,15 @@ static int merge_mode_and_contents(struct merge_options *opt,
&b->oid);
} else if (S_ISLNK(a->mode)) {
switch (opt->recursive_variant) {
- case MERGE_RECURSIVE_NORMAL:
+ case MERGE_VARIANT_NORMAL:
oidcpy(&result->blob.oid, &a->oid);
if (!oid_eq(&a->oid, &b->oid))
result->clean = 0;
break;
- case MERGE_RECURSIVE_OURS:
+ case MERGE_VARIANT_OURS:
oidcpy(&result->blob.oid, &a->oid);
break;
- case MERGE_RECURSIVE_THEIRS:
+ case MERGE_VARIANT_THEIRS:
oidcpy(&result->blob.oid, &b->oid);
break;
}
@@ -1378,10 +1400,11 @@ static int handle_rename_via_dir(struct merge_options *opt,
const struct rename *ren = ci->ren1;
const struct diff_filespec *dest = ren->pair->two;
char *file_path = dest->path;
- int mark_conflicted = (opt->detect_directory_renames == 1);
+ int mark_conflicted = (opt->detect_directory_renames ==
+ MERGE_DIRECTORY_RENAMES_CONFLICT);
assert(ren->dir_rename_original_dest);
- if (!opt->call_depth && would_lose_untracked(opt, dest->path)) {
+ if (!opt->priv->call_depth && would_lose_untracked(opt, dest->path)) {
mark_conflicted = 1;
file_path = unique_path(opt, dest->path, ren->branch);
output(opt, 1, _("Error: Refusing to lose untracked file at %s; "
@@ -1424,12 +1447,12 @@ static int handle_change_delete(struct merge_options *opt,
const char *update_path = path;
int ret = 0;
- if (dir_in_way(opt->repo->index, path, !opt->call_depth, 0) ||
- (!opt->call_depth && would_lose_untracked(opt, path))) {
+ if (dir_in_way(opt->repo->index, path, !opt->priv->call_depth, 0) ||
+ (!opt->priv->call_depth && would_lose_untracked(opt, path))) {
update_path = alt_path = unique_path(opt, path, change_branch);
}
- if (opt->call_depth) {
+ if (opt->priv->call_depth) {
/*
* We cannot arbitrarily accept either a_sha or b_sha as
* correct; since there is no true "middle point" between
@@ -1504,14 +1527,14 @@ static int handle_rename_delete(struct merge_options *opt,
opt->branch2 : opt->branch1);
if (handle_change_delete(opt,
- opt->call_depth ? orig->path : dest->path,
- opt->call_depth ? NULL : orig->path,
+ opt->priv->call_depth ? orig->path : dest->path,
+ opt->priv->call_depth ? NULL : orig->path,
orig, dest,
rename_branch, delete_branch,
_("rename"), _("renamed")))
return -1;
- if (opt->call_depth)
+ if (opt->priv->call_depth)
return remove_file_from_index(opt->repo->index, dest->path);
else
return update_stages(opt, dest->path, NULL,
@@ -1548,7 +1571,7 @@ static int handle_file_collision(struct merge_options *opt,
/*
* In the recursive case, we just opt to undo renames
*/
- if (opt->call_depth && (prev_path1 || prev_path2)) {
+ if (opt->priv->call_depth && (prev_path1 || prev_path2)) {
/* Put first file (a->oid, a->mode) in its original spot */
if (prev_path1) {
if (update_file(opt, 1, a, prev_path1))
@@ -1577,10 +1600,10 @@ static int handle_file_collision(struct merge_options *opt,
/* Remove rename sources if rename/add or rename/rename(2to1) */
if (prev_path1)
remove_file(opt, 1, prev_path1,
- opt->call_depth || would_lose_untracked(opt, prev_path1));
+ opt->priv->call_depth || would_lose_untracked(opt, prev_path1));
if (prev_path2)
remove_file(opt, 1, prev_path2,
- opt->call_depth || would_lose_untracked(opt, prev_path2));
+ opt->priv->call_depth || would_lose_untracked(opt, prev_path2));
/*
* Remove the collision path, if it wouldn't cause dirty contents
@@ -1622,12 +1645,12 @@ static int handle_file_collision(struct merge_options *opt,
null.mode = 0;
if (merge_mode_and_contents(opt, &null, a, b, collide_path,
- branch1, branch2, opt->call_depth * 2, &mfi))
+ branch1, branch2, opt->priv->call_depth * 2, &mfi))
return -1;
mfi.clean &= !alt_path;
if (update_file(opt, mfi.clean, &mfi.blob, update_path))
return -1;
- if (!mfi.clean && !opt->call_depth &&
+ if (!mfi.clean && !opt->priv->call_depth &&
update_stages(opt, collide_path, NULL, a, b))
return -1;
free(alt_path);
@@ -1667,7 +1690,7 @@ static int handle_rename_add(struct merge_options *opt,
&ci->ren1->src_entry->stages[other_stage],
prev_path_desc,
opt->branch1, opt->branch2,
- 1 + opt->call_depth * 2, &mfi))
+ 1 + opt->priv->call_depth * 2, &mfi))
return -1;
free(prev_path_desc);
@@ -1685,7 +1708,7 @@ static char *find_path_for_conflict(struct merge_options *opt,
const char *branch2)
{
char *new_path = NULL;
- if (dir_in_way(opt->repo->index, path, !opt->call_depth, 0)) {
+ if (dir_in_way(opt->repo->index, path, !opt->priv->call_depth, 0)) {
new_path = unique_path(opt, path, branch1);
output(opt, 1, _("%s is a directory in %s adding "
"as %s instead"),
@@ -1716,17 +1739,17 @@ static int handle_rename_rename_1to2(struct merge_options *opt,
"rename \"%s\"->\"%s\" in \"%s\"%s"),
o->path, a->path, ci->ren1->branch,
o->path, b->path, ci->ren2->branch,
- opt->call_depth ? _(" (left unresolved)") : "");
+ opt->priv->call_depth ? _(" (left unresolved)") : "");
path_desc = xstrfmt("%s and %s, both renamed from %s",
a->path, b->path, o->path);
if (merge_mode_and_contents(opt, o, a, b, path_desc,
ci->ren1->branch, ci->ren2->branch,
- opt->call_depth * 2, &mfi))
+ opt->priv->call_depth * 2, &mfi))
return -1;
free(path_desc);
- if (opt->call_depth) {
+ if (opt->priv->call_depth) {
/*
* FIXME: For rename/add-source conflicts (if we could detect
* such), this is wrong. We should instead find a unique
@@ -1841,12 +1864,12 @@ static int handle_rename_rename_2to1(struct merge_options *opt,
&ci->ren1->src_entry->stages[ostage1],
path_side_1_desc,
opt->branch1, opt->branch2,
- 1 + opt->call_depth * 2, &mfi_c1) ||
+ 1 + opt->priv->call_depth * 2, &mfi_c1) ||
merge_mode_and_contents(opt, b,
&ci->ren2->src_entry->stages[ostage2],
c2, path_side_2_desc,
opt->branch1, opt->branch2,
- 1 + opt->call_depth * 2, &mfi_c2))
+ 1 + opt->priv->call_depth * 2, &mfi_c2))
return -1;
free(path_side_1_desc);
free(path_side_2_desc);
@@ -1880,17 +1903,15 @@ static struct diff_queue_struct *get_diffpairs(struct merge_options *opt,
*/
if (opts.detect_rename > DIFF_DETECT_RENAME)
opts.detect_rename = DIFF_DETECT_RENAME;
- opts.rename_limit = opt->merge_rename_limit >= 0 ? opt->merge_rename_limit :
- opt->diff_rename_limit >= 0 ? opt->diff_rename_limit :
- 1000;
+ opts.rename_limit = (opt->rename_limit >= 0) ? opt->rename_limit : 1000;
opts.rename_score = opt->rename_score;
opts.show_rename_progress = opt->show_rename_progress;
opts.output_format = DIFF_FORMAT_NO_OUTPUT;
diff_setup_done(&opts);
diff_tree_oid(&o_tree->object.oid, &tree->object.oid, "", &opts);
diffcore_std(&opts);
- if (opts.needed_rename_limit > opt->needed_rename_limit)
- opt->needed_rename_limit = opts.needed_rename_limit;
+ if (opts.needed_rename_limit > opt->priv->needed_rename_limit)
+ opt->priv->needed_rename_limit = opts.needed_rename_limit;
ret = xmalloc(sizeof(*ret));
*ret = diff_queued_diff;
@@ -2001,7 +2022,7 @@ static void remove_hashmap_entries(struct hashmap *dir_renames,
for (i = 0; i < items_to_remove->nr; i++) {
entry = items_to_remove->items[i].util;
- hashmap_remove(dir_renames, entry, NULL);
+ hashmap_remove(dir_renames, &entry->ent, NULL);
}
string_list_clear(items_to_remove, 0);
}
@@ -2124,8 +2145,8 @@ static void handle_directory_level_conflicts(struct merge_options *opt,
struct string_list remove_from_head = STRING_LIST_INIT_NODUP;
struct string_list remove_from_merge = STRING_LIST_INIT_NODUP;
- hashmap_iter_init(dir_re_head, &iter);
- while ((head_ent = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(dir_re_head, &iter, head_ent,
+ ent /* member name */) {
merge_ent = dir_rename_find_entry(dir_re_merge, head_ent->dir);
if (merge_ent &&
!head_ent->non_unique_new_dir &&
@@ -2149,8 +2170,8 @@ static void handle_directory_level_conflicts(struct merge_options *opt,
remove_hashmap_entries(dir_re_head, &remove_from_head);
remove_hashmap_entries(dir_re_merge, &remove_from_merge);
- hashmap_iter_init(dir_re_merge, &iter);
- while ((merge_ent = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(dir_re_merge, &iter, merge_ent,
+ ent /* member name */) {
head_ent = dir_rename_find_entry(dir_re_head, merge_ent->dir);
if (tree_has_path(opt->repo, merge, merge_ent->dir)) {
/* 2. This wasn't a directory rename after all */
@@ -2229,7 +2250,7 @@ static struct hashmap *get_directory_renames(struct diff_queue_struct *pairs)
if (!entry) {
entry = xmalloc(sizeof(*entry));
dir_rename_entry_init(entry, old_dir);
- hashmap_put(dir_renames, entry);
+ hashmap_put(dir_renames, &entry->ent);
} else {
free(old_dir);
}
@@ -2254,8 +2275,8 @@ static struct hashmap *get_directory_renames(struct diff_queue_struct *pairs)
* we set non_unique_new_dir. Once we've determined the winner (or
* that there is no winner), we no longer need possible_new_dirs.
*/
- hashmap_iter_init(dir_renames, &iter);
- while ((entry = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(dir_renames, &iter, entry,
+ ent /* member name */) {
int max = 0;
int bad_max = 0;
char *best = NULL;
@@ -2358,8 +2379,9 @@ static void compute_collisions(struct hashmap *collisions,
if (!collision_ent) {
collision_ent = xcalloc(1,
sizeof(struct collision_entry));
- hashmap_entry_init(collision_ent, strhash(new_path));
- hashmap_put(collisions, collision_ent);
+ hashmap_entry_init(&collision_ent->ent,
+ strhash(new_path));
+ hashmap_put(collisions, &collision_ent->ent);
collision_ent->target_file = new_path;
} else {
free(new_path);
@@ -2612,12 +2634,12 @@ static struct string_list *get_renames(struct merge_options *opt,
entries);
}
- hashmap_iter_init(&collisions, &iter);
- while ((e = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(&collisions, &iter, e,
+ ent /* member name */) {
free(e->target_file);
string_list_clear(&e->source_files, 0);
}
- hashmap_free(&collisions, 1);
+ hashmap_free_entries(&collisions, struct collision_entry, ent);
return renames;
}
@@ -2830,13 +2852,13 @@ static void initial_cleanup_rename(struct diff_queue_struct *pairs,
struct hashmap_iter iter;
struct dir_rename_entry *e;
- hashmap_iter_init(dir_renames, &iter);
- while ((e = hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(dir_renames, &iter, e,
+ ent /* member name */) {
free(e->dir);
strbuf_release(&e->new_dir);
/* possible_new_dirs already cleared in get_directory_renames */
}
- hashmap_free(dir_renames, 1);
+ hashmap_free_entries(dir_renames, struct dir_rename_entry, ent);
free(dir_renames);
free(pairs->queue);
@@ -2863,8 +2885,9 @@ static int detect_and_process_renames(struct merge_options *opt,
head_pairs = get_diffpairs(opt, common, head);
merge_pairs = get_diffpairs(opt, common, merge);
- if ((opt->detect_directory_renames == 2) ||
- (opt->detect_directory_renames == 1 && !opt->call_depth)) {
+ if ((opt->detect_directory_renames == MERGE_DIRECTORY_RENAMES_TRUE) ||
+ (opt->detect_directory_renames == MERGE_DIRECTORY_RENAMES_CONFLICT &&
+ !opt->priv->call_depth)) {
dir_re_head = get_directory_renames(head_pairs);
dir_re_merge = get_directory_renames(merge_pairs);
@@ -3021,13 +3044,13 @@ static int handle_content_merge(struct merge_file_info *mfi,
reason = _("add/add");
assert(o->path && a->path && b->path);
- if (ci && dir_in_way(opt->repo->index, path, !opt->call_depth,
+ if (ci && dir_in_way(opt->repo->index, path, !opt->priv->call_depth,
S_ISGITLINK(ci->ren1->pair->two->mode)))
df_conflict_remains = 1;
if (merge_mode_and_contents(opt, o, a, b, path,
opt->branch1, opt->branch2,
- opt->call_depth * 2, mfi))
+ opt->priv->call_depth * 2, mfi))
return -1;
/*
@@ -3043,7 +3066,7 @@ static int handle_content_merge(struct merge_file_info *mfi,
output(opt, 3, _("Skipped %s (merged same as existing)"), path);
if (add_cacheinfo(opt, &mfi->blob, path,
- 0, (!opt->call_depth && !is_dirty), 0))
+ 0, (!opt->priv->call_depth && !is_dirty), 0))
return -1;
/*
* However, add_cacheinfo() will delete the old cache entry
@@ -3051,8 +3074,8 @@ static int handle_content_merge(struct merge_file_info *mfi,
* flag to avoid making the file appear as if it were
* deleted by the user.
*/
- pos = index_name_pos(&opt->orig_index, path, strlen(path));
- ce = opt->orig_index.cache[pos];
+ pos = index_name_pos(&opt->priv->orig_index, path, strlen(path));
+ ce = opt->priv->orig_index.cache[pos];
if (ce_skip_worktree(ce)) {
pos = index_name_pos(opt->repo->index, path, strlen(path));
ce = opt->repo->index->cache[pos];
@@ -3073,7 +3096,7 @@ static int handle_content_merge(struct merge_file_info *mfi,
if (df_conflict_remains || is_dirty) {
char *new_path;
- if (opt->call_depth) {
+ if (opt->priv->call_depth) {
remove_file_from_index(opt->repo->index, path);
} else {
if (!mfi->clean) {
@@ -3122,7 +3145,8 @@ static int handle_rename_normal(struct merge_options *opt,
clean = handle_content_merge(&mfi, opt, path, was_dirty(opt, path),
o, a, b, ci);
- if (clean && opt->detect_directory_renames == 1 &&
+ if (clean &&
+ opt->detect_directory_renames == MERGE_DIRECTORY_RENAMES_CONFLICT &&
ren->dir_rename_original_dest) {
if (update_stages(opt, path,
NULL,
@@ -3167,12 +3191,12 @@ static int warn_about_dir_renamed_entries(struct merge_options *opt,
return clean;
/* Sanity checks */
- assert(opt->detect_directory_renames > 0);
+ assert(opt->detect_directory_renames > MERGE_DIRECTORY_RENAMES_NONE);
assert(ren->dir_rename_original_type == 'A' ||
ren->dir_rename_original_type == 'R');
/* Check whether to treat directory renames as a conflict */
- clean = (opt->detect_directory_renames == 2);
+ clean = (opt->detect_directory_renames == MERGE_DIRECTORY_RENAMES_TRUE);
is_add = (ren->dir_rename_original_type == 'A');
if (ren->dir_rename_original_type == 'A' && clean) {
@@ -3331,7 +3355,7 @@ static int process_entry(struct merge_options *opt,
conf = _("directory/file");
}
if (dir_in_way(opt->repo->index, path,
- !opt->call_depth && !S_ISGITLINK(a->mode),
+ !opt->priv->call_depth && !S_ISGITLINK(a->mode),
0)) {
char *new_path = unique_path(opt, path, add_branch);
clean_merge = 0;
@@ -3340,7 +3364,7 @@ static int process_entry(struct merge_options *opt,
conf, path, other_branch, path, new_path);
if (update_file(opt, 0, contents, new_path))
clean_merge = -1;
- else if (opt->call_depth)
+ else if (opt->priv->call_depth)
remove_file_from_index(opt->repo->index, path);
free(new_path);
} else {
@@ -3380,37 +3404,32 @@ static int process_entry(struct merge_options *opt,
return clean_merge;
}
-int merge_trees(struct merge_options *opt,
- struct tree *head,
- struct tree *merge,
- struct tree *common,
- struct tree **result)
+static int merge_trees_internal(struct merge_options *opt,
+ struct tree *head,
+ struct tree *merge,
+ struct tree *merge_base,
+ struct tree **result)
{
struct index_state *istate = opt->repo->index;
int code, clean;
- struct strbuf sb = STRBUF_INIT;
-
- if (!opt->call_depth && repo_index_has_changes(opt->repo, head, &sb)) {
- err(opt, _("Your local changes to the following files would be overwritten by merge:\n %s"),
- sb.buf);
- return -1;
- }
if (opt->subtree_shift) {
- merge = shift_tree_object(opt->repo, head, merge, opt->subtree_shift);
- common = shift_tree_object(opt->repo, head, common, opt->subtree_shift);
+ merge = shift_tree_object(opt->repo, head, merge,
+ opt->subtree_shift);
+ merge_base = shift_tree_object(opt->repo, head, merge_base,
+ opt->subtree_shift);
}
- if (oid_eq(&common->object.oid, &merge->object.oid)) {
+ if (oid_eq(&merge_base->object.oid, &merge->object.oid)) {
output(opt, 0, _("Already up to date!"));
*result = head;
return 1;
}
- code = unpack_trees_start(opt, common, head, merge);
+ code = unpack_trees_start(opt, merge_base, head, merge);
if (code != 0) {
- if (show(opt, 4) || opt->call_depth)
+ if (show(opt, 4) || opt->priv->call_depth)
err(opt, _("merging of trees %s and %s failed"),
oid_to_hex(&head->object.oid),
oid_to_hex(&merge->object.oid));
@@ -3429,12 +3448,13 @@ int merge_trees(struct merge_options *opt,
* opposed to declaring a local hashmap is for convenience
* so that we don't have to pass it around.
*/
- hashmap_init(&opt->current_file_dir_set, path_hashmap_cmp, NULL, 512);
+ hashmap_init(&opt->priv->current_file_dir_set, path_hashmap_cmp,
+ NULL, 512);
get_files_dirs(opt, head);
get_files_dirs(opt, merge);
entries = get_unmerged(opt->repo->index);
- clean = detect_and_process_renames(opt, common, head, merge,
+ clean = detect_and_process_renames(opt, merge_base, head, merge,
entries, &re_info);
record_df_conflict_files(opt, entries);
if (clean < 0)
@@ -3465,7 +3485,8 @@ int merge_trees(struct merge_options *opt,
string_list_clear(entries, 1);
free(entries);
- hashmap_free(&opt->current_file_dir_set, 1);
+ hashmap_free_entries(&opt->priv->current_file_dir_set,
+ struct path_hashmap_entry, e);
if (clean < 0) {
unpack_trees_finish(opt);
@@ -3477,7 +3498,8 @@ int merge_trees(struct merge_options *opt,
unpack_trees_finish(opt);
- if (opt->call_depth && !(*result = write_tree_from_memory(opt)))
+ if (opt->priv->call_depth &&
+ !(*result = write_in_core_index_as_tree(opt->repo)))
return -1;
return clean;
@@ -3498,16 +3520,18 @@ static struct commit_list *reverse_commit_list(struct commit_list *list)
* Merge the commits h1 and h2, return the resulting virtual
* commit object and a flag indicating the cleanness of the merge.
*/
-int merge_recursive(struct merge_options *opt,
- struct commit *h1,
- struct commit *h2,
- struct commit_list *ca,
- struct commit **result)
+static int merge_recursive_internal(struct merge_options *opt,
+ struct commit *h1,
+ struct commit *h2,
+ struct commit_list *merge_bases,
+ struct commit **result)
{
struct commit_list *iter;
- struct commit *merged_common_ancestors;
- struct tree *mrtree;
+ struct commit *merged_merge_bases;
+ struct tree *result_tree;
int clean;
+ const char *ancestor_name;
+ struct strbuf merge_base_abbrev = STRBUF_INIT;
if (show(opt, 4)) {
output(opt, 4, _("Merging:"));
@@ -3515,32 +3539,43 @@ int merge_recursive(struct merge_options *opt,
output_commit_title(opt, h2);
}
- if (!ca) {
- ca = get_merge_bases(h1, h2);
- ca = reverse_commit_list(ca);
+ if (!merge_bases) {
+ merge_bases = get_merge_bases(h1, h2);
+ merge_bases = reverse_commit_list(merge_bases);
}
if (show(opt, 5)) {
- unsigned cnt = commit_list_count(ca);
+ unsigned cnt = commit_list_count(merge_bases);
output(opt, 5, Q_("found %u common ancestor:",
"found %u common ancestors:", cnt), cnt);
- for (iter = ca; iter; iter = iter->next)
+ for (iter = merge_bases; iter; iter = iter->next)
output_commit_title(opt, iter->item);
}
- merged_common_ancestors = pop_commit(&ca);
- if (merged_common_ancestors == NULL) {
+ merged_merge_bases = pop_commit(&merge_bases);
+ if (merged_merge_bases == NULL) {
/* if there is no common ancestor, use an empty tree */
struct tree *tree;
tree = lookup_tree(opt->repo, opt->repo->hash_algo->empty_tree);
- merged_common_ancestors = make_virtual_commit(opt->repo, tree, "ancestor");
+ merged_merge_bases = make_virtual_commit(opt->repo, tree,
+ "ancestor");
+ ancestor_name = "empty tree";
+ } else if (opt->ancestor && !opt->priv->call_depth) {
+ ancestor_name = opt->ancestor;
+ } else if (merge_bases) {
+ ancestor_name = "merged common ancestors";
+ } else {
+ strbuf_add_unique_abbrev(&merge_base_abbrev,
+ &merged_merge_bases->object.oid,
+ DEFAULT_ABBREV);
+ ancestor_name = merge_base_abbrev.buf;
}
- for (iter = ca; iter; iter = iter->next) {
+ for (iter = merge_bases; iter; iter = iter->next) {
const char *saved_b1, *saved_b2;
- opt->call_depth++;
+ opt->priv->call_depth++;
/*
* When the merge fails, the result contains files
* with conflict markers. The cleanness flag is
@@ -3554,45 +3589,134 @@ int merge_recursive(struct merge_options *opt,
saved_b2 = opt->branch2;
opt->branch1 = "Temporary merge branch 1";
opt->branch2 = "Temporary merge branch 2";
- if (merge_recursive(opt, merged_common_ancestors, iter->item,
- NULL, &merged_common_ancestors) < 0)
+ if (merge_recursive_internal(opt, merged_merge_bases, iter->item,
+ NULL, &merged_merge_bases) < 0)
return -1;
opt->branch1 = saved_b1;
opt->branch2 = saved_b2;
- opt->call_depth--;
+ opt->priv->call_depth--;
- if (!merged_common_ancestors)
+ if (!merged_merge_bases)
return err(opt, _("merge returned no commit"));
}
discard_index(opt->repo->index);
- if (!opt->call_depth)
+ if (!opt->priv->call_depth)
repo_read_index(opt->repo);
- opt->ancestor = "merged common ancestors";
- clean = merge_trees(opt, get_commit_tree(h1), get_commit_tree(h2),
- get_commit_tree(merged_common_ancestors),
- &mrtree);
+ opt->ancestor = ancestor_name;
+ clean = merge_trees_internal(opt,
+ repo_get_commit_tree(opt->repo, h1),
+ repo_get_commit_tree(opt->repo, h2),
+ repo_get_commit_tree(opt->repo,
+ merged_merge_bases),
+ &result_tree);
+ strbuf_release(&merge_base_abbrev);
+ opt->ancestor = NULL; /* avoid accidental re-use of opt->ancestor */
if (clean < 0) {
flush_output(opt);
return clean;
}
- if (opt->call_depth) {
- *result = make_virtual_commit(opt->repo, mrtree, "merged tree");
+ if (opt->priv->call_depth) {
+ *result = make_virtual_commit(opt->repo, result_tree,
+ "merged tree");
commit_list_insert(h1, &(*result)->parents);
commit_list_insert(h2, &(*result)->parents->next);
}
+ return clean;
+}
+
+static int merge_start(struct merge_options *opt, struct tree *head)
+{
+ struct strbuf sb = STRBUF_INIT;
+
+ /* Sanity checks on opt */
+ assert(opt->repo);
+
+ assert(opt->branch1 && opt->branch2);
+
+ assert(opt->detect_renames >= -1 &&
+ opt->detect_renames <= DIFF_DETECT_COPY);
+ assert(opt->detect_directory_renames >= MERGE_DIRECTORY_RENAMES_NONE &&
+ opt->detect_directory_renames <= MERGE_DIRECTORY_RENAMES_TRUE);
+ assert(opt->rename_limit >= -1);
+ assert(opt->rename_score >= 0 && opt->rename_score <= MAX_SCORE);
+ assert(opt->show_rename_progress >= 0 && opt->show_rename_progress <= 1);
+
+ assert(opt->xdl_opts >= 0);
+ assert(opt->recursive_variant >= MERGE_VARIANT_NORMAL &&
+ opt->recursive_variant <= MERGE_VARIANT_THEIRS);
+
+ assert(opt->verbosity >= 0 && opt->verbosity <= 5);
+ assert(opt->buffer_output <= 2);
+ assert(opt->obuf.len == 0);
+
+ assert(opt->priv == NULL);
+
+ /* Sanity check on repo state; index must match head */
+ if (repo_index_has_changes(opt->repo, head, &sb)) {
+ err(opt, _("Your local changes to the following files would be overwritten by merge:\n %s"),
+ sb.buf);
+ strbuf_release(&sb);
+ return -1;
+ }
+
+ opt->priv = xcalloc(1, sizeof(*opt->priv));
+ string_list_init(&opt->priv->df_conflict_file_set, 1);
+ return 0;
+}
+
+static void merge_finalize(struct merge_options *opt)
+{
flush_output(opt);
- if (!opt->call_depth && opt->buffer_output < 2)
+ if (!opt->priv->call_depth && opt->buffer_output < 2)
strbuf_release(&opt->obuf);
if (show(opt, 2))
diff_warn_rename_limit("merge.renamelimit",
- opt->needed_rename_limit, 0);
+ opt->priv->needed_rename_limit, 0);
+ FREE_AND_NULL(opt->priv);
+}
+
+int merge_trees(struct merge_options *opt,
+ struct tree *head,
+ struct tree *merge,
+ struct tree *merge_base)
+{
+ int clean;
+ struct tree *ignored;
+
+ assert(opt->ancestor != NULL);
+
+ if (merge_start(opt, head))
+ return -1;
+ clean = merge_trees_internal(opt, head, merge, merge_base, &ignored);
+ merge_finalize(opt);
+
return clean;
}
-static struct commit *get_ref(struct repository *repo, const struct object_id *oid,
+int merge_recursive(struct merge_options *opt,
+ struct commit *h1,
+ struct commit *h2,
+ struct commit_list *merge_bases,
+ struct commit **result)
+{
+ int clean;
+
+ assert(opt->ancestor == NULL ||
+ !strcmp(opt->ancestor, "constructed merge base"));
+
+ if (merge_start(opt, repo_get_commit_tree(opt->repo, h1)))
+ return -1;
+ clean = merge_recursive_internal(opt, h1, h2, merge_bases, result);
+ merge_finalize(opt);
+
+ return clean;
+}
+
+static struct commit *get_ref(struct repository *repo,
+ const struct object_id *oid,
const char *name)
{
struct object *object;
@@ -3613,8 +3737,8 @@ static struct commit *get_ref(struct repository *repo, const struct object_id *o
int merge_recursive_generic(struct merge_options *opt,
const struct object_id *head,
const struct object_id *merge,
- int num_base_list,
- const struct object_id **base_list,
+ int num_merge_bases,
+ const struct object_id **merge_bases,
struct commit **result)
{
int clean;
@@ -3623,15 +3747,18 @@ int merge_recursive_generic(struct merge_options *opt,
struct commit *next_commit = get_ref(opt->repo, merge, opt->branch2);
struct commit_list *ca = NULL;
- if (base_list) {
+ if (merge_bases) {
int i;
- for (i = 0; i < num_base_list; ++i) {
+ for (i = 0; i < num_merge_bases; ++i) {
struct commit *base;
- if (!(base = get_ref(opt->repo, base_list[i], oid_to_hex(base_list[i]))))
+ if (!(base = get_ref(opt->repo, merge_bases[i],
+ oid_to_hex(merge_bases[i]))))
return err(opt, _("Could not parse object '%s'"),
- oid_to_hex(base_list[i]));
+ oid_to_hex(merge_bases[i]));
commit_list_insert(base, &ca);
}
+ if (num_merge_bases == 1)
+ opt->ancestor = "constructed merge base";
}
repo_hold_locked_index(opt->repo, &lock, LOCK_DIE_ON_ERROR);
@@ -3653,22 +3780,25 @@ static void merge_recursive_config(struct merge_options *opt)
{
char *value = NULL;
git_config_get_int("merge.verbosity", &opt->verbosity);
- git_config_get_int("diff.renamelimit", &opt->diff_rename_limit);
- git_config_get_int("merge.renamelimit", &opt->merge_rename_limit);
+ git_config_get_int("diff.renamelimit", &opt->rename_limit);
+ git_config_get_int("merge.renamelimit", &opt->rename_limit);
if (!git_config_get_string("diff.renames", &value)) {
- opt->diff_detect_rename = git_config_rename("diff.renames", value);
+ opt->detect_renames = git_config_rename("diff.renames", value);
free(value);
}
if (!git_config_get_string("merge.renames", &value)) {
- opt->merge_detect_rename = git_config_rename("merge.renames", value);
+ opt->detect_renames = git_config_rename("merge.renames", value);
free(value);
}
if (!git_config_get_string("merge.directoryrenames", &value)) {
int boolval = git_parse_maybe_bool(value);
if (0 <= boolval) {
- opt->detect_directory_renames = boolval ? 2 : 0;
+ opt->detect_directory_renames = boolval ?
+ MERGE_DIRECTORY_RENAMES_TRUE :
+ MERGE_DIRECTORY_RENAMES_NONE;
} else if (!strcasecmp(value, "conflict")) {
- opt->detect_directory_renames = 1;
+ opt->detect_directory_renames =
+ MERGE_DIRECTORY_RENAMES_CONFLICT;
} /* avoid erroring on values from future versions of git */
free(value);
}
@@ -3680,23 +3810,25 @@ void init_merge_options(struct merge_options *opt,
{
const char *merge_verbosity;
memset(opt, 0, sizeof(struct merge_options));
+
opt->repo = repo;
+
+ opt->detect_renames = -1;
+ opt->detect_directory_renames = MERGE_DIRECTORY_RENAMES_CONFLICT;
+ opt->rename_limit = -1;
+
opt->verbosity = 2;
opt->buffer_output = 1;
- opt->diff_rename_limit = -1;
- opt->merge_rename_limit = -1;
+ strbuf_init(&opt->obuf, 0);
+
opt->renormalize = 0;
- opt->diff_detect_rename = -1;
- opt->merge_detect_rename = -1;
- opt->detect_directory_renames = 1;
+
merge_recursive_config(opt);
merge_verbosity = getenv("GIT_MERGE_VERBOSITY");
if (merge_verbosity)
opt->verbosity = strtol(merge_verbosity, NULL, 10);
if (opt->verbosity >= 5)
opt->buffer_output = 0;
- strbuf_init(&opt->obuf, 0);
- string_list_init(&opt->df_conflict_file_set, 1);
}
int parse_merge_opt(struct merge_options *opt, const char *s)
@@ -3706,9 +3838,9 @@ int parse_merge_opt(struct merge_options *opt, const char *s)
if (!s || !*s)
return -1;
if (!strcmp(s, "ours"))
- opt->recursive_variant = MERGE_RECURSIVE_OURS;
+ opt->recursive_variant = MERGE_VARIANT_OURS;
else if (!strcmp(s, "theirs"))
- opt->recursive_variant = MERGE_RECURSIVE_THEIRS;
+ opt->recursive_variant = MERGE_VARIANT_THEIRS;
else if (!strcmp(s, "subtree"))
opt->subtree_shift = "";
else if (skip_prefix(s, "subtree=", &arg))
@@ -3739,16 +3871,16 @@ int parse_merge_opt(struct merge_options *opt, const char *s)
else if (!strcmp(s, "no-renormalize"))
opt->renormalize = 0;
else if (!strcmp(s, "no-renames"))
- opt->merge_detect_rename = 0;
+ opt->detect_renames = 0;
else if (!strcmp(s, "find-renames")) {
- opt->merge_detect_rename = 1;
+ opt->detect_renames = 1;
opt->rename_score = 0;
}
else if (skip_prefix(s, "find-renames=", &arg) ||
skip_prefix(s, "rename-threshold=", &arg)) {
if ((opt->rename_score = parse_rename_score(&arg)) == -1 || *arg != 0)
return -1;
- opt->merge_detect_rename = 1;
+ opt->detect_renames = 1;
}
/*
* Please update $__git_merge_strategy_options in
diff --git a/merge-recursive.h b/merge-recursive.h
index c2b7bb65c6..978847e672 100644
--- a/merge-recursive.h
+++ b/merge-recursive.h
@@ -1,104 +1,124 @@
#ifndef MERGE_RECURSIVE_H
#define MERGE_RECURSIVE_H
-#include "string-list.h"
-#include "unpack-trees.h"
+#include "strbuf.h"
struct commit;
-
+struct commit_list;
+struct object_id;
struct repository;
+struct tree;
+struct merge_options_internal;
struct merge_options {
+ struct repository *repo;
+
+ /* ref names used in console messages and conflict markers */
const char *ancestor;
const char *branch1;
const char *branch2;
+
+ /* rename related options */
+ int detect_renames;
enum {
- MERGE_RECURSIVE_NORMAL = 0,
- MERGE_RECURSIVE_OURS,
- MERGE_RECURSIVE_THEIRS
+ MERGE_DIRECTORY_RENAMES_NONE = 0,
+ MERGE_DIRECTORY_RENAMES_CONFLICT = 1,
+ MERGE_DIRECTORY_RENAMES_TRUE = 2
+ } detect_directory_renames;
+ int rename_limit;
+ int rename_score;
+ int show_rename_progress;
+
+ /* xdiff-related options (patience, ignore whitespace, ours/theirs) */
+ long xdl_opts;
+ enum {
+ MERGE_VARIANT_NORMAL = 0,
+ MERGE_VARIANT_OURS,
+ MERGE_VARIANT_THEIRS
} recursive_variant;
- const char *subtree_shift;
+
+ /* console output related options */
+ int verbosity;
unsigned buffer_output; /* 1: output at end, 2: keep buffered */
+ struct strbuf obuf; /* output buffer; if buffer_output == 2, caller
+ * must handle and call strbuf_release */
+
+ /* miscellaneous control options */
+ const char *subtree_shift;
unsigned renormalize : 1;
- long xdl_opts;
- int verbosity;
- int detect_directory_renames;
- int diff_detect_rename;
- int merge_detect_rename;
- int diff_rename_limit;
- int merge_rename_limit;
- int rename_score;
- int needed_rename_limit;
- int show_rename_progress;
- int call_depth;
- struct strbuf obuf;
- struct hashmap current_file_dir_set;
- struct string_list df_conflict_file_set;
- struct unpack_trees_options unpack_opts;
- struct index_state orig_index;
- struct repository *repo;
+
+ /* internal fields used by the implementation */
+ struct merge_options_internal *priv;
};
+void init_merge_options(struct merge_options *opt, struct repository *repo);
+
+/* parse the option in s and update the relevant field of opt */
+int parse_merge_opt(struct merge_options *opt, const char *s);
+
/*
- * For dir_rename_entry, directory names are stored as a full path from the
- * toplevel of the repository and do not include a trailing '/'. Also:
- *
- * dir: original name of directory being renamed
- * non_unique_new_dir: if true, could not determine new_dir
- * new_dir: final name of directory being renamed
- * possible_new_dirs: temporary used to help determine new_dir; see comments
- * in get_directory_renames() for details
+ * RETURN VALUES: All the merge_* functions below return a value as follows:
+ * > 0 Merge was clean
+ * = 0 Merge had conflicts
+ * < 0 Merge hit an unexpected and unrecoverable problem (e.g. disk
+ * full) and aborted merge part-way through.
*/
-struct dir_rename_entry {
- struct hashmap_entry ent; /* must be the first member! */
- char *dir;
- unsigned non_unique_new_dir:1;
- struct strbuf new_dir;
- struct string_list possible_new_dirs;
-};
-
-struct collision_entry {
- struct hashmap_entry ent; /* must be the first member! */
- char *target_file;
- struct string_list source_files;
- unsigned reported_already:1;
-};
-static inline int merge_detect_rename(struct merge_options *o)
-{
- return o->merge_detect_rename >= 0 ? o->merge_detect_rename :
- o->diff_detect_rename >= 0 ? o->diff_detect_rename : 1;
-}
+/*
+ * rename-detecting three-way merge, no recursion.
+ *
+ * Outputs:
+ * - See RETURN VALUES above
+ * - No commit is created
+ * - opt->repo->index has the new index
+ * - $GIT_INDEX_FILE is not updated
+ * - The working tree is updated with results of the merge
+ */
+int merge_trees(struct merge_options *opt,
+ struct tree *head,
+ struct tree *merge,
+ struct tree *merge_base);
-/* merge_trees() but with recursive ancestor consolidation */
-int merge_recursive(struct merge_options *o,
+/*
+ * merge_recursive is like merge_trees() but with recursive ancestor
+ * consolidation and, if the commit is clean, creation of a commit.
+ *
+ * NOTE: empirically, about a decade ago it was determined that with more
+ * than two merge bases, optimal behavior was found when the
+ * merge_bases were passed in the order of oldest commit to newest
+ * commit. Also, merge_bases will be consumed (emptied) so make a
+ * copy if you need it.
+ *
+ * Outputs:
+ * - See RETURN VALUES above
+ * - If merge is clean, a commit is created and its address written to *result
+ * - opt->repo->index has the new index
+ * - $GIT_INDEX_FILE is not updated
+ * - The working tree is updated with results of the merge
+ */
+int merge_recursive(struct merge_options *opt,
struct commit *h1,
struct commit *h2,
- struct commit_list *ancestors,
+ struct commit_list *merge_bases,
struct commit **result);
-/* rename-detecting three-way merge, no recursion */
-int merge_trees(struct merge_options *o,
- struct tree *head,
- struct tree *merge,
- struct tree *common,
- struct tree **result);
-
/*
- * "git-merge-recursive" can be fed trees; wrap them into
- * virtual commits and call merge_recursive() proper.
+ * merge_recursive_generic can operate on trees instead of commits, by
+ * wrapping the trees into virtual commits, and calling merge_recursive().
+ * It also writes out the in-memory index to disk if the merge is successful.
+ *
+ * Outputs:
+ * - See RETURN VALUES above
+ * - If merge is clean, a commit is created and its address written to *result
+ * - opt->repo->index has the new index
+ * - $GIT_INDEX_FILE is updated
+ * - The working tree is updated with results of the merge
*/
-int merge_recursive_generic(struct merge_options *o,
+int merge_recursive_generic(struct merge_options *opt,
const struct object_id *head,
const struct object_id *merge,
- int num_ca,
- const struct object_id **ca,
+ int num_merge_bases,
+ const struct object_id **merge_bases,
struct commit **result);
-void init_merge_options(struct merge_options *o,
- struct repository *repo);
-struct tree *write_tree_from_memory(struct merge_options *o);
-
-int parse_merge_opt(struct merge_options *out, const char *s);
-
#endif
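
The rewritten header above now documents a small public surface: init_merge_options(), the shared return convention, and the three merge entry points. A minimal caller sketch, assuming it is compiled inside git's own tree; the helper name and the branch labels are illustrative, not taken from the patch:

#include "cache.h"
#include "commit.h"
#include "merge-recursive.h"

/*
 * Hypothetical helper: merge two commits the way the recursive strategy
 * does, following the documented return convention
 * (> 0 clean, == 0 conflicts, < 0 unrecoverable error).
 */
static int merge_two_commits(struct repository *repo,
			     struct commit *ours, struct commit *theirs)
{
	struct merge_options opt;
	struct commit *result = NULL;
	int clean;

	init_merge_options(&opt, repo);
	opt.branch1 = "HEAD";	/* labels used in conflict markers */
	opt.branch2 = "topic";

	/* passing NULL lets merge_recursive() compute the merge bases */
	clean = merge_recursive(&opt, ours, theirs, NULL, &result);
	if (clean < 0)
		return error(_("unrecoverable problem during merge"));
	if (!clean)
		warning(_("merge produced conflicts"));
	return clean;
}

Note that opt.ancestor is left NULL here, matching the assertion merge_recursive() now makes; only merge_trees() requires the caller to label the ancestor up front.
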
diff --git a/midx.c b/midx.c
index d649644420..f29afc0d2d 100644
--- a/midx.c
+++ b/midx.c
@@ -19,8 +19,7 @@
#define MIDX_BYTE_NUM_PACKS 8
#define MIDX_HASH_VERSION 1
#define MIDX_HEADER_SIZE 12
-#define MIDX_HASH_LEN 20
-#define MIDX_MIN_SIZE (MIDX_HEADER_SIZE + MIDX_HASH_LEN)
+#define MIDX_MIN_SIZE (MIDX_HEADER_SIZE + the_hash_algo->rawsz)
#define MIDX_MAX_CHUNKS 5
#define MIDX_CHUNK_ALIGNMENT 4
@@ -93,7 +92,7 @@ struct multi_pack_index *load_multi_pack_index(const char *object_dir, int local
hash_version = m->data[MIDX_BYTE_HASH_VERSION];
if (hash_version != MIDX_HASH_VERSION)
die(_("hash version %u does not match"), hash_version);
- m->hash_len = MIDX_HASH_LEN;
+ m->hash_len = the_hash_algo->rawsz;
m->num_chunks = m->data[MIDX_BYTE_NUM_CHUNKS];
@@ -234,7 +233,7 @@ int prepare_midx_pack(struct repository *r, struct multi_pack_index *m, uint32_t
int bsearch_midx(const struct object_id *oid, struct multi_pack_index *m, uint32_t *result)
{
return bsearch_hash(oid->hash, m->chunk_oid_fanout, m->chunk_oid_lookup,
- MIDX_HASH_LEN, result);
+ the_hash_algo->rawsz, result);
}
struct object_id *nth_midxed_object_oid(struct object_id *oid,
@@ -928,7 +927,7 @@ static int write_midx_internal(const char *object_dir, struct multi_pack_index *
cur_chunk++;
chunk_ids[cur_chunk] = MIDX_CHUNKID_OBJECTOFFSETS;
- chunk_offsets[cur_chunk] = chunk_offsets[cur_chunk - 1] + nr_entries * MIDX_HASH_LEN;
+ chunk_offsets[cur_chunk] = chunk_offsets[cur_chunk - 1] + nr_entries * the_hash_algo->rawsz;
cur_chunk++;
chunk_offsets[cur_chunk] = chunk_offsets[cur_chunk - 1] + nr_entries * MIDX_CHUNK_OFFSET_WIDTH;
@@ -976,7 +975,7 @@ static int write_midx_internal(const char *object_dir, struct multi_pack_index *
break;
case MIDX_CHUNKID_OIDLOOKUP:
- written += write_midx_oid_lookup(f, MIDX_HASH_LEN, entries, nr_entries);
+ written += write_midx_oid_lookup(f, the_hash_algo->rawsz, entries, nr_entries);
break;
case MIDX_CHUNKID_OBJECTOFFSETS:
diff --git a/name-hash.c b/name-hash.c
index 695908609f..ceb1d7bd6f 100644
--- a/name-hash.c
+++ b/name-hash.c
@@ -17,14 +17,16 @@ struct dir_entry {
};
static int dir_entry_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
- const struct dir_entry *e1 = entry;
- const struct dir_entry *e2 = entry_or_key;
+ const struct dir_entry *e1, *e2;
const char *name = keydata;
+ e1 = container_of(eptr, const struct dir_entry, ent);
+ e2 = container_of(entry_or_key, const struct dir_entry, ent);
+
return e1->namelen != e2->namelen || strncasecmp(e1->name,
name ? name : e2->name, e1->namelen);
}
@@ -33,9 +35,9 @@ static struct dir_entry *find_dir_entry__hash(struct index_state *istate,
const char *name, unsigned int namelen, unsigned int hash)
{
struct dir_entry key;
- hashmap_entry_init(&key, hash);
+ hashmap_entry_init(&key.ent, hash);
key.namelen = namelen;
- return hashmap_get(&istate->dir_hash, &key, name);
+ return hashmap_get_entry(&istate->dir_hash, &key, ent, name);
}
static struct dir_entry *find_dir_entry(struct index_state *istate,
@@ -68,9 +70,9 @@ static struct dir_entry *hash_dir_entry(struct index_state *istate,
if (!dir) {
/* not found, create it and add to hash table */
FLEX_ALLOC_MEM(dir, name, ce->name, namelen);
- hashmap_entry_init(dir, memihash(ce->name, namelen));
+ hashmap_entry_init(&dir->ent, memihash(ce->name, namelen));
dir->namelen = namelen;
- hashmap_add(&istate->dir_hash, dir);
+ hashmap_add(&istate->dir_hash, &dir->ent);
/* recursively add missing parent directories */
dir->parent = hash_dir_entry(istate, ce, namelen);
@@ -95,7 +97,7 @@ static void remove_dir_entry(struct index_state *istate, struct cache_entry *ce)
struct dir_entry *dir = hash_dir_entry(istate, ce, ce_namelen(ce));
while (dir && !(--dir->nr)) {
struct dir_entry *parent = dir->parent;
- hashmap_remove(&istate->dir_hash, dir, NULL);
+ hashmap_remove(&istate->dir_hash, &dir->ent, NULL);
free(dir);
dir = parent;
}
@@ -106,20 +108,23 @@ static void hash_index_entry(struct index_state *istate, struct cache_entry *ce)
if (ce->ce_flags & CE_HASHED)
return;
ce->ce_flags |= CE_HASHED;
- hashmap_entry_init(ce, memihash(ce->name, ce_namelen(ce)));
- hashmap_add(&istate->name_hash, ce);
+ hashmap_entry_init(&ce->ent, memihash(ce->name, ce_namelen(ce)));
+ hashmap_add(&istate->name_hash, &ce->ent);
if (ignore_case)
add_dir_entry(istate, ce);
}
static int cache_entry_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *remove)
{
- const struct cache_entry *ce1 = entry;
- const struct cache_entry *ce2 = entry_or_key;
+ const struct cache_entry *ce1, *ce2;
+
+ ce1 = container_of(eptr, const struct cache_entry, ent);
+ ce2 = container_of(entry_or_key, const struct cache_entry, ent);
+
/*
* For remove_name_hash, find the exact entry (pointer equality); for
* index_file_exists, find all entries with matching hash code and
@@ -280,10 +285,10 @@ static struct dir_entry *hash_dir_entry_with_parent_and_prefix(
dir = find_dir_entry__hash(istate, prefix->buf, prefix->len, hash);
if (!dir) {
FLEX_ALLOC_MEM(dir, name, prefix->buf, prefix->len);
- hashmap_entry_init(dir, hash);
+ hashmap_entry_init(&dir->ent, hash);
dir->namelen = prefix->len;
dir->parent = parent;
- hashmap_add(&istate->dir_hash, dir);
+ hashmap_add(&istate->dir_hash, &dir->ent);
if (parent) {
unlock_dir_mutex(lock_nr);
@@ -472,8 +477,8 @@ static void *lazy_name_thread_proc(void *_data)
for (k = 0; k < d->istate->cache_nr; k++) {
struct cache_entry *ce_k = d->istate->cache[k];
ce_k->ce_flags |= CE_HASHED;
- hashmap_entry_init(ce_k, d->lazy_entries[k].hash_name);
- hashmap_add(&d->istate->name_hash, ce_k);
+ hashmap_entry_init(&ce_k->ent, d->lazy_entries[k].hash_name);
+ hashmap_add(&d->istate->name_hash, &ce_k->ent);
}
return NULL;
@@ -625,7 +630,7 @@ void remove_name_hash(struct index_state *istate, struct cache_entry *ce)
if (!istate->name_hash_initialized || !(ce->ce_flags & CE_HASHED))
return;
ce->ce_flags &= ~CE_HASHED;
- hashmap_remove(&istate->name_hash, ce, ce);
+ hashmap_remove(&istate->name_hash, &ce->ent, ce);
if (ignore_case)
remove_dir_entry(istate, ce);
@@ -702,15 +707,15 @@ void adjust_dirname_case(struct index_state *istate, char *name)
struct cache_entry *index_file_exists(struct index_state *istate, const char *name, int namelen, int icase)
{
struct cache_entry *ce;
+ unsigned int hash = memihash(name, namelen);
lazy_init_name_hash(istate);
- ce = hashmap_get_from_hash(&istate->name_hash,
- memihash(name, namelen), NULL);
- while (ce) {
+ ce = hashmap_get_entry_from_hash(&istate->name_hash, hash, NULL,
+ struct cache_entry, ent);
+ hashmap_for_each_entry_from(&istate->name_hash, ce, ent) {
if (same_name(ce, name, namelen, icase))
return ce;
- ce = hashmap_get_next(&istate->name_hash, ce);
}
return NULL;
}
@@ -721,6 +726,6 @@ void free_name_hash(struct index_state *istate)
return;
istate->name_hash_initialized = 0;
- hashmap_free(&istate->name_hash, 0);
- hashmap_free(&istate->dir_hash, 1);
+ hashmap_free(&istate->name_hash);
+ hashmap_free_entries(&istate->dir_hash, struct dir_entry, ent);
}
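
The name-hash.c conversion just above shows the hashmap API change that runs through this whole series: entries embed a struct hashmap_entry (which no longer has to be the first member), comparison callbacks receive hashmap_entry pointers and recover their containers with container_of(), iteration uses hashmap_for_each_entry(), and teardown uses hashmap_free_entries() when the entries themselves should be freed. A condensed sketch of the pattern, assuming this version of hashmap.h; the 'struct label' type and its fields are illustrative:

#include "cache.h"
#include "hashmap.h"

struct label {
	struct hashmap_entry ent;	/* embedded; need not be first anymore */
	char *name;
};

static int label_cmp(const void *cmp_data,
		     const struct hashmap_entry *eptr,
		     const struct hashmap_entry *entry_or_key,
		     const void *keydata)
{
	const struct label *a = container_of(eptr, const struct label, ent);
	const struct label *b = container_of(entry_or_key, const struct label, ent);

	/* keydata is the bare key when the caller looks up by string */
	return strcmp(a->name, keydata ? (const char *)keydata : b->name);
}

static void label_demo(void)
{
	struct hashmap map;
	struct hashmap_iter iter;
	struct label *l;

	hashmap_init(&map, label_cmp, NULL, 0);

	l = xmalloc(sizeof(*l));
	l->name = xstrdup("example");
	hashmap_entry_init(&l->ent, strhash(l->name));
	hashmap_add(&map, &l->ent);

	hashmap_for_each_entry(&map, &iter, l, ent /* member name */)
		free(l->name);

	/* frees each containing 'struct label' through its 'ent' member */
	hashmap_free_entries(&map, struct label, ent);
}
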
diff --git a/notes.c b/notes.c
index 75c028b300..03e7d0cd2d 100644
--- a/notes.c
+++ b/notes.c
@@ -269,8 +269,10 @@ static int note_tree_insert(struct notes_tree *t, struct int_node *tree,
case PTR_TYPE_NOTE:
if (oideq(&l->key_oid, &entry->key_oid)) {
/* skip concatenation if l == entry */
- if (oideq(&l->val_oid, &entry->val_oid))
+ if (oideq(&l->val_oid, &entry->val_oid)) {
+ free(entry);
return 0;
+ }
ret = combine_notes(&l->val_oid,
&entry->val_oid);
@@ -458,7 +460,7 @@ static void load_subtree(struct notes_tree *t, struct leaf_node *subtree,
die("Failed to load %s %s into notes tree "
"from %s",
type == PTR_TYPE_NOTE ? "note" : "subtree",
- oid_to_hex(&l->key_oid), t->ref);
+ oid_to_hex(&object_oid), t->ref);
continue;
diff --git a/object.c b/object.c
index 07bdd5b26e..3b8b8c55c9 100644
--- a/object.c
+++ b/object.c
@@ -7,7 +7,6 @@
#include "commit.h"
#include "tag.h"
#include "alloc.h"
-#include "object-store.h"
#include "packfile.h"
#include "commit-graph.h"
diff --git a/oidmap.c b/oidmap.c
index 6d6e840d03..423aa014a3 100644
--- a/oidmap.c
+++ b/oidmap.c
@@ -2,14 +2,18 @@
#include "oidmap.h"
static int oidmap_neq(const void *hashmap_cmp_fn_data,
- const void *entry, const void *entry_or_key,
+ const struct hashmap_entry *e1,
+ const struct hashmap_entry *e2,
const void *keydata)
{
- const struct oidmap_entry *entry_ = entry;
+ const struct oidmap_entry *a, *b;
+
+ a = container_of(e1, const struct oidmap_entry, internal_entry);
+ b = container_of(e2, const struct oidmap_entry, internal_entry);
+
if (keydata)
- return !oideq(&entry_->oid, (const struct object_id *) keydata);
- return !oideq(&entry_->oid,
- &((const struct oidmap_entry *) entry_or_key)->oid);
+ return !oideq(&a->oid, (const struct object_id *) keydata);
+ return !oideq(&a->oid, &b->oid);
}
void oidmap_init(struct oidmap *map, size_t initial_size)
@@ -21,7 +25,9 @@ void oidmap_free(struct oidmap *map, int free_entries)
{
if (!map)
return;
- hashmap_free(&map->map, free_entries);
+
+ /* TODO: make oidmap itself not depend on struct layouts */
+ hashmap_free_(&map->map, free_entries ? 0 : -1);
}
void *oidmap_get(const struct oidmap *map, const struct object_id *key)
@@ -51,5 +57,5 @@ void *oidmap_put(struct oidmap *map, void *entry)
oidmap_init(map, 0);
hashmap_entry_init(&to_put->internal_entry, oidhash(&to_put->oid));
- return hashmap_put(&map->map, to_put);
+ return hashmap_put(&map->map, &to_put->internal_entry);
}
diff --git a/oidmap.h b/oidmap.h
index 7a939461ff..c66a83ab1d 100644
--- a/oidmap.h
+++ b/oidmap.h
@@ -78,14 +78,16 @@ static inline void oidmap_iter_init(struct oidmap *map, struct oidmap_iter *iter
static inline void *oidmap_iter_next(struct oidmap_iter *iter)
{
- return hashmap_iter_next(&iter->h_iter);
+ /* TODO: this API could be reworked to do compile-time type checks */
+ return (void *)hashmap_iter_next(&iter->h_iter);
}
static inline void *oidmap_iter_first(struct oidmap *map,
struct oidmap_iter *iter)
{
oidmap_iter_init(map, iter);
- return oidmap_iter_next(iter);
+ /* TODO: this API could be reworked to do compile-time type checks */
+ return (void *)oidmap_iter_next(iter);
}
#endif
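
Both TODO notes point at the same constraint: the void-pointer oidmap API only works because users embed struct oidmap_entry as the first member of their own structs, so the pointer handed back by the underlying hashmap can be used directly. A short usage sketch under that assumption; the 'struct annotation' payload is illustrative:

#include "cache.h"
#include "oidmap.h"

struct annotation {
	struct oidmap_entry entry;	/* must stay the first member */
	char *note;
};

static void annotate(const struct object_id *oid)
{
	struct oidmap map;
	struct annotation *a, *found;

	oidmap_init(&map, 0);

	a = xmalloc(sizeof(*a));
	oidcpy(&a->entry.oid, oid);
	a->note = xstrdup("seen");
	oidmap_put(&map, a);		/* hashes a->entry.oid internally */

	found = oidmap_get(&map, oid);
	if (found)
		printf("%s: %s\n", oid_to_hex(oid), found->note);

	free(a->note);
	/*
	 * free_entries == 1: each entry is freed starting at its embedded
	 * hashmap_entry, i.e. at offset 0 of the containing struct.
	 */
	oidmap_free(&map, 1);
}
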
diff --git a/pack-bitmap-write.c b/pack-bitmap-write.c
index fa78a460c9..a7a4964b50 100644
--- a/pack-bitmap-write.c
+++ b/pack-bitmap-write.c
@@ -144,7 +144,7 @@ static inline void reset_all_seen(void)
static uint32_t find_object_pos(const struct object_id *oid)
{
- struct object_entry *entry = packlist_find(writer.to_pack, oid, NULL);
+ struct object_entry *entry = packlist_find(writer.to_pack, oid);
if (!entry) {
die("Failed to write bitmap index. Packfile doesn't have full closure "
diff --git a/pack-bitmap.c b/pack-bitmap.c
index ed2befaac6..e07c798879 100644
--- a/pack-bitmap.c
+++ b/pack-bitmap.c
@@ -709,9 +709,7 @@ struct bitmap_index *prepare_bitmap_walk(struct rev_info *revs)
else
object_list_insert(object, &wants);
- if (!tag->tagged)
- die("bad tag");
- object = parse_object_or_die(&tag->tagged->oid, NULL);
+ object = parse_object_or_die(get_tagged_oid(tag), NULL);
}
if (object->flags & UNINTERESTING)
@@ -1063,7 +1061,7 @@ int rebuild_existing_bitmaps(struct bitmap_index *bitmap_git,
entry = &bitmap_git->pack->revindex[i];
nth_packed_object_oid(&oid, bitmap_git->pack, entry->nr);
- oe = packlist_find(mapping, &oid, NULL);
+ oe = packlist_find(mapping, &oid);
if (oe)
reposition[i] = oe_in_pack_pos(mapping, oe) + 1;
diff --git a/pack-bitmap.h b/pack-bitmap.h
index 00de3ec8e4..466c5afa09 100644
--- a/pack-bitmap.h
+++ b/pack-bitmap.h
@@ -9,16 +9,16 @@ struct commit;
struct repository;
struct rev_info;
+static const char BITMAP_IDX_SIGNATURE[] = {'B', 'I', 'T', 'M'};
+
struct bitmap_disk_header {
- char magic[4];
+ char magic[ARRAY_SIZE(BITMAP_IDX_SIGNATURE)];
uint16_t version;
uint16_t options;
uint32_t entry_count;
unsigned char checksum[GIT_MAX_RAWSZ];
};
-static const char BITMAP_IDX_SIGNATURE[] = {'B', 'I', 'T', 'M'};
-
#define NEEDS_BITMAP (1u<<22)
enum pack_bitmap_opts {
diff --git a/pack-objects.c b/pack-objects.c
index 52560293b6..c6250d77f4 100644
--- a/pack-objects.c
+++ b/pack-objects.c
@@ -68,8 +68,7 @@ static void rehash_objects(struct packing_data *pdata)
}
struct object_entry *packlist_find(struct packing_data *pdata,
- const struct object_id *oid,
- uint32_t *index_pos)
+ const struct object_id *oid)
{
uint32_t i;
int found;
@@ -79,9 +78,6 @@ struct object_entry *packlist_find(struct packing_data *pdata,
i = locate_object_entry_hash(pdata, oid, &found);
- if (index_pos)
- *index_pos = i;
-
if (!found)
return NULL;
@@ -153,8 +149,7 @@ void prepare_packing_data(struct repository *r, struct packing_data *pdata)
}
struct object_entry *packlist_alloc(struct packing_data *pdata,
- const unsigned char *sha1,
- uint32_t index_pos)
+ const struct object_id *oid)
{
struct object_entry *new_entry;
@@ -177,12 +172,19 @@ struct object_entry *packlist_alloc(struct packing_data *pdata,
new_entry = pdata->objects + pdata->nr_objects++;
memset(new_entry, 0, sizeof(*new_entry));
- hashcpy(new_entry->idx.oid.hash, sha1);
+ oidcpy(&new_entry->idx.oid, oid);
if (pdata->index_size * 3 <= pdata->nr_objects * 4)
rehash_objects(pdata);
- else
- pdata->index[index_pos] = pdata->nr_objects;
+ else {
+ int found;
+ uint32_t pos = locate_object_entry_hash(pdata,
+ &new_entry->idx.oid,
+ &found);
+ if (found)
+ BUG("duplicate object inserted into hash");
+ pdata->index[pos] = pdata->nr_objects;
+ }
if (pdata->in_pack)
pdata->in_pack[pdata->nr_objects - 1] = NULL;
diff --git a/pack-objects.h b/pack-objects.h
index 857d43850b..6fe6ae5ee8 100644
--- a/pack-objects.h
+++ b/pack-objects.h
@@ -183,12 +183,10 @@ static inline void packing_data_unlock(struct packing_data *pdata)
}
struct object_entry *packlist_alloc(struct packing_data *pdata,
- const unsigned char *sha1,
- uint32_t index_pos);
+ const struct object_id *oid);
struct object_entry *packlist_find(struct packing_data *pdata,
- const struct object_id *oid,
- uint32_t *index_pos);
+ const struct object_id *oid);
static inline uint32_t pack_name_hash(const char *name)
{
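
With index_pos gone from both prototypes, callers no longer have to carry a hash-table slot from the lookup to the allocation; packlist_alloc() re-locates the slot itself and BUGs on a duplicate. A hedged sketch of the resulting caller pattern, assuming a packing_data already set up with prepare_packing_data(); the helper is illustrative, not lifted from builtin/pack-objects.c:

#include "cache.h"
#include "pack-objects.h"

/* queue 'oid' for packing unless an entry for it already exists */
static struct object_entry *queue_object(struct packing_data *to_pack,
					 const struct object_id *oid)
{
	struct object_entry *entry = packlist_find(to_pack, oid);

	if (!entry)
		/* no index_pos to pass back in; the slot is found again here */
		entry = packlist_alloc(to_pack, oid);
	return entry;
}
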
diff --git a/pack-write.c b/pack-write.c
index 29d17a9bec..f0017beb9d 100644
--- a/pack-write.c
+++ b/pack-write.c
@@ -349,7 +349,7 @@ void finish_tmp_packfile(struct strbuf *name_buffer,
struct pack_idx_entry **written_list,
uint32_t nr_written,
struct pack_idx_option *pack_idx_opts,
- unsigned char sha1[])
+ unsigned char hash[])
{
const char *idx_tmp_name;
int basename_len = name_buffer->len;
@@ -358,18 +358,18 @@ void finish_tmp_packfile(struct strbuf *name_buffer,
die_errno("unable to make temporary pack file readable");
idx_tmp_name = write_idx_file(NULL, written_list, nr_written,
- pack_idx_opts, sha1);
+ pack_idx_opts, hash);
if (adjust_shared_perm(idx_tmp_name))
die_errno("unable to make temporary index file readable");
- strbuf_addf(name_buffer, "%s.pack", sha1_to_hex(sha1));
+ strbuf_addf(name_buffer, "%s.pack", hash_to_hex(hash));
if (rename(pack_tmp_name, name_buffer->buf))
die_errno("unable to rename temporary pack file");
strbuf_setlen(name_buffer, basename_len);
- strbuf_addf(name_buffer, "%s.idx", sha1_to_hex(sha1));
+ strbuf_addf(name_buffer, "%s.idx", hash_to_hex(hash));
if (rename(idx_tmp_name, name_buffer->buf))
die_errno("unable to rename temporary index file");
diff --git a/packfile.c b/packfile.c
index fc43a6c52c..355066de17 100644
--- a/packfile.c
+++ b/packfile.c
@@ -6,7 +6,6 @@
#include "mergesort.h"
#include "packfile.h"
#include "delta.h"
-#include "list.h"
#include "streaming.h"
#include "sha1-lookup.h"
#include "commit.h"
@@ -17,14 +16,15 @@
#include "object-store.h"
#include "midx.h"
#include "commit-graph.h"
+#include "promisor-remote.h"
char *odb_pack_name(struct strbuf *buf,
- const unsigned char *sha1,
+ const unsigned char *hash,
const char *ext)
{
strbuf_reset(buf);
strbuf_addf(buf, "%s/pack/pack-%s.%s", get_object_directory(),
- sha1_to_hex(sha1), ext);
+ hash_to_hex(hash), ext);
return buf->buf;
}
@@ -287,13 +287,6 @@ static int unuse_one_window(struct packed_git *current)
return 0;
}
-void release_pack_memory(size_t need)
-{
- size_t cur = pack_mapped;
- while (need >= (cur - pack_mapped) && unuse_one_window(NULL))
- ; /* nothing */
-}
-
void close_pack_windows(struct packed_git *p)
{
while (p->windows) {
@@ -710,23 +703,12 @@ void unuse_pack(struct pack_window **w_cursor)
}
}
-static void try_to_free_pack_memory(size_t size)
-{
- release_pack_memory(size);
-}
-
struct packed_git *add_packed_git(const char *path, size_t path_len, int local)
{
- static int have_set_try_to_free_routine;
struct stat st;
size_t alloc;
struct packed_git *p;
- if (!have_set_try_to_free_routine) {
- have_set_try_to_free_routine = 1;
- set_try_to_free_routine(try_to_free_pack_memory);
- }
-
/*
* Make sure a corresponding .pack file exists and that
* the index looks sane.
@@ -1361,7 +1343,7 @@ struct delta_base_cache_key {
};
struct delta_base_cache_entry {
- struct hashmap hash;
+ struct hashmap_entry ent;
struct delta_base_cache_key key;
struct list_head lru;
void *data;
@@ -1381,7 +1363,7 @@ static unsigned int pack_entry_hash(struct packed_git *p, off_t base_offset)
static struct delta_base_cache_entry *
get_delta_base_cache_entry(struct packed_git *p, off_t base_offset)
{
- struct hashmap_entry entry;
+ struct hashmap_entry entry, *e;
struct delta_base_cache_key key;
if (!delta_base_cache.cmpfn)
@@ -1390,7 +1372,8 @@ get_delta_base_cache_entry(struct packed_git *p, off_t base_offset)
hashmap_entry_init(&entry, pack_entry_hash(p, base_offset));
key.p = p;
key.base_offset = base_offset;
- return hashmap_get(&delta_base_cache, &entry, &key);
+ e = hashmap_get(&delta_base_cache, &entry, &key);
+ return e ? container_of(e, struct delta_base_cache_entry, ent) : NULL;
}
static int delta_base_cache_key_eq(const struct delta_base_cache_key *a,
@@ -1400,11 +1383,16 @@ static int delta_base_cache_key_eq(const struct delta_base_cache_key *a,
}
static int delta_base_cache_hash_cmp(const void *unused_cmp_data,
- const void *va, const void *vb,
+ const struct hashmap_entry *va,
+ const struct hashmap_entry *vb,
const void *vkey)
{
- const struct delta_base_cache_entry *a = va, *b = vb;
+ const struct delta_base_cache_entry *a, *b;
const struct delta_base_cache_key *key = vkey;
+
+ a = container_of(va, const struct delta_base_cache_entry, ent);
+ b = container_of(vb, const struct delta_base_cache_entry, ent);
+
if (key)
return !delta_base_cache_key_eq(&a->key, key);
else
@@ -1423,7 +1411,7 @@ static int in_delta_base_cache(struct packed_git *p, off_t base_offset)
*/
static void detach_delta_base_cache_entry(struct delta_base_cache_entry *ent)
{
- hashmap_remove(&delta_base_cache, ent, &ent->key);
+ hashmap_remove(&delta_base_cache, &ent->ent, &ent->key);
list_del(&ent->lru);
delta_base_cached -= ent->size;
free(ent);
@@ -1487,8 +1475,8 @@ static void add_delta_base_cache(struct packed_git *p, off_t base_offset,
if (!delta_base_cache.cmpfn)
hashmap_init(&delta_base_cache, delta_base_cache_hash_cmp, NULL, 0);
- hashmap_entry_init(ent, pack_entry_hash(p, base_offset));
- hashmap_add(&delta_base_cache, ent);
+ hashmap_entry_init(&ent->ent, pack_entry_hash(p, base_offset));
+ hashmap_add(&delta_base_cache, &ent->ent);
}
int packed_object_info(struct repository *r, struct packed_git *p,
@@ -2139,7 +2127,7 @@ static int add_promisor_object(const struct object_id *oid,
oidset_insert(set, &parents->item->object.oid);
} else if (obj->type == OBJ_TAG) {
struct tag *tag = (struct tag *) obj;
- oidset_insert(set, &tag->tagged->oid);
+ oidset_insert(set, get_tagged_oid(tag));
}
return 0;
}
@@ -2150,7 +2138,7 @@ int is_promisor_object(const struct object_id *oid)
static int promisor_objects_prepared;
if (!promisor_objects_prepared) {
- if (repository_format_partial_clone) {
+ if (has_promisor_remote()) {
for_each_packed_object(add_promisor_object,
&promisor_objects,
FOR_EACH_OBJECT_PROMISOR_ONLY);
diff --git a/packfile.h b/packfile.h
index 3e98910bdd..fc7904ec81 100644
--- a/packfile.h
+++ b/packfile.h
@@ -100,7 +100,7 @@ struct packed_git *add_packed_git(const char *path, size_t path_len, int local);
* Does not unlink if 'force_delete' is false and the pack-file is
* marked as ".keep".
*/
-extern void unlink_pack_path(const char *pack_name, int force_delete);
+void unlink_pack_path(const char *pack_name, int force_delete);
/*
* Make sure that a pointer access into an mmap'd index file is within bounds,
diff --git a/parse-options.c b/parse-options.c
index 87b26a1d92..b42f54d48b 100644
--- a/parse-options.c
+++ b/parse-options.c
@@ -780,7 +780,8 @@ int parse_options_step(struct parse_opt_ctx_t *ctx,
continue;
}
- if (!arg[2]) { /* "--" */
+ if (!arg[2] /* "--" */ ||
+ !strcmp(arg + 2, "end-of-options")) {
if (!(ctx->flags & PARSE_OPT_KEEP_DASHDASH)) {
ctx->argc--;
ctx->argv++;
diff --git a/parse-options.h b/parse-options.h
index a4bd40bb6a..38a33a087e 100644
--- a/parse-options.h
+++ b/parse-options.h
@@ -46,6 +46,15 @@ enum parse_opt_option_flags {
PARSE_OPT_COMP_ARG = 1024
};
+enum parse_opt_result {
+ PARSE_OPT_COMPLETE = -3,
+ PARSE_OPT_HELP = -2,
+ PARSE_OPT_ERROR = -1, /* must be the same as error() */
+ PARSE_OPT_DONE = 0, /* fixed so that "return 0" works */
+ PARSE_OPT_NON_OPTION,
+ PARSE_OPT_UNKNOWN
+};
+
struct option;
typedef int parse_opt_cb(const struct option *, const char *arg, int unset);
@@ -241,15 +250,6 @@ const char *optname(const struct option *opt, int flags);
/*----- incremental advanced APIs -----*/
-enum parse_opt_result {
- PARSE_OPT_COMPLETE = -3,
- PARSE_OPT_HELP = -2,
- PARSE_OPT_ERROR = -1, /* must be the same as error() */
- PARSE_OPT_DONE = 0, /* fixed so that "return 0" works */
- PARSE_OPT_NON_OPTION,
- PARSE_OPT_UNKNOWN
-};
-
/*
* It's okay for the caller to consume argv/argc in the usual way.
* Other fields of that structure are private to parse-options and should not
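With enum parse_opt_result moved into the public part of the header, callers of the incremental API can name the step results directly, and per the parse-options.c hunk above "--end-of-options" now terminates option parsing just like "--". A sketch of a step-wise caller; the loop body and the function name are illustrative, not taken from this patch:

#include "cache.h"
#include "parse-options.h"

static void parse_in_steps(struct parse_opt_ctx_t *ctx,
                           const struct option *options,
                           const char * const usage[])
{
        for (;;) {
                switch (parse_options_step(ctx, options, usage)) {
                case PARSE_OPT_HELP:
                case PARSE_OPT_ERROR:
                        exit(129);
                case PARSE_OPT_COMPLETE:
                case PARSE_OPT_DONE:
                        return;
                case PARSE_OPT_NON_OPTION:
                case PARSE_OPT_UNKNOWN:
                        /* hand the unrecognized argument to another parser */
                        return;
                }
        }
}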
diff --git a/patch-ids.c b/patch-ids.c
index e8c150d0c9..12aa6d494b 100644
--- a/patch-ids.c
+++ b/patch-ids.c
@@ -36,14 +36,16 @@ int commit_patch_id(struct commit *commit, struct diff_options *options,
* any significance; only that it is non-zero matters.
*/
static int patch_id_neq(const void *cmpfn_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
/* NEEDSWORK: const correctness? */
struct diff_options *opt = (void *)cmpfn_data;
- struct patch_id *a = (void *)entry;
- struct patch_id *b = (void *)entry_or_key;
+ struct patch_id *a, *b;
+
+ a = container_of(eptr, struct patch_id, ent);
+ b = container_of(entry_or_key, struct patch_id, ent);
if (is_null_oid(&a->patch_id) &&
commit_patch_id(a->commit, opt, &a->patch_id, 0, 0))
@@ -69,7 +71,7 @@ int init_patch_ids(struct repository *r, struct patch_ids *ids)
int free_patch_ids(struct patch_ids *ids)
{
- hashmap_free(&ids->patches, 1);
+ hashmap_free_entries(&ids->patches, struct patch_id, ent);
return 0;
}
@@ -83,7 +85,7 @@ static int init_patch_id_entry(struct patch_id *patch,
if (commit_patch_id(commit, &ids->diffopts, &header_only_patch_id, 1, 0))
return -1;
- hashmap_entry_init(patch, oidhash(&header_only_patch_id));
+ hashmap_entry_init(&patch->ent, oidhash(&header_only_patch_id));
return 0;
}
@@ -99,7 +101,7 @@ struct patch_id *has_commit_patch_id(struct commit *commit,
if (init_patch_id_entry(&patch, commit, ids))
return NULL;
- return hashmap_get(&ids->patches, &patch, NULL);
+ return hashmap_get_entry(&ids->patches, &patch, ent, NULL);
}
struct patch_id *add_commit_patch_id(struct commit *commit,
@@ -116,6 +118,6 @@ struct patch_id *add_commit_patch_id(struct commit *commit,
return NULL;
}
- hashmap_add(&ids->patches, key);
+ hashmap_add(&ids->patches, &key->ent);
return key;
}
diff --git a/path.c b/path.c
index 25e97b8c3f..e3da1f3c4e 100644
--- a/path.c
+++ b/path.c
@@ -1221,31 +1221,52 @@ static inline int chomp_trailing_dir_sep(const char *path, int len)
}
/*
- * If path ends with suffix (complete path components), returns the
- * part before suffix (sans trailing directory separators).
- * Otherwise returns NULL.
+ * If path ends with suffix (complete path components), returns the length of
+ * the part of the path before the suffix (sans trailing directory
+ * separators), and -1 otherwise.
*/
-char *strip_path_suffix(const char *path, const char *suffix)
+static ssize_t stripped_path_suffix_offset(const char *path, const char *suffix)
{
int path_len = strlen(path), suffix_len = strlen(suffix);
while (suffix_len) {
if (!path_len)
- return NULL;
+ return -1;
if (is_dir_sep(path[path_len - 1])) {
if (!is_dir_sep(suffix[suffix_len - 1]))
- return NULL;
+ return -1;
path_len = chomp_trailing_dir_sep(path, path_len);
suffix_len = chomp_trailing_dir_sep(suffix, suffix_len);
}
else if (path[--path_len] != suffix[--suffix_len])
- return NULL;
+ return -1;
}
if (path_len && !is_dir_sep(path[path_len - 1]))
- return NULL;
- return xstrndup(path, chomp_trailing_dir_sep(path, path_len));
+ return -1;
+ return chomp_trailing_dir_sep(path, path_len);
+}
+
+/*
+ * Returns true if the path ends with the given components (matching only
+ * complete path components), and false otherwise.
+ */
+int ends_with_path_components(const char *path, const char *components)
+{
+ return stripped_path_suffix_offset(path, components) != -1;
+}
+
+/*
+ * If path ends with suffix (complete path components), returns the
+ * part before suffix (sans trailing directory separators).
+ * Otherwise returns NULL.
+ */
+char *strip_path_suffix(const char *path, const char *suffix)
+{
+ ssize_t offset = stripped_path_suffix_offset(path, suffix);
+
+ return offset == -1 ? NULL : xstrndup(path, offset);
}
int daemon_avoid_alias(const char *p)
diff --git a/path.h b/path.h
index 2ba6ca58c8..14d6dcad16 100644
--- a/path.h
+++ b/path.h
@@ -193,4 +193,7 @@ const char *git_path_merge_head(struct repository *r);
const char *git_path_fetch_head(struct repository *r);
const char *git_path_shallow(struct repository *r);
+
+int ends_with_path_components(const char *path, const char *components);
+
#endif /* PATH_H */
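The behavior of the refactored helpers follows directly from the path.c hunk above; a small illustration, where the literal paths are only examples:

#include "cache.h"
#include "path.h"

static void demo(void)
{
        char *prefix;

        if (!ends_with_path_components("/work/repo/.git", ".git"))
                BUG("'.git' is a complete trailing component");
        if (ends_with_path_components("/work/repo/.git", "it"))
                BUG("'it' is only part of a component, so this must not match");

        prefix = strip_path_suffix("/work/repo/.git", ".git");
        /* prefix is now "/work/repo"; the caller owns the string */
        free(prefix);
}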
diff --git a/perl/Git/SVN.pm b/perl/Git/SVN.pm
index 76b2965905..4b28b87784 100644
--- a/perl/Git/SVN.pm
+++ b/perl/Git/SVN.pm
@@ -1491,6 +1491,10 @@ sub call_authors_prog {
sub check_author {
my ($author) = @_;
+ if (defined $author) {
+ $author =~ s/^\s+//g;
+ $author =~ s/\s+$//g;
+ }
if (!defined $author || length $author == 0) {
$author = '(no author)';
}
diff --git a/pretty.c b/pretty.c
index e4ed14effe..b32f036953 100644
--- a/pretty.c
+++ b/pretty.c
@@ -1239,11 +1239,9 @@ static size_t format_commit_one(struct strbuf *sb, /* in UTF-8 */
strbuf_addstr(sb, get_revision_mark(NULL, commit));
return 1;
case 'd':
- load_ref_decorations(NULL, DECORATE_SHORT_REFS);
format_decorations(sb, commit, c->auto_color);
return 1;
case 'D':
- load_ref_decorations(NULL, DECORATE_SHORT_REFS);
format_decorations_extended(sb, commit, c->auto_color, "", ", ", "");
return 1;
case 'S': /* tag/branch like --source */
diff --git a/progress.c b/progress.c
index 277db8afa2..0063559aab 100644
--- a/progress.c
+++ b/progress.c
@@ -45,6 +45,19 @@ struct progress {
static volatile sig_atomic_t progress_update;
+/*
+ * These are only intended for testing the progress output, i.e. exclusively
+ * for 'test-tool progress'.
+ */
+int progress_testing;
+uint64_t progress_test_ns = 0;
+void progress_test_force_update(void); /* To silence -Wmissing-prototypes */
+void progress_test_force_update(void)
+{
+ progress_update = 1;
+}
+
+
static void progress_interval(int signum)
{
progress_update = 1;
@@ -55,6 +68,9 @@ static void set_progress_signal(void)
struct sigaction sa;
struct itimerval v;
+ if (progress_testing)
+ return;
+
progress_update = 0;
memset(&sa, 0, sizeof(sa));
@@ -72,6 +88,10 @@ static void set_progress_signal(void)
static void clear_progress_signal(void)
{
struct itimerval v = {{0,},};
+
+ if (progress_testing)
+ return;
+
setitimer(ITIMER_REAL, &v, NULL);
signal(SIGALRM, SIG_IGN);
progress_update = 0;
@@ -88,6 +108,7 @@ static void display(struct progress *progress, uint64_t n, const char *done)
const char *tp;
struct strbuf *counters_sb = &progress->counters_sb;
int show_update = 0;
+ int last_count_len = counters_sb->len;
if (progress->delay && (!progress_update || --progress->delay))
return;
@@ -115,21 +136,27 @@ static void display(struct progress *progress, uint64_t n, const char *done)
if (show_update) {
if (is_foreground_fd(fileno(stderr)) || done) {
const char *eol = done ? done : "\r";
+ size_t clear_len = counters_sb->len < last_count_len ?
+ last_count_len - counters_sb->len + 1 :
+ 0;
+ /* The "+ 2" accounts for the ": ". */
+ size_t progress_line_len = progress->title_len +
+ counters_sb->len + 2;
+ int cols = term_columns();
- term_clear_line();
if (progress->split) {
- fprintf(stderr, " %s%s", counters_sb->buf,
- eol);
- } else if (!done &&
- /* The "+ 2" accounts for the ": ". */
- term_columns() < progress->title_len +
- counters_sb->len + 2) {
- fprintf(stderr, "%s:\n %s%s",
- progress->title, counters_sb->buf, eol);
+ fprintf(stderr, " %s%*s", counters_sb->buf,
+ (int) clear_len, eol);
+ } else if (!done && cols < progress_line_len) {
+ clear_len = progress->title_len + 1 < cols ?
+ cols - progress->title_len - 1 : 0;
+ fprintf(stderr, "%s:%*s\n %s%s",
+ progress->title, (int) clear_len, "",
+ counters_sb->buf, eol);
progress->split = 1;
} else {
- fprintf(stderr, "%s: %s%s", progress->title,
- counters_sb->buf, eol);
+ fprintf(stderr, "%s: %s%*s", progress->title,
+ counters_sb->buf, (int) clear_len, eol);
}
fflush(stderr);
}
@@ -147,6 +174,14 @@ static void throughput_string(struct strbuf *buf, uint64_t total,
strbuf_humanise_rate(buf, rate * 1024);
}
+static uint64_t progress_getnanotime(struct progress *progress)
+{
+ if (progress_testing)
+ return progress->start_ns + progress_test_ns;
+ else
+ return getnanotime();
+}
+
void display_throughput(struct progress *progress, uint64_t total)
{
struct throughput *tp;
@@ -157,7 +192,7 @@ void display_throughput(struct progress *progress, uint64_t total)
return;
tp = progress->throughput;
- now_ns = getnanotime();
+ now_ns = progress_getnanotime(progress);
if (!tp) {
progress->throughput = tp = xcalloc(1, sizeof(*tp));
@@ -289,7 +324,7 @@ void stop_progress_msg(struct progress **p_progress, const char *msg)
struct throughput *tp = progress->throughput;
if (tp) {
- uint64_t now_ns = getnanotime();
+ uint64_t now_ns = progress_getnanotime(progress);
unsigned int misecs, rate;
misecs = ((now_ns - progress->start_ns) * 4398) >> 32;
rate = tp->curr_total / (misecs ? misecs : 1);
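The display() change drops term_clear_line() and instead pads with a computed field width, so that a shorter counter line overwrites the leftovers of the previous, longer one. A standalone sketch of the "%*s" trick, with made-up counter values:

#include <stdio.h>
#include <string.h>

int main(void)
{
        const char *title = "Counting objects";
        const char *prev = "1000000";
        const char *curr = "17";        /* imagine a counter that got shorter */
        size_t clear_len = strlen(prev) > strlen(curr)
                ? strlen(prev) - strlen(curr) + 1 : 0;

        fprintf(stderr, "%s: %s\r", title, prev);
        /* pads clear_len columns (spaces plus the "\r"), wiping the leftover digits */
        fprintf(stderr, "%s: %s%*s", title, curr, (int) clear_len, "\r");
        fputc('\n', stderr);
        return 0;
}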
diff --git a/promisor-remote.c b/promisor-remote.c
new file mode 100644
index 0000000000..9bd5b79d59
--- /dev/null
+++ b/promisor-remote.c
@@ -0,0 +1,268 @@
+#include "cache.h"
+#include "object-store.h"
+#include "promisor-remote.h"
+#include "config.h"
+#include "transport.h"
+
+static char *repository_format_partial_clone;
+static const char *core_partial_clone_filter_default;
+
+void set_repository_format_partial_clone(char *partial_clone)
+{
+ repository_format_partial_clone = xstrdup_or_null(partial_clone);
+}
+
+static int fetch_refs(const char *remote_name, struct ref *ref)
+{
+ struct remote *remote;
+ struct transport *transport;
+ int original_fetch_if_missing = fetch_if_missing;
+ int res;
+
+ fetch_if_missing = 0;
+ remote = remote_get(remote_name);
+ if (!remote->url[0])
+ die(_("Remote with no URL"));
+ transport = transport_get(remote, remote->url[0]);
+
+ transport_set_option(transport, TRANS_OPT_FROM_PROMISOR, "1");
+ transport_set_option(transport, TRANS_OPT_NO_DEPENDENTS, "1");
+ res = transport_fetch_refs(transport, ref);
+ fetch_if_missing = original_fetch_if_missing;
+
+ return res;
+}
+
+static int fetch_objects(const char *remote_name,
+ const struct object_id *oids,
+ int oid_nr)
+{
+ struct ref *ref = NULL;
+ int i;
+
+ for (i = 0; i < oid_nr; i++) {
+ struct ref *new_ref = alloc_ref(oid_to_hex(&oids[i]));
+ oidcpy(&new_ref->old_oid, &oids[i]);
+ new_ref->exact_oid = 1;
+ new_ref->next = ref;
+ ref = new_ref;
+ }
+ return fetch_refs(remote_name, ref);
+}
+
+static struct promisor_remote *promisors;
+static struct promisor_remote **promisors_tail = &promisors;
+
+static struct promisor_remote *promisor_remote_new(const char *remote_name)
+{
+ struct promisor_remote *r;
+
+ if (*remote_name == '/') {
+ warning(_("promisor remote name cannot begin with '/': %s"),
+ remote_name);
+ return NULL;
+ }
+
+ FLEX_ALLOC_STR(r, name, remote_name);
+
+ *promisors_tail = r;
+ promisors_tail = &r->next;
+
+ return r;
+}
+
+static struct promisor_remote *promisor_remote_lookup(const char *remote_name,
+ struct promisor_remote **previous)
+{
+ struct promisor_remote *r, *p;
+
+ for (p = NULL, r = promisors; r; p = r, r = r->next)
+ if (!strcmp(r->name, remote_name)) {
+ if (previous)
+ *previous = p;
+ return r;
+ }
+
+ return NULL;
+}
+
+static void promisor_remote_move_to_tail(struct promisor_remote *r,
+ struct promisor_remote *previous)
+{
+ if (r->next == NULL)
+ return;
+
+ if (previous)
+ previous->next = r->next;
+ else
+ promisors = r->next ? r->next : r;
+ r->next = NULL;
+ *promisors_tail = r;
+ promisors_tail = &r->next;
+}
+
+static int promisor_remote_config(const char *var, const char *value, void *data)
+{
+ const char *name;
+ int namelen;
+ const char *subkey;
+
+ if (!strcmp(var, "core.partialclonefilter"))
+ return git_config_string(&core_partial_clone_filter_default,
+ var, value);
+
+ if (parse_config_key(var, "remote", &name, &namelen, &subkey) < 0)
+ return 0;
+
+ if (!strcmp(subkey, "promisor")) {
+ char *remote_name;
+
+ if (!git_config_bool(var, value))
+ return 0;
+
+ remote_name = xmemdupz(name, namelen);
+
+ if (!promisor_remote_lookup(remote_name, NULL))
+ promisor_remote_new(remote_name);
+
+ free(remote_name);
+ return 0;
+ }
+ if (!strcmp(subkey, "partialclonefilter")) {
+ struct promisor_remote *r;
+ char *remote_name = xmemdupz(name, namelen);
+
+ r = promisor_remote_lookup(remote_name, NULL);
+ if (!r)
+ r = promisor_remote_new(remote_name);
+
+ free(remote_name);
+
+ if (!r)
+ return 0;
+
+ return git_config_string(&r->partial_clone_filter, var, value);
+ }
+
+ return 0;
+}
+
+static int initialized;
+
+static void promisor_remote_init(void)
+{
+ if (initialized)
+ return;
+ initialized = 1;
+
+ git_config(promisor_remote_config, NULL);
+
+ if (repository_format_partial_clone) {
+ struct promisor_remote *o, *previous;
+
+ o = promisor_remote_lookup(repository_format_partial_clone,
+ &previous);
+ if (o)
+ promisor_remote_move_to_tail(o, previous);
+ else
+ promisor_remote_new(repository_format_partial_clone);
+ }
+}
+
+static void promisor_remote_clear(void)
+{
+ while (promisors) {
+ struct promisor_remote *r = promisors;
+ promisors = promisors->next;
+ free(r);
+ }
+
+ promisors_tail = &promisors;
+}
+
+void promisor_remote_reinit(void)
+{
+ initialized = 0;
+ promisor_remote_clear();
+ promisor_remote_init();
+}
+
+struct promisor_remote *promisor_remote_find(const char *remote_name)
+{
+ promisor_remote_init();
+
+ if (!remote_name)
+ return promisors;
+
+ return promisor_remote_lookup(remote_name, NULL);
+}
+
+int has_promisor_remote(void)
+{
+ return !!promisor_remote_find(NULL);
+}
+
+static int remove_fetched_oids(struct repository *repo,
+ struct object_id **oids,
+ int oid_nr, int to_free)
+{
+ int i, remaining_nr = 0;
+ int *remaining = xcalloc(oid_nr, sizeof(*remaining));
+ struct object_id *old_oids = *oids;
+ struct object_id *new_oids;
+
+ for (i = 0; i < oid_nr; i++)
+ if (oid_object_info_extended(repo, &old_oids[i], NULL,
+ OBJECT_INFO_SKIP_FETCH_OBJECT)) {
+ remaining[i] = 1;
+ remaining_nr++;
+ }
+
+ if (remaining_nr) {
+ int j = 0;
+ new_oids = xcalloc(remaining_nr, sizeof(*new_oids));
+ for (i = 0; i < oid_nr; i++)
+ if (remaining[i])
+ oidcpy(&new_oids[j++], &old_oids[i]);
+ *oids = new_oids;
+ if (to_free)
+ free(old_oids);
+ }
+
+ free(remaining);
+
+ return remaining_nr;
+}
+
+int promisor_remote_get_direct(struct repository *repo,
+ const struct object_id *oids,
+ int oid_nr)
+{
+ struct promisor_remote *r;
+ struct object_id *remaining_oids = (struct object_id *)oids;
+ int remaining_nr = oid_nr;
+ int to_free = 0;
+ int res = -1;
+
+ promisor_remote_init();
+
+ for (r = promisors; r; r = r->next) {
+ if (fetch_objects(r->name, remaining_oids, remaining_nr) < 0) {
+ if (remaining_nr == 1)
+ continue;
+ remaining_nr = remove_fetched_oids(repo, &remaining_oids,
+ remaining_nr, to_free);
+ if (remaining_nr) {
+ to_free = 1;
+ continue;
+ }
+ }
+ res = 0;
+ break;
+ }
+
+ if (to_free)
+ free(remaining_oids);
+
+ return res;
+}
diff --git a/promisor-remote.h b/promisor-remote.h
new file mode 100644
index 0000000000..737bac3a33
--- /dev/null
+++ b/promisor-remote.h
@@ -0,0 +1,33 @@
+#ifndef PROMISOR_REMOTE_H
+#define PROMISOR_REMOTE_H
+
+#include "repository.h"
+
+struct object_id;
+
+/*
+ * Promisor remote linked list
+ *
+ * Information in its fields comes from remote.XXX config entries or
+ * from extensions.partialclone or core.partialclonefilter.
+ */
+struct promisor_remote {
+ struct promisor_remote *next;
+ const char *partial_clone_filter;
+ const char name[FLEX_ARRAY];
+};
+
+void promisor_remote_reinit(void);
+struct promisor_remote *promisor_remote_find(const char *remote_name);
+int has_promisor_remote(void);
+int promisor_remote_get_direct(struct repository *repo,
+ const struct object_id *oids,
+ int oid_nr);
+
+/*
+ * This should be used only once from setup.c to set the value we got
+ * from the extensions.partialclone config option.
+ */
+void set_repository_format_partial_clone(char *partial_clone);
+
+#endif /* PROMISOR_REMOTE_H */
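A hedged sketch of how object-fetching code is expected to use this API: check has_promisor_remote() to see whether lazy fetching applies at all, then let promisor_remote_get_direct() try each configured promisor remote in turn (it returns 0 once one of them delivered the still-missing objects). The wrapper function and its name are illustrative, not part of this patch:

#include "cache.h"
#include "promisor-remote.h"

static void ensure_objects_available(struct repository *r,
                                     const struct object_id *oids, int nr)
{
        if (!has_promisor_remote())
                return; /* not a partial clone; nothing to fetch lazily */

        /* tries each promisor remote in turn; 0 means everything was fetched */
        if (promisor_remote_get_direct(r, oids, nr))
                warning("could not fetch %d object(s) from promisor remotes", nr);
}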
diff --git a/quote.c b/quote.c
index 7f2aa6faa4..24a58ba454 100644
--- a/quote.c
+++ b/quote.c
@@ -48,6 +48,12 @@ void sq_quote_buf_pretty(struct strbuf *dst, const char *src)
static const char ok_punct[] = "+,-./:=@_^";
const char *p;
+ /* Avoid losing a zero-length string by adding '' */
+ if (!*src) {
+ strbuf_addstr(dst, "''");
+ return;
+ }
+
for (p = src; *p; p++) {
if (!isalpha(*p) && !isdigit(*p) && !strchr(ok_punct, *p)) {
sq_quote_buf(dst, src);
@@ -84,12 +90,28 @@ void sq_quote_argv(struct strbuf *dst, const char **argv)
}
}
+/*
+ * Legacy function to append each argv value, quoted as necessary,
+ * with whitespace before each value. This results in a leading
+ * space in the result.
+ */
void sq_quote_argv_pretty(struct strbuf *dst, const char **argv)
{
+ if (argv[0])
+ strbuf_addch(dst, ' ');
+ sq_append_quote_argv_pretty(dst, argv);
+}
+
+/*
+ * Append each argv value, quoted as necessary, with whitespace between them.
+ */
+void sq_append_quote_argv_pretty(struct strbuf *dst, const char **argv)
+{
int i;
for (i = 0; argv[i]; i++) {
- strbuf_addch(dst, ' ');
+ if (i > 0)
+ strbuf_addch(dst, ' ');
sq_quote_buf_pretty(dst, argv[i]);
}
}
diff --git a/quote.h b/quote.h
index fb08dc085c..ca8ee3144a 100644
--- a/quote.h
+++ b/quote.h
@@ -40,6 +40,7 @@ void sq_quotef(struct strbuf *, const char *fmt, ...);
*/
void sq_quote_buf_pretty(struct strbuf *, const char *src);
void sq_quote_argv_pretty(struct strbuf *, const char **argv);
+void sq_append_quote_argv_pretty(struct strbuf *dst, const char **argv);
/* This unwraps what sq_quote() produces in place, but returns
* NULL if the input does not look like what sq_quote would have
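The practical difference between the two declarations above is only the leading space (plus the new '' handling for empty strings in sq_quote_buf_pretty()). An illustrative snippet; the argv contents are made up:

#include "cache.h"
#include "quote.h"

static void demo(void)
{
        const char *argv[] = { "status", "--short", "a file", "", NULL };
        struct strbuf legacy = STRBUF_INIT, plain = STRBUF_INIT;

        /* legacy form, note the leading space: " status --short 'a file' ''" */
        sq_quote_argv_pretty(&legacy, argv);

        /* new form, no leading space: "status --short 'a file' ''" */
        sq_append_quote_argv_pretty(&plain, argv);

        strbuf_release(&legacy);
        strbuf_release(&plain);
}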
diff --git a/range-diff.c b/range-diff.c
index ba1e9a4265..7fed5a3b4b 100644
--- a/range-diff.c
+++ b/range-diff.c
@@ -52,6 +52,7 @@ static int read_patches(const char *range, struct string_list *list)
argv_array_pushl(&cp.args, "log", "--no-color", "-p", "--no-merges",
"--reverse", "--date-order", "--decorate=no",
+ "--no-prefix",
/*
* Choose indicators that are not used anywhere
* else in diffs, but still look reasonable
@@ -111,7 +112,7 @@ static int read_patches(const char *range, struct string_list *list)
if (!util->diff_offset)
util->diff_offset = buf.len;
line[len - 1] = '\n';
- len = parse_git_diff_header(&root, &linenr, 1, line,
+ len = parse_git_diff_header(&root, &linenr, 0, line,
len, size, &patch);
if (len < 0)
die(_("could not parse git header '%.*s'"), (int)len, line);
@@ -217,8 +218,8 @@ static void find_exact_matches(struct string_list *a, struct string_list *b)
util->i = i;
util->patch = a->items[i].string;
util->diff = util->patch + util->diff_offset;
- hashmap_entry_init(util, strhash(util->diff));
- hashmap_add(&map, util);
+ hashmap_entry_init(&util->e, strhash(util->diff));
+ hashmap_add(&map, &util->e);
}
/* Now try to find exact matches in b */
@@ -228,8 +229,8 @@ static void find_exact_matches(struct string_list *a, struct string_list *b)
util->i = i;
util->patch = b->items[i].string;
util->diff = util->patch + util->diff_offset;
- hashmap_entry_init(util, strhash(util->diff));
- other = hashmap_remove(&map, util, NULL);
+ hashmap_entry_init(&util->e, strhash(util->diff));
+ other = hashmap_remove_entry(&map, util, e, NULL);
if (other) {
if (other->matching >= 0)
BUG("already assigned!");
@@ -239,7 +240,7 @@ static void find_exact_matches(struct string_list *a, struct string_list *b)
}
}
- hashmap_free(&map, 0);
+ hashmap_free(&map);
}
static void diffsize_consume(void *data, char *line, unsigned long len)
diff --git a/read-cache.c b/read-cache.c
index 52ffa8a313..133f790fa4 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -1276,7 +1276,7 @@ static int add_index_entry_with_check(struct index_state *istate, struct cache_e
*/
if (istate->cache_nr > 0 &&
strcmp(ce->name, istate->cache[istate->cache_nr - 1]->name) > 0)
- pos = -istate->cache_nr - 1;
+ pos = index_pos_to_insert_pos(istate->cache_nr);
else
pos = index_name_stage_pos(istate, ce->name, ce_namelen(ce), ce_stage(ce));
@@ -1472,6 +1472,27 @@ static void show_file(const char * fmt, const char * name, int in_porcelain,
printf(fmt, name);
}
+int repo_refresh_and_write_index(struct repository *repo,
+ unsigned int refresh_flags,
+ unsigned int write_flags,
+ int gentle,
+ const struct pathspec *pathspec,
+ char *seen, const char *header_msg)
+{
+ struct lock_file lock_file = LOCK_INIT;
+ int fd, ret = 0;
+
+ fd = repo_hold_locked_index(repo, &lock_file, 0);
+ if (!gentle && fd < 0)
+ return -1;
+ if (refresh_index(repo->index, refresh_flags, pathspec, seen, header_msg))
+ ret = 1;
+ if (0 <= fd && write_locked_index(repo->index, &lock_file, COMMIT_LOCK | write_flags))
+ ret = -1;
+ return ret;
+}
+
+
int refresh_index(struct index_state *istate, unsigned int flags,
const struct pathspec *pathspec,
char *seen, const char *header_msg)
@@ -1599,16 +1620,17 @@ struct cache_entry *refresh_cache_entry(struct index_state *istate,
#define INDEX_FORMAT_DEFAULT 3
-static unsigned int get_index_format_default(void)
+static unsigned int get_index_format_default(struct repository *r)
{
char *envversion = getenv("GIT_INDEX_VERSION");
char *endp;
- int value;
unsigned int version = INDEX_FORMAT_DEFAULT;
if (!envversion) {
- if (!git_config_get_int("index.version", &value))
- version = value;
+ prepare_repo_settings(r);
+
+ if (r->settings.index_version >= 0)
+ version = r->settings.index_version;
if (version < INDEX_FORMAT_LB || INDEX_FORMAT_UB < version) {
warning(_("index.version set, but the value is invalid.\n"
"Using version %i"), INDEX_FORMAT_DEFAULT);
@@ -1844,18 +1866,17 @@ static void check_ce_order(struct index_state *istate)
static void tweak_untracked_cache(struct index_state *istate)
{
- switch (git_config_get_untracked_cache()) {
- case -1: /* keep: do nothing */
- break;
- case 0: /* false */
+ struct repository *r = the_repository;
+
+ prepare_repo_settings(r);
+
+ if (r->settings.core_untracked_cache == UNTRACKED_CACHE_REMOVE) {
remove_untracked_cache(istate);
- break;
- case 1: /* true */
- add_untracked_cache(istate);
- break;
- default: /* unknown value: do nothing */
- break;
+ return;
}
+
+ if (r->settings.core_untracked_cache == UNTRACKED_CACHE_WRITE)
+ add_untracked_cache(istate);
}
static void tweak_split_index(struct index_state *istate)
@@ -1894,7 +1915,7 @@ static size_t estimate_cache_size(size_t ondisk_size, unsigned int entries)
/*
* Account for potential alignment differences.
*/
- per_entry += align_padding_size(sizeof(struct cache_entry), -sizeof(struct ondisk_cache_entry));
+ per_entry += align_padding_size(per_entry, 0);
return ondisk_size + entries * per_entry;
}
@@ -2765,7 +2786,7 @@ static int do_write_index(struct index_state *istate, struct tempfile *tempfile,
}
if (!istate->version) {
- istate->version = get_index_format_default();
+ istate->version = get_index_format_default(the_repository);
if (git_env_bool("GIT_TEST_SPLIT_INDEX", 0))
init_split_index(istate);
}
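A sketch of a caller of the new repo_refresh_and_write_index() helper, assuming its declaration is added elsewhere in this patch; the wrapper function and the chosen flags are illustrative:

#include "cache.h"
#include "repository.h"

static int refresh_before_operation(struct repository *r)
{
        /*
         * gentle=1: if the index is already locked, refresh in memory and
         * carry on instead of failing; a negative return indicates the
         * refreshed index could not be written back.
         */
        return repo_refresh_and_write_index(r, REFRESH_QUIET, 0, 1,
                                            NULL, NULL, NULL);
}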
diff --git a/ref-filter.c b/ref-filter.c
index f27cfc8c3e..6867e33648 100644
--- a/ref-filter.c
+++ b/ref-filter.c
@@ -79,17 +79,20 @@ static struct expand_data {
} oi, oi_deref;
struct ref_to_worktree_entry {
- struct hashmap_entry ent; /* must be the first member! */
+ struct hashmap_entry ent;
struct worktree *wt; /* key is wt->head_ref */
};
static int ref_to_worktree_map_cmpfnc(const void *unused_lookupdata,
- const void *existing_hashmap_entry_to_test,
- const void *key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *kptr,
const void *keydata_aka_refname)
{
- const struct ref_to_worktree_entry *e = existing_hashmap_entry_to_test;
- const struct ref_to_worktree_entry *k = key;
+ const struct ref_to_worktree_entry *e, *k;
+
+ e = container_of(eptr, const struct ref_to_worktree_entry, ent);
+ k = container_of(kptr, const struct ref_to_worktree_entry, ent);
+
return strcmp(e->wt->head_ref,
keydata_aka_refname ? keydata_aka_refname : k->wt->head_ref);
}
@@ -1028,7 +1031,7 @@ static const char *copy_name(const char *buf)
if (!strncmp(cp, " <", 2))
return xmemdupz(buf, cp - buf);
}
- return "";
+ return xstrdup("");
}
static const char *copy_email(const char *buf)
@@ -1036,10 +1039,10 @@ static const char *copy_email(const char *buf)
const char *email = strchr(buf, '<');
const char *eoemail;
if (!email)
- return "";
+ return xstrdup("");
eoemail = strchr(email, '>');
if (!eoemail)
- return "";
+ return xstrdup("");
return xmemdupz(email, eoemail + 1 - email);
}
@@ -1565,9 +1568,10 @@ static void populate_worktree_map(struct hashmap *map, struct worktree **worktre
struct ref_to_worktree_entry *entry;
entry = xmalloc(sizeof(*entry));
entry->wt = worktrees[i];
- hashmap_entry_init(entry, strhash(worktrees[i]->head_ref));
+ hashmap_entry_init(&entry->ent,
+ strhash(worktrees[i]->head_ref));
- hashmap_add(map, entry);
+ hashmap_add(map, &entry->ent);
}
}
}
@@ -1584,18 +1588,20 @@ static void lazy_init_worktree_map(void)
static char *get_worktree_path(const struct used_atom *atom, const struct ref_array_item *ref)
{
- struct hashmap_entry entry;
+ struct hashmap_entry entry, *e;
struct ref_to_worktree_entry *lookup_result;
lazy_init_worktree_map();
hashmap_entry_init(&entry, strhash(ref->refname));
- lookup_result = hashmap_get(&(ref_to_worktree_map.map), &entry, ref->refname);
+ e = hashmap_get(&(ref_to_worktree_map.map), &entry, ref->refname);
- if (lookup_result)
- return xstrdup(lookup_result->wt->path);
- else
+ if (!e)
return xstrdup("");
+
+ lookup_result = container_of(e, struct ref_to_worktree_entry, ent);
+
+ return xstrdup(lookup_result->wt->path);
}
/*
@@ -1766,7 +1772,7 @@ static int populate_value(struct ref_array_item *ref, struct strbuf *err)
* If it is a tag object, see if we use a value that derefs
* the object, and if we do grab the object it refers to.
*/
- oi_deref.oid = ((struct tag *)obj)->tagged->oid;
+ oi_deref.oid = *get_tagged_oid((struct tag *)obj);
/*
* NEEDSWORK: This derefs tag only once, which
@@ -1997,7 +2003,7 @@ static const struct object_id *match_points_at(struct oid_array *points_at,
if (!obj)
die(_("malformed object at '%s'"), refname);
if (obj->type == OBJ_TAG)
- tagged_oid = &((struct tag *)obj)->tagged->oid;
+ tagged_oid = get_tagged_oid((struct tag *)obj);
if (tagged_oid && oid_array_lookup(points_at, tagged_oid) >= 0)
return tagged_oid;
return NULL;
@@ -2166,7 +2172,8 @@ void ref_array_clear(struct ref_array *array)
used_atom_cnt = 0;
if (ref_to_worktree_map.worktrees) {
- hashmap_free(&(ref_to_worktree_map.map), 1);
+ hashmap_free_entries(&(ref_to_worktree_map.map),
+ struct ref_to_worktree_entry, ent);
free_worktrees(ref_to_worktree_map.worktrees);
ref_to_worktree_map.worktrees = NULL;
}
diff --git a/refs.c b/refs.c
index cd297ee4bd..1ab0bb54d3 100644
--- a/refs.c
+++ b/refs.c
@@ -1772,7 +1772,7 @@ int resolve_gitlink_ref(const char *submodule, const char *refname,
struct ref_store_hash_entry
{
- struct hashmap_entry ent; /* must be the first member! */
+ struct hashmap_entry ent;
struct ref_store *refs;
@@ -1781,11 +1781,16 @@ struct ref_store_hash_entry
};
static int ref_store_hash_cmp(const void *unused_cmp_data,
- const void *entry, const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
- const struct ref_store_hash_entry *e1 = entry, *e2 = entry_or_key;
- const char *name = keydata ? keydata : e2->name;
+ const struct ref_store_hash_entry *e1, *e2;
+ const char *name;
+
+ e1 = container_of(eptr, const struct ref_store_hash_entry, ent);
+ e2 = container_of(entry_or_key, const struct ref_store_hash_entry, ent);
+ name = keydata ? keydata : e2->name;
return strcmp(e1->name, name);
}
@@ -1796,7 +1801,7 @@ static struct ref_store_hash_entry *alloc_ref_store_hash_entry(
struct ref_store_hash_entry *entry;
FLEX_ALLOC_STR(entry, name, name);
- hashmap_entry_init(entry, strhash(name));
+ hashmap_entry_init(&entry->ent, strhash(name));
entry->refs = refs;
return entry;
}
@@ -1815,12 +1820,15 @@ static struct ref_store *lookup_ref_store_map(struct hashmap *map,
const char *name)
{
struct ref_store_hash_entry *entry;
+ unsigned int hash;
if (!map->tablesize)
/* It's initialized on demand in register_ref_store(). */
return NULL;
- entry = hashmap_get_from_hash(map, strhash(name), name);
+ hash = strhash(name);
+ entry = hashmap_get_entry_from_hash(map, hash, name,
+ struct ref_store_hash_entry, ent);
return entry ? entry->refs : NULL;
}
@@ -1863,10 +1871,13 @@ static void register_ref_store_map(struct hashmap *map,
struct ref_store *refs,
const char *name)
{
+ struct ref_store_hash_entry *entry;
+
if (!map->tablesize)
hashmap_init(map, ref_store_hash_cmp, NULL, 0);
- if (hashmap_put(map, alloc_ref_store_hash_entry(name, refs)))
+ entry = alloc_ref_store_hash_entry(name, refs);
+ if (hashmap_put(map, &entry->ent))
BUG("%s ref_store '%s' initialized twice", type, name);
}
diff --git a/refs/packed-backend.c b/refs/packed-backend.c
index c01c7f5901..4458a0f69c 100644
--- a/refs/packed-backend.c
+++ b/refs/packed-backend.c
@@ -1012,14 +1012,23 @@ int packed_refs_lock(struct ref_store *ref_store, int flags, struct strbuf *err)
}
/*
- * Now that we hold the `packed-refs` lock, make sure that our
- * snapshot matches the current version of the file. Normally
- * `get_snapshot()` does that for us, but that function
- * assumes that when the file is locked, any existing snapshot
- * is still valid. We've just locked the file, but it might
- * have changed the moment *before* we locked it.
+ * There is a stat-validity problem that can cause `update-ref -d`
+ * to lose a newly committed ref value: a rewritten `packed-refs`
+ * file may have the same on-disk file attributes, such as
+ * timestamp, file size and inode value, as the old one while
+ * containing a changed ref value.
+ *
+ * This can happen, with a very small probability, when
+ * `update-ref -d` is called and at the same time another
+ * `pack-refs --all` process is running.
+ *
+ * Now that we hold the `packed-refs` lock, it is important to
+ * make sure we read the latest version of the `packed-refs`
+ * file, whether or not we have just mmap'ed it. So what we
+ * need to do is clear the snapshot if we are already holding
+ * one.
*/
- validate_snapshot(refs);
+ clear_snapshot(refs);
/*
* Now make sure that the packed-refs file as it exists in the
diff --git a/remote.c b/remote.c
index e50f7602ed..5c4666b53a 100644
--- a/remote.c
+++ b/remote.c
@@ -111,14 +111,16 @@ struct remotes_hash_key {
};
static int remotes_hash_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
- const struct remote *a = entry;
- const struct remote *b = entry_or_key;
+ const struct remote *a, *b;
const struct remotes_hash_key *key = keydata;
+ a = container_of(eptr, const struct remote, ent);
+ b = container_of(entry_or_key, const struct remote, ent);
+
if (key)
return strncmp(a->name, key->str, key->len) || a->name[key->len];
else
@@ -135,7 +137,7 @@ static struct remote *make_remote(const char *name, int len)
{
struct remote *ret, *replaced;
struct remotes_hash_key lookup;
- struct hashmap_entry lookup_entry;
+ struct hashmap_entry lookup_entry, *e;
if (!len)
len = strlen(name);
@@ -145,8 +147,9 @@ static struct remote *make_remote(const char *name, int len)
lookup.len = len;
hashmap_entry_init(&lookup_entry, memhash(name, len));
- if ((ret = hashmap_get(&remotes_hash, &lookup_entry, &lookup)) != NULL)
- return ret;
+ e = hashmap_get(&remotes_hash, &lookup_entry, &lookup);
+ if (e)
+ return container_of(e, struct remote, ent);
ret = xcalloc(1, sizeof(struct remote));
ret->prune = -1; /* unspecified */
@@ -158,8 +161,8 @@ static struct remote *make_remote(const char *name, int len)
ALLOC_GROW(remotes, remotes_nr + 1, remotes_alloc);
remotes[remotes_nr++] = ret;
- hashmap_entry_init(ret, lookup_entry.hash);
- replaced = hashmap_put(&remotes_hash, ret);
+ hashmap_entry_init(&ret->ent, lookup_entry.hash);
+ replaced = hashmap_put_entry(&remotes_hash, ret, ent);
assert(replaced == NULL); /* no previous entry overwritten */
return ret;
}
diff --git a/remote.h b/remote.h
index 83e885672b..0e1d2b245b 100644
--- a/remote.h
+++ b/remote.h
@@ -14,7 +14,7 @@ enum {
};
struct remote {
- struct hashmap_entry ent; /* must be first */
+ struct hashmap_entry ent;
const char *name;
int origin, configured_in_repo;
diff --git a/repo-settings.c b/repo-settings.c
new file mode 100644
index 0000000000..05546db98e
--- /dev/null
+++ b/repo-settings.c
@@ -0,0 +1,68 @@
+#include "cache.h"
+#include "config.h"
+#include "repository.h"
+
+#define UPDATE_DEFAULT_BOOL(s,v) do { if (s == -1) { s = v; } } while(0)
+
+void prepare_repo_settings(struct repository *r)
+{
+ int value;
+ char *strval;
+
+ if (r->settings.initialized)
+ return;
+
+ /* Defaults */
+ memset(&r->settings, -1, sizeof(r->settings));
+
+ if (!repo_config_get_bool(r, "core.commitgraph", &value))
+ r->settings.core_commit_graph = value;
+ if (!repo_config_get_bool(r, "gc.writecommitgraph", &value))
+ r->settings.gc_write_commit_graph = value;
+ UPDATE_DEFAULT_BOOL(r->settings.core_commit_graph, 1);
+ UPDATE_DEFAULT_BOOL(r->settings.gc_write_commit_graph, 1);
+
+ if (!repo_config_get_bool(r, "index.version", &value))
+ r->settings.index_version = value;
+ if (!repo_config_get_maybe_bool(r, "core.untrackedcache", &value)) {
+ if (value == 0)
+ r->settings.core_untracked_cache = UNTRACKED_CACHE_REMOVE;
+ else
+ r->settings.core_untracked_cache = UNTRACKED_CACHE_WRITE;
+ } else if (!repo_config_get_string(r, "core.untrackedcache", &strval)) {
+ if (!strcasecmp(strval, "keep"))
+ r->settings.core_untracked_cache = UNTRACKED_CACHE_KEEP;
+
+ free(strval);
+ }
+
+ if (!repo_config_get_string(r, "fetch.negotiationalgorithm", &strval)) {
+ if (!strcasecmp(strval, "skipping"))
+ r->settings.fetch_negotiation_algorithm = FETCH_NEGOTIATION_SKIPPING;
+ else
+ r->settings.fetch_negotiation_algorithm = FETCH_NEGOTIATION_DEFAULT;
+ }
+
+ if (!repo_config_get_bool(r, "pack.usesparse", &value))
+ r->settings.pack_use_sparse = value;
+ if (!repo_config_get_bool(r, "feature.manyfiles", &value) && value) {
+ UPDATE_DEFAULT_BOOL(r->settings.index_version, 4);
+ UPDATE_DEFAULT_BOOL(r->settings.core_untracked_cache, UNTRACKED_CACHE_WRITE);
+ }
+ if (!repo_config_get_bool(r, "fetch.writecommitgraph", &value))
+ r->settings.fetch_write_commit_graph = value;
+ if (!repo_config_get_bool(r, "feature.experimental", &value) && value) {
+ UPDATE_DEFAULT_BOOL(r->settings.pack_use_sparse, 1);
+ UPDATE_DEFAULT_BOOL(r->settings.fetch_negotiation_algorithm, FETCH_NEGOTIATION_SKIPPING);
+ UPDATE_DEFAULT_BOOL(r->settings.fetch_write_commit_graph, 1);
+ }
+ UPDATE_DEFAULT_BOOL(r->settings.fetch_write_commit_graph, 0);
+
+ /* Hack for test programs like test-dump-untracked-cache */
+ if (ignore_untracked_cache_config)
+ r->settings.core_untracked_cache = UNTRACKED_CACHE_KEEP;
+ else
+ UPDATE_DEFAULT_BOOL(r->settings.core_untracked_cache, UNTRACKED_CACHE_KEEP);
+
+ UPDATE_DEFAULT_BOOL(r->settings.fetch_negotiation_algorithm, FETCH_NEGOTIATION_DEFAULT);
+}
diff --git a/repository.h b/repository.h
index 4fb6a5885f..fe42197813 100644
--- a/repository.h
+++ b/repository.h
@@ -11,6 +11,34 @@ struct pathspec;
struct raw_object_store;
struct submodule_cache;
+enum untracked_cache_setting {
+ UNTRACKED_CACHE_UNSET = -1,
+ UNTRACKED_CACHE_REMOVE = 0,
+ UNTRACKED_CACHE_KEEP = 1,
+ UNTRACKED_CACHE_WRITE = 2
+};
+
+enum fetch_negotiation_setting {
+ FETCH_NEGOTIATION_UNSET = -1,
+ FETCH_NEGOTIATION_NONE = 0,
+ FETCH_NEGOTIATION_DEFAULT = 1,
+ FETCH_NEGOTIATION_SKIPPING = 2,
+};
+
+struct repo_settings {
+ int initialized;
+
+ int core_commit_graph;
+ int gc_write_commit_graph;
+ int fetch_write_commit_graph;
+
+ int index_version;
+ enum untracked_cache_setting core_untracked_cache;
+
+ int pack_use_sparse;
+ enum fetch_negotiation_setting fetch_negotiation_algorithm;
+};
+
struct repository {
/* Environment */
/*
@@ -72,6 +100,8 @@ struct repository {
*/
char *submodule_prefix;
+ struct repo_settings settings;
+
/* Subsystems */
/*
* Repository's config which contains key-value pairs from the usual
@@ -95,6 +125,9 @@ struct repository {
/* A unique-id for tracing purposes. */
int trace2_repo_id;
+ /* True if commit-graph has been disabled within this process. */
+ int commit_graph_disabled;
+
/* Configurations */
/* Indicate if a repository has a different 'commondir' from 'gitdir' */
@@ -157,5 +190,6 @@ int repo_read_index_unmerged(struct repository *);
*/
void repo_update_index_if_able(struct repository *, struct lock_file *);
+void prepare_repo_settings(struct repository *r);
#endif /* REPOSITORY_H */
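The intended usage pattern is the one the read-cache.c hunks above already follow for index.version and the untracked cache: call prepare_repo_settings() once, then read the cached values. A small illustrative sketch; the wrapper function is hypothetical:

#include "cache.h"
#include "repository.h"

static int want_commit_graph(struct repository *r)
{
        prepare_repo_settings(r);       /* lazily parses the config once */

        if (r->commit_graph_disabled)
                return 0;
        return r->settings.core_commit_graph;
}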
diff --git a/rerere.c b/rerere.c
index 17abb47321..3e51fdfe58 100644
--- a/rerere.c
+++ b/rerere.c
@@ -52,7 +52,7 @@ static void free_rerere_id(struct string_list_item *item)
static const char *rerere_id_hex(const struct rerere_id *id)
{
- return sha1_to_hex(id->collection->hash);
+ return hash_to_hex(id->collection->hash);
}
static void fit_variant(struct rerere_dir *rr_dir, int variant)
@@ -115,7 +115,7 @@ static int is_rr_file(const char *name, const char *filename, int *variant)
static void scan_rerere_dir(struct rerere_dir *rr_dir)
{
struct dirent *de;
- DIR *dir = opendir(git_path("rr-cache/%s", sha1_to_hex(rr_dir->hash)));
+ DIR *dir = opendir(git_path("rr-cache/%s", hash_to_hex(rr_dir->hash)));
if (!dir)
return;
@@ -186,9 +186,9 @@ static struct rerere_id *new_rerere_id_hex(char *hex)
return id;
}
-static struct rerere_id *new_rerere_id(unsigned char *sha1)
+static struct rerere_id *new_rerere_id(unsigned char *hash)
{
- return new_rerere_id_hex(sha1_to_hex(sha1));
+ return new_rerere_id_hex(hash_to_hex(hash));
}
/*
diff --git a/revision.c b/revision.c
index 07412297f0..0e39b2b8a5 100644
--- a/revision.c
+++ b/revision.c
@@ -28,6 +28,7 @@
#include "commit-graph.h"
#include "prio-queue.h"
#include "hashmap.h"
+#include "utf8.h"
volatile show_early_output_fn_t show_early_output;
@@ -107,30 +108,34 @@ struct path_and_oids_entry {
};
static int path_and_oids_cmp(const void *hashmap_cmp_fn_data,
- const struct path_and_oids_entry *e1,
- const struct path_and_oids_entry *e2,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
+ const struct path_and_oids_entry *e1, *e2;
+
+ e1 = container_of(eptr, const struct path_and_oids_entry, ent);
+ e2 = container_of(entry_or_key, const struct path_and_oids_entry, ent);
+
return strcmp(e1->path, e2->path);
}
static void paths_and_oids_init(struct hashmap *map)
{
- hashmap_init(map, (hashmap_cmp_fn) path_and_oids_cmp, NULL, 0);
+ hashmap_init(map, path_and_oids_cmp, NULL, 0);
}
static void paths_and_oids_clear(struct hashmap *map)
{
struct hashmap_iter iter;
struct path_and_oids_entry *entry;
- hashmap_iter_init(map, &iter);
- while ((entry = (struct path_and_oids_entry *)hashmap_iter_next(&iter))) {
+ hashmap_for_each_entry(map, &iter, entry, ent /* member name */) {
oidset_clear(&entry->trees);
free(entry->path);
}
- hashmap_free(map, 1);
+ hashmap_free_entries(map, struct path_and_oids_entry, ent);
}
static void paths_and_oids_insert(struct hashmap *map,
@@ -141,18 +146,19 @@ static void paths_and_oids_insert(struct hashmap *map,
struct path_and_oids_entry key;
struct path_and_oids_entry *entry;
- hashmap_entry_init(&key, hash);
+ hashmap_entry_init(&key.ent, hash);
/* use a shallow copy for the lookup */
key.path = (char *)path;
oidset_init(&key.trees, 0);
- if (!(entry = (struct path_and_oids_entry *)hashmap_get(map, &key, NULL))) {
+ entry = hashmap_get_entry(map, &key, ent, NULL);
+ if (!entry) {
entry = xcalloc(1, sizeof(struct path_and_oids_entry));
- hashmap_entry_init(entry, hash);
+ hashmap_entry_init(&entry->ent, hash);
entry->path = xstrdup(key.path);
oidset_init(&entry->trees, 16);
- hashmap_put(map, entry);
+ hashmap_put(map, &entry->ent);
}
oidset_insert(&entry->trees, oid);
@@ -235,8 +241,7 @@ void mark_trees_uninteresting_sparse(struct repository *r,
add_children_by_path(r, tree, &map);
}
- hashmap_iter_init(&map, &map_iter);
- while ((entry = hashmap_iter_next(&map_iter)))
+ hashmap_for_each_entry(&map, &map_iter, entry, ent /* member name */)
mark_trees_uninteresting_sparse(r, &entry->trees);
paths_and_oids_clear(&map);
@@ -404,9 +409,7 @@ static struct commit *handle_commit(struct rev_info *revs,
struct tag *tag = (struct tag *) object;
if (revs->tag_objects && !(flags & UNINTERESTING))
add_pending_object(revs, object, tag->tag);
- if (!tag->tagged)
- die("bad tag");
- object = parse_object(revs->repo, &tag->tagged->oid);
+ object = parse_object(revs->repo, get_tagged_oid(tag));
if (!object) {
if (revs->ignore_missing_links || (flags & UNINTERESTING))
return NULL;
@@ -2063,7 +2066,6 @@ static int handle_revision_opt(struct rev_info *revs, int argc, const char **arg
revs->simplify_by_decoration = 1;
revs->limited = 1;
revs->prune = 1;
- load_ref_decorations(NULL, DECORATE_SHORT_REFS);
} else if (!strcmp(arg, "--date-order")) {
revs->sort_order = REV_SORT_BY_COMMIT_DATE;
revs->topo_order = 1;
@@ -2523,6 +2525,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
int i, flags, left, seen_dashdash, got_rev_arg = 0, revarg_opt;
struct argv_array prune_data = ARGV_ARRAY_INIT;
const char *submodule = NULL;
+ int seen_end_of_options = 0;
if (opt)
submodule = opt->submodule;
@@ -2552,7 +2555,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
revarg_opt |= REVARG_CANNOT_BE_FILENAME;
for (left = i = 1; i < argc; i++) {
const char *arg = argv[i];
- if (*arg == '-') {
+ if (!seen_end_of_options && *arg == '-') {
int opts;
opts = handle_revision_pseudo_opt(submodule,
@@ -2574,6 +2577,11 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
continue;
}
+ if (!strcmp(arg, "--end-of-options")) {
+ seen_end_of_options = 1;
+ continue;
+ }
+
opts = handle_revision_opt(revs, argc - i, argv + i,
&left, argv, opt);
if (opts > 0) {
@@ -2684,6 +2692,8 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
grep_commit_pattern_type(GREP_PATTERN_TYPE_UNSPECIFIED,
&revs->grep_filter);
+ if (!is_encoding_utf8(get_log_output_encoding()))
+ revs->grep_filter.ignore_locale = 1;
compile_grep_patterns(&revs->grep_filter);
if (revs->reverse && revs->reflog_info)
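With the hunks above, anything placed after "--end-of-options" is treated as a revision or pathspec even if it begins with a dash, which matters when the argument comes from untrusted input. An illustrative sketch of feeding such an argument to setup_revisions(); user_input and the wrapper function are hypothetical:

#include "cache.h"
#include "revision.h"

static void list_from_untrusted(struct repository *r, const char *user_input)
{
        struct rev_info revs;
        const char *argv[] = { "rev-list", "--end-of-options", user_input, NULL };

        repo_init_revisions(r, &revs, NULL);
        /* user_input is never interpreted as an option here */
        setup_revisions(3, argv, &revs, NULL);
        /* ... walk with prepare_revision_walk() / get_revision() ... */
}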
diff --git a/send-pack.c b/send-pack.c
index 6dc16c3211..34c77cbb1a 100644
--- a/send-pack.c
+++ b/send-pack.c
@@ -40,7 +40,8 @@ int option_parse_push_signed(const struct option *opt,
static void feed_object(const struct object_id *oid, FILE *fh, int negative)
{
- if (negative && !has_object_file(oid))
+ if (negative &&
+ !has_object_file_with_flags(oid, OBJECT_INFO_SKIP_FETCH_OBJECT))
return;
if (negative)
diff --git a/sequencer.c b/sequencer.c
index 34ebf8ed94..9d5964fd81 100644
--- a/sequencer.c
+++ b/sequencer.c
@@ -586,7 +586,7 @@ static int do_recursive_merge(struct repository *r,
struct replay_opts *opts)
{
struct merge_options o;
- struct tree *result, *next_tree, *base_tree, *head_tree;
+ struct tree *next_tree, *base_tree, *head_tree;
int clean;
char **xopt;
struct lock_file index_lock = LOCK_INIT;
@@ -613,11 +613,10 @@ static int do_recursive_merge(struct repository *r,
clean = merge_trees(&o,
head_tree,
- next_tree, base_tree, &result);
+ next_tree, base_tree);
if (is_rebase_i(opts) && clean <= 0)
fputs(o.obuf.buf, stdout);
strbuf_release(&o.obuf);
- diff_warn_rename_limit("merge.renamelimit", o.needed_rename_limit, 0);
if (clean < 0) {
rollback_lock_file(&index_lock);
return clean;
@@ -869,34 +868,6 @@ static char *get_author(const char *message)
return NULL;
}
-/* Read author-script and return an ident line (author <email> timestamp) */
-static const char *read_author_ident(struct strbuf *buf)
-{
- struct strbuf out = STRBUF_INIT;
- char *name, *email, *date;
-
- if (read_author_script(rebase_path_author_script(),
- &name, &email, &date, 0))
- return NULL;
-
- /* validate date since fmt_ident() will die() on bad value */
- if (parse_date(date, &out)){
- warning(_("invalid date format '%s' in '%s'"),
- date, rebase_path_author_script());
- strbuf_release(&out);
- return NULL;
- }
-
- strbuf_reset(&out);
- strbuf_addstr(&out, fmt_ident(name, email, WANT_AUTHOR_IDENT, date, 0));
- strbuf_swap(buf, &out);
- strbuf_release(&out);
- free(name);
- free(email);
- free(date);
- return buf->buf;
-}
-
static const char staged_changes_advice[] =
N_("you have staged changes in your working tree\n"
"If these changes are meant to be squashed into the previous commit, run:\n"
@@ -954,47 +925,6 @@ static int run_git_commit(struct repository *r,
{
struct child_process cmd = CHILD_PROCESS_INIT;
- if ((flags & CREATE_ROOT_COMMIT) && !(flags & AMEND_MSG)) {
- struct strbuf msg = STRBUF_INIT, script = STRBUF_INIT;
- const char *author = NULL;
- struct object_id root_commit, *cache_tree_oid;
- int res = 0;
-
- if (is_rebase_i(opts)) {
- author = read_author_ident(&script);
- if (!author) {
- strbuf_release(&script);
- return -1;
- }
- }
-
- if (!defmsg)
- BUG("root commit without message");
-
- if (!(cache_tree_oid = get_cache_tree_oid(r->index)))
- res = -1;
-
- if (!res)
- res = strbuf_read_file(&msg, defmsg, 0);
-
- if (res <= 0)
- res = error_errno(_("could not read '%s'"), defmsg);
- else
- res = commit_tree(msg.buf, msg.len, cache_tree_oid,
- NULL, &root_commit, author,
- opts->gpg_sign);
-
- strbuf_release(&msg);
- strbuf_release(&script);
- if (!res) {
- update_ref(NULL, "CHERRY_PICK_HEAD", &root_commit, NULL,
- REF_NO_DEREF, UPDATE_REFS_MSG_ON_ERR);
- res = update_ref(NULL, "HEAD", &root_commit, NULL, 0,
- UPDATE_REFS_MSG_ON_ERR);
- }
- return res < 0 ? error(_("writing root commit")) : 0;
- }
-
cmd.git_cmd = 1;
if (is_rebase_i(opts) && read_env_script(&cmd.env_array)) {
@@ -1378,7 +1308,7 @@ static int try_to_commit(struct repository *r,
struct object_id *oid)
{
struct object_id tree;
- struct commit *current_head;
+ struct commit *current_head = NULL;
struct commit_list *parents = NULL;
struct commit_extra_header *extra = NULL;
struct strbuf err = STRBUF_INIT;
@@ -1413,7 +1343,8 @@ static int try_to_commit(struct repository *r,
}
parents = copy_commit_list(current_head->parents);
extra = read_commit_extra_headers(current_head, exclude_gpgsig);
- } else if (current_head) {
+ } else if (current_head &&
+ (!(flags & CREATE_ROOT_COMMIT) || (flags & AMEND_MSG))) {
commit_list_insert(current_head, &parents);
}
@@ -1490,8 +1421,7 @@ static int do_commit(struct repository *r,
{
int res = 1;
- if (!(flags & EDIT_MSG) && !(flags & VERIFY_MSG) &&
- !(flags & CREATE_ROOT_COMMIT)) {
+ if (!(flags & EDIT_MSG) && !(flags & VERIFY_MSG)) {
struct object_id oid;
struct strbuf sb = STRBUF_INIT;
@@ -1775,7 +1705,7 @@ static int do_pick_commit(struct repository *r,
enum todo_command command,
struct commit *commit,
struct replay_opts *opts,
- int final_fixup)
+ int final_fixup, int *check_todo)
{
unsigned int flags = opts->edit ? EDIT_MSG : 0;
const char *msg_file = opts->edit ? NULL : git_path_merge_msg(r);
@@ -1785,7 +1715,7 @@ static int do_pick_commit(struct repository *r,
char *author = NULL;
struct commit_message msg = { NULL, NULL, NULL, NULL };
struct strbuf msgbuf = STRBUF_INIT;
- int res, unborn = 0, allow;
+ int res, unborn = 0, reword = 0, allow;
if (opts->no_commit) {
/*
@@ -1855,7 +1785,7 @@ static int do_pick_commit(struct repository *r,
opts);
if (res || command != TODO_REWORD)
goto leave;
- flags |= EDIT_MSG | AMEND_MSG | VERIFY_MSG;
+ reword = 1;
msg_file = NULL;
goto fast_forward_edit;
}
@@ -1913,7 +1843,7 @@ static int do_pick_commit(struct repository *r,
}
if (command == TODO_REWORD)
- flags |= EDIT_MSG | VERIFY_MSG;
+ reword = 1;
else if (is_fixup(command)) {
if (update_squash_messages(r, command, commit, opts))
return -1;
@@ -1997,13 +1927,21 @@ static int do_pick_commit(struct repository *r,
} else if (allow)
flags |= ALLOW_EMPTY;
if (!opts->no_commit) {
-fast_forward_edit:
if (author || command == TODO_REVERT || (flags & AMEND_MSG))
res = do_commit(r, msg_file, author, opts, flags);
else
res = error(_("unable to parse commit author"));
+ *check_todo = !!(flags & EDIT_MSG);
+ if (!res && reword) {
+fast_forward_edit:
+ res = run_git_commit(r, NULL, opts, EDIT_MSG |
+ VERIFY_MSG | AMEND_MSG |
+ (flags & ALLOW_EMPTY));
+ *check_todo = 1;
+ }
}
+
if (!res && final_fixup) {
unlink(rebase_path_fixup_msg());
unlink(rebase_path_squash_msg());
@@ -3364,6 +3302,9 @@ static int do_merge(struct repository *r,
struct commit *head_commit, *merge_commit, *i;
struct commit_list *bases, *j, *reversed = NULL;
struct commit_list *to_merge = NULL, **tail = &to_merge;
+ const char *strategy = !opts->xopts_nr &&
+ (!opts->strategy || !strcmp(opts->strategy, "recursive")) ?
+ NULL : opts->strategy;
struct merge_options o;
int merge_arg_len, oneline_offset, can_fast_forward, ret, k;
static struct lock_file lock;
@@ -3516,7 +3457,7 @@ static int do_merge(struct repository *r,
goto leave_merge;
}
- if (to_merge->next) {
+ if (strategy || to_merge->next) {
/* Octopus merge */
struct child_process cmd = CHILD_PROCESS_INIT;
@@ -3530,7 +3471,14 @@ static int do_merge(struct repository *r,
cmd.git_cmd = 1;
argv_array_push(&cmd.args, "merge");
argv_array_push(&cmd.args, "-s");
- argv_array_push(&cmd.args, "octopus");
+ if (!strategy)
+ argv_array_push(&cmd.args, "octopus");
+ else {
+ argv_array_push(&cmd.args, strategy);
+ for (k = 0; k < opts->xopts_nr; k++)
+ argv_array_pushf(&cmd.args,
+ "-X%s", opts->xopts[k]);
+ }
argv_array_push(&cmd.args, "--no-edit");
argv_array_push(&cmd.args, "--no-ff");
argv_array_push(&cmd.args, "--no-log");
@@ -3568,7 +3516,7 @@ static int do_merge(struct repository *r,
goto leave_merge;
}
- write_message(oid_to_hex(&merge_commit->object.oid), GIT_SHA1_HEXSZ,
+ write_message(oid_to_hex(&merge_commit->object.oid), the_hash_algo->hexsz,
git_path_merge_head(r), 0);
write_message("no-ff", 5, git_path_merge_mode(r), 0);
@@ -3818,6 +3766,7 @@ static int pick_commits(struct repository *r,
while (todo_list->current < todo_list->nr) {
struct todo_item *item = todo_list->items + todo_list->current;
const char *arg = todo_item_get_arg(todo_list, item);
+ int check_todo = 0;
if (save_todo(todo_list, opts))
return -1;
@@ -3856,7 +3805,8 @@ static int pick_commits(struct repository *r,
command_to_string(item->command), NULL),
1);
res = do_pick_commit(r, item->command, item->commit,
- opts, is_final_fixup(todo_list));
+ opts, is_final_fixup(todo_list),
+ &check_todo);
if (is_rebase_i(opts) && res < 0) {
/* Reschedule */
advise(_(rescheduled_advice),
@@ -3913,7 +3863,6 @@ static int pick_commits(struct repository *r,
} else if (item->command == TODO_EXEC) {
char *end_of_arg = (char *)(arg + item->arg_len);
int saved = *end_of_arg;
- struct stat st;
if (!opts->verbose)
term_clear_line();
@@ -3924,17 +3873,8 @@ static int pick_commits(struct repository *r,
if (res) {
if (opts->reschedule_failed_exec)
reschedule = 1;
- } else if (stat(get_todo_path(opts), &st))
- res = error_errno(_("could not stat '%s'"),
- get_todo_path(opts));
- else if (match_stat_data(&todo_list->stat, &st)) {
- /* Reread the todo file if it has changed. */
- todo_list_release(todo_list);
- if (read_populate_todo(r, todo_list, opts))
- res = -1; /* message was printed */
- /* `current` will be incremented below */
- todo_list->current = -1;
}
+ check_todo = 1;
} else if (item->command == TODO_LABEL) {
if ((res = do_label(r, arg, item->arg_len)))
reschedule = 1;
@@ -3970,6 +3910,20 @@ static int pick_commits(struct repository *r,
item->commit,
arg, item->arg_len,
opts, res, 0);
+ } else if (check_todo && !res) {
+ struct stat st;
+
+ if (stat(get_todo_path(opts), &st)) {
+ res = error_errno(_("could not stat '%s'"),
+ get_todo_path(opts));
+ } else if (match_stat_data(&todo_list->stat, &st)) {
+ /* Reread the todo file if it has changed. */
+ todo_list_release(todo_list);
+ if (read_populate_todo(r, todo_list, opts))
+ res = -1; /* message was printed */
+ /* `current` will be incremented below */
+ todo_list->current = -1;
+ }
}
todo_list->current++;
@@ -4296,9 +4250,12 @@ static int single_pick(struct repository *r,
struct commit *cmit,
struct replay_opts *opts)
{
+ int check_todo;
+
setenv(GIT_REFLOG_ACTION, action_name(opts), 0);
return do_pick_commit(r, opts->action == REPLAY_PICK ?
- TODO_PICK : TODO_REVERT, cmit, opts, 0);
+ TODO_PICK : TODO_REVERT, cmit, opts, 0,
+ &check_todo);
}
int sequencer_pick_revisions(struct repository *r,
@@ -4440,9 +4397,14 @@ struct labels_entry {
char label[FLEX_ARRAY];
};
-static int labels_cmp(const void *fndata, const struct labels_entry *a,
- const struct labels_entry *b, const void *key)
+static int labels_cmp(const void *fndata, const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key, const void *key)
{
+ const struct labels_entry *a, *b;
+
+ a = container_of(eptr, const struct labels_entry, entry);
+ b = container_of(entry_or_key, const struct labels_entry, entry);
+
return key ? strcmp(a->label, key) : strcmp(a->label, b->label);
}
@@ -4487,7 +4449,7 @@ static const char *label_oid(struct object_id *oid, const char *label,
char *p;
strbuf_reset(&state->buf);
- strbuf_grow(&state->buf, GIT_SHA1_HEXSZ);
+ strbuf_grow(&state->buf, GIT_MAX_HEXSZ);
label = p = state->buf.buf;
find_unique_abbrev_r(p, oid, default_abbrev);
@@ -4500,7 +4462,7 @@ static const char *label_oid(struct object_id *oid, const char *label,
size_t i = strlen(p) + 1;
oid_to_hex_r(p, oid);
- for (; i < GIT_SHA1_HEXSZ; i++) {
+ for (; i < the_hash_algo->hexsz; i++) {
char save = p[i];
p[i] = '\0';
if (!hashmap_get_from_hash(&state->labels,
@@ -4538,8 +4500,8 @@ static const char *label_oid(struct object_id *oid, const char *label,
}
FLEX_ALLOC_STR(labels_entry, label, label);
- hashmap_entry_init(labels_entry, strihash(label));
- hashmap_add(&state->labels, labels_entry);
+ hashmap_entry_init(&labels_entry->entry, strihash(label));
+ hashmap_add(&state->labels, &labels_entry->entry);
FLEX_ALLOC_STR(string_entry, string, label);
oidcpy(&string_entry->entry.oid, oid);
@@ -4554,6 +4516,7 @@ static int make_script_with_merges(struct pretty_print_context *pp,
{
int keep_empty = flags & TODO_LIST_KEEP_EMPTY;
int rebase_cousins = flags & TODO_LIST_REBASE_COUSINS;
+ int root_with_onto = flags & TODO_LIST_ROOT_WITH_ONTO;
struct strbuf buf = STRBUF_INIT, oneline = STRBUF_INIT;
struct strbuf label = STRBUF_INIT;
struct commit_list *commits = NULL, **tail = &commits, *iter;
@@ -4573,7 +4536,7 @@ static int make_script_with_merges(struct pretty_print_context *pp,
oidmap_init(&commit2todo, 0);
oidmap_init(&state.commit2label, 0);
- hashmap_init(&state.labels, (hashmap_cmp_fn) labels_cmp, NULL, 0);
+ hashmap_init(&state.labels, labels_cmp, NULL, 0);
strbuf_init(&state.buf, 32);
if (revs->cmdline.nr && (revs->cmdline.rev[0].flags & BOTTOM)) {
@@ -4720,7 +4683,8 @@ static int make_script_with_merges(struct pretty_print_context *pp,
if (!commit)
strbuf_addf(out, "%s %s\n", cmd_reset,
- rebase_cousins ? "onto" : "[new root]");
+ rebase_cousins || root_with_onto ?
+ "onto" : "[new root]");
else {
const char *to = NULL;
@@ -4767,7 +4731,7 @@ static int make_script_with_merges(struct pretty_print_context *pp,
oidmap_free(&commit2todo, 1);
oidmap_free(&state.commit2label, 1);
- hashmap_free(&state.labels, 1);
+ hashmap_free_entries(&state.labels, struct labels_entry, entry);
strbuf_release(&state.buf);
return 0;
@@ -5138,9 +5102,15 @@ struct subject2item_entry {
};
static int subject2item_cmp(const void *fndata,
- const struct subject2item_entry *a,
- const struct subject2item_entry *b, const void *key)
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
+ const void *key)
{
+ const struct subject2item_entry *a, *b;
+
+ a = container_of(eptr, const struct subject2item_entry, entry);
+ b = container_of(entry_or_key, const struct subject2item_entry, entry);
+
return key ? strcmp(a->subject, key) : strcmp(a->subject, b->subject);
}
@@ -5173,8 +5143,7 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
* In that case, last[i] will indicate the index of the latest item to
* be moved to appear after the i'th.
*/
- hashmap_init(&subject2item, (hashmap_cmp_fn) subject2item_cmp,
- NULL, todo_list->nr);
+ hashmap_init(&subject2item, subject2item_cmp, NULL, todo_list->nr);
ALLOC_ARRAY(next, todo_list->nr);
ALLOC_ARRAY(tail, todo_list->nr);
ALLOC_ARRAY(subjects, todo_list->nr);
@@ -5217,8 +5186,11 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
break;
}
- if ((entry = hashmap_get_from_hash(&subject2item,
- strhash(p), p)))
+ entry = hashmap_get_entry_from_hash(&subject2item,
+ strhash(p), p,
+ struct subject2item_entry,
+ entry);
+ if (entry)
/* found by title */
i2 = entry->i;
else if (!strchr(p, ' ') &&
@@ -5252,8 +5224,9 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
strhash(subject), subject)) {
FLEX_ALLOC_MEM(entry, subject, subject, subject_len);
entry->i = i;
- hashmap_entry_init(entry, strhash(entry->subject));
- hashmap_put(&subject2item, entry);
+ hashmap_entry_init(&entry->entry,
+ strhash(entry->subject));
+ hashmap_put(&subject2item, &entry->entry);
}
}
@@ -5287,7 +5260,7 @@ int todo_list_rearrange_squash(struct todo_list *todo_list)
for (i = 0; i < todo_list->nr; i++)
free(subjects[i]);
free(subjects);
- hashmap_free(&subject2item, 1);
+ hashmap_free_entries(&subject2item, struct subject2item_entry, entry);
clear_commit_todo_item(&commit_todo);
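
The sequencer.c hunks above follow the hashmap API rework applied throughout this series: comparison callbacks now receive struct hashmap_entry pointers and recover the containing struct with container_of(), and add/put/free operate on the embedded entry rather than on the whole struct. A minimal sketch of that pattern (the example_entry/example_cmp names are illustrative, not taken from the patch):

struct example_entry {
	struct hashmap_entry ent;	/* no longer has to be the first member */
	const char *key;
};

static int example_cmp(const void *cmp_data,
		       const struct hashmap_entry *eptr,
		       const struct hashmap_entry *entry_or_key,
		       const void *keydata)
{
	const struct example_entry *a, *b;

	a = container_of(eptr, const struct example_entry, ent);
	b = container_of(entry_or_key, const struct example_entry, ent);
	return strcmp(a->key, keydata ? keydata : b->key);
}

void example(void)
{
	struct hashmap map;
	struct example_entry *e = xmalloc(sizeof(*e));

	hashmap_init(&map, example_cmp, NULL, 0);
	e->key = "frotz";
	hashmap_entry_init(&e->ent, strhash(e->key));	/* pass the embedded entry */
	hashmap_add(&map, &e->ent);
	hashmap_free_entries(&map, struct example_entry, ent);	/* frees e, too */
}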
diff --git a/sequencer.h b/sequencer.h
index 6704acbb9c..574260f621 100644
--- a/sequencer.h
+++ b/sequencer.h
@@ -143,6 +143,12 @@ int sequencer_remove_state(struct replay_opts *opts);
*/
#define TODO_LIST_REBASE_COUSINS (1U << 4)
#define TODO_LIST_APPEND_TODO_HELP (1U << 5)
+/*
+ * When generating a script that rebases merges with `--root` *and* with
+ * `--onto`, we do not want to re-generate the root commits.
+ */
+#define TODO_LIST_ROOT_WITH_ONTO (1U << 6)
+
int sequencer_make_script(struct repository *r, struct strbuf *out, int argc,
const char **argv, unsigned flags);
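
The TODO_LIST_* values are independent bits, so the new flag composes with the existing ones. A caller rebasing merges with both --root and --onto would be expected to pass something along these lines (hypothetical variable names, not copied from builtin/rebase.c):

	unsigned flags = 0;

	if (keep_empty)
		flags |= TODO_LIST_KEEP_EMPTY;
	if (rebase_cousins)
		flags |= TODO_LIST_REBASE_COUSINS;
	if (rebase_root && have_onto)
		flags |= TODO_LIST_ROOT_WITH_ONTO;	/* keep the existing root commits */

	sequencer_make_script(the_repository, &todo_buf, argc, argv, flags);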
diff --git a/setup.c b/setup.c
index 8dcb4631f7..e2a479a64f 100644
--- a/setup.c
+++ b/setup.c
@@ -4,6 +4,7 @@
#include "dir.h"
#include "string-list.h"
#include "chdir-notify.h"
+#include "promisor-remote.h"
static int inside_git_dir = -1;
static int inside_work_tree = -1;
@@ -478,7 +479,7 @@ static int check_repository_format_gently(const char *gitdir, struct repository_
}
repository_format_precious_objects = candidate->precious_objects;
- repository_format_partial_clone = xstrdup_or_null(candidate->partial_clone);
+ set_repository_format_partial_clone(candidate->partial_clone);
repository_format_worktree_config = candidate->worktree_config;
string_list_clear(&candidate->unknown_extensions, 0);
@@ -797,7 +798,7 @@ static const char *setup_discovered_git_dir(const char *gitdir,
set_git_dir(gitdir);
inside_git_dir = 0;
inside_work_tree = 1;
- if (offset == cwd->len)
+ if (offset >= cwd->len)
return NULL;
/* Make "offset" point past the '/' (already the case for root dirs) */
@@ -919,7 +920,7 @@ static enum discovery_result setup_git_directory_gently_1(struct strbuf *dir,
const char *env_ceiling_dirs = getenv(CEILING_DIRECTORIES_ENVIRONMENT);
struct string_list ceiling_dirs = STRING_LIST_INIT_DUP;
const char *gitdirenv;
- int ceil_offset = -1, min_offset = has_dos_drive_prefix(dir->buf) ? 3 : 1;
+ int ceil_offset = -1, min_offset = offset_1st_component(dir->buf);
dev_t current_device = 0;
int one_filesystem = 1;
@@ -947,6 +948,12 @@ static enum discovery_result setup_git_directory_gently_1(struct strbuf *dir,
if (ceil_offset < 0)
ceil_offset = min_offset - 2;
+ if (min_offset && min_offset == dir->len &&
+ !is_dir_sep(dir->buf[min_offset - 1])) {
+ strbuf_addch(dir, '/');
+ min_offset++;
+ }
+
/*
* Test in the following order (relative to the dir):
* - .git (file containing "gitdir: <path>")
diff --git a/sha1-file.c b/sha1-file.c
index 487ea35d2d..188de57634 100644
--- a/sha1-file.c
+++ b/sha1-file.c
@@ -30,8 +30,8 @@
#include "mergesort.h"
#include "quote.h"
#include "packfile.h"
-#include "fetch-object.h"
#include "object-store.h"
+#include "promisor-remote.h"
/* The maximum size for an object header. */
#define MAX_HEADER_LEN 32
@@ -55,7 +55,6 @@
"\x6f\xe1\x41\xf7\x74\x91\x20\xa3\x03\x72" \
"\x18\x13"
-const unsigned char null_sha1[GIT_MAX_RAWSZ];
const struct object_id null_oid;
static const struct object_id empty_tree_oid = {
EMPTY_TREE_SHA1_BIN_LITERAL
@@ -952,12 +951,8 @@ void *xmmap_gently(void *start, size_t length,
mmap_limit_check(length);
ret = mmap(start, length, prot, flags, fd, offset);
- if (ret == MAP_FAILED) {
- if (!length)
- return NULL;
- release_pack_memory(length);
- ret = mmap(start, length, prot, flags, fd, offset);
- }
+ if (ret == MAP_FAILED && !length)
+ ret = NULL;
return ret;
}
@@ -1475,16 +1470,17 @@ int oid_object_info_extended(struct repository *r, const struct object_id *oid,
}
/* Check if it is a missing object */
- if (fetch_if_missing && repository_format_partial_clone &&
+ if (fetch_if_missing && has_promisor_remote() &&
!already_retried && r == the_repository &&
!(flags & OBJECT_INFO_SKIP_FETCH_OBJECT)) {
/*
- * TODO Investigate having fetch_object() return
- * TODO error/success and stopping the music here.
- * TODO Pass a repository struct through fetch_object,
- * such that arbitrary repositories work.
+ * TODO Investigate checking promisor_remote_get_direct()
+ * TODO return value and stopping on error here.
+ * TODO Pass a repository struct through
+ * promisor_remote_get_direct(), such that arbitrary
+ * repositories work.
*/
- fetch_objects(repository_format_partial_clone, real, 1);
+ promisor_remote_get_direct(r, real, 1);
already_retried = 1;
continue;
}
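
The sha1-file.c hunk swaps the old single-partial-clone path (repository_format_partial_clone plus fetch_objects()) for the promisor-remote API introduced by this series. In rough terms, code that wants to lazily fetch missing objects now takes this shape (the oids/oid_nr names are placeholders):

	if (has_promisor_remote()) {
		/*
		 * Try each configured promisor remote in turn until the
		 * missing objects have been fetched.
		 */
		promisor_remote_get_direct(the_repository, oids, oid_nr);
	}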
diff --git a/sha1-lookup.c b/sha1-lookup.c
index 796ab68da8..29185844ec 100644
--- a/sha1-lookup.c
+++ b/sha1-lookup.c
@@ -50,7 +50,7 @@ static uint32_t take2(const unsigned char *sha1)
* The sha1 of element i (between 0 and nr - 1) should be returned
* by "fn(i, table)".
*/
-int sha1_pos(const unsigned char *sha1, void *table, size_t nr,
+int sha1_pos(const unsigned char *hash, void *table, size_t nr,
sha1_access_fn fn)
{
size_t hi = nr;
@@ -63,14 +63,14 @@ int sha1_pos(const unsigned char *sha1, void *table, size_t nr,
if (nr != 1) {
size_t lov, hiv, miv, ofs;
- for (ofs = 0; ofs < 18; ofs += 2) {
+ for (ofs = 0; ofs < the_hash_algo->rawsz - 2; ofs += 2) {
lov = take2(fn(0, table) + ofs);
hiv = take2(fn(nr - 1, table) + ofs);
- miv = take2(sha1 + ofs);
+ miv = take2(hash + ofs);
if (miv < lov)
return -1;
if (hiv < miv)
- return -1 - nr;
+ return index_pos_to_insert_pos(nr);
if (lov != hiv) {
/*
* At this point miv could be equal
@@ -88,7 +88,7 @@ int sha1_pos(const unsigned char *sha1, void *table, size_t nr,
do {
int cmp;
- cmp = hashcmp(fn(mi, table), sha1);
+ cmp = hashcmp(fn(mi, table), hash);
if (!cmp)
return mi;
if (cmp > 0)
@@ -97,7 +97,7 @@ int sha1_pos(const unsigned char *sha1, void *table, size_t nr,
lo = mi + 1;
mi = lo + (hi - lo) / 2;
} while (lo < hi);
- return -lo-1;
+ return index_pos_to_insert_pos(lo);
}
int bsearch_hash(const unsigned char *sha1, const uint32_t *fanout_nbo,
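
index_pos_to_insert_pos() replaces the open-coded "-1 - pos" / "-lo - 1" arithmetic so that an insert position that does not fit in an int dies instead of silently overflowing. Its shape is roughly the following (see cache.h for the real definition):

static inline int index_pos_to_insert_pos(uintmax_t pos)
{
	if (pos > INT_MAX)
		die("overflow: -1 - %"PRIuMAX, pos);
	return -1 - (int)pos;
}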
diff --git a/sha1-name.c b/sha1-name.c
index 2989e27b71..200eb373ad 100644
--- a/sha1-name.c
+++ b/sha1-name.c
@@ -403,9 +403,9 @@ static int repo_collect_ambiguous(struct repository *r,
return collect_ambiguous(oid, data);
}
-static struct repository *sort_ambiguous_repo;
-static int sort_ambiguous(const void *a, const void *b)
+static int sort_ambiguous(const void *a, const void *b, void *ctx)
{
+ struct repository *sort_ambiguous_repo = ctx;
int a_type = oid_object_info(sort_ambiguous_repo, a, NULL);
int b_type = oid_object_info(sort_ambiguous_repo, b, NULL);
int a_type_sort;
@@ -434,10 +434,7 @@ static int sort_ambiguous(const void *a, const void *b)
static void sort_ambiguous_oid_array(struct repository *r, struct oid_array *a)
{
- /* mutex will be needed if this code is to be made thread safe */
- sort_ambiguous_repo = r;
- QSORT(a->oid, a->nr, sort_ambiguous);
- sort_ambiguous_repo = NULL;
+ QSORT_S(a->oid, a->nr, sort_ambiguous, r);
}
static enum get_oid_result get_short_oid(struct repository *r,
@@ -1163,13 +1160,22 @@ static enum get_oid_result get_oid_1(struct repository *r,
}
if (has_suffix) {
- int num = 0;
+ unsigned int num = 0;
int len1 = cp - name;
cp++;
- while (cp < name + len)
- num = num * 10 + *cp++ - '0';
+ while (cp < name + len) {
+ unsigned int digit = *cp++ - '0';
+ if (unsigned_mult_overflows(num, 10))
+ return MISSING_OBJECT;
+ num *= 10;
+ if (unsigned_add_overflows(num, digit))
+ return MISSING_OBJECT;
+ num += digit;
+ }
if (!num && len1 == len - 1)
num = 1;
+ else if (num > INT_MAX)
+ return MISSING_OBJECT;
if (has_suffix == '^')
return get_parent(r, name, len1, oid, num);
/* else if (has_suffix == '~') -- goes without saying */
@@ -1289,7 +1295,7 @@ static int get_oid_oneline(struct repository *r,
struct grab_nth_branch_switch_cbdata {
int remaining;
- struct strbuf buf;
+ struct strbuf *sb;
};
static int grab_nth_branch_switch(struct object_id *ooid, struct object_id *noid,
@@ -1307,8 +1313,8 @@ static int grab_nth_branch_switch(struct object_id *ooid, struct object_id *noid
return 0;
if (--(cb->remaining) == 0) {
len = target - match;
- strbuf_reset(&cb->buf);
- strbuf_add(&cb->buf, match, len);
+ strbuf_reset(cb->sb);
+ strbuf_add(cb->sb, match, len);
return 1; /* we are done */
}
return 0;
@@ -1341,18 +1347,15 @@ static int interpret_nth_prior_checkout(struct repository *r,
if (nth <= 0)
return -1;
cb.remaining = nth;
- strbuf_init(&cb.buf, 20);
+ cb.sb = buf;
retval = refs_for_each_reflog_ent_reverse(get_main_ref_store(r),
"HEAD", grab_nth_branch_switch, &cb);
if (0 < retval) {
- strbuf_reset(buf);
- strbuf_addbuf(buf, &cb.buf);
retval = brace - name + 1;
} else
retval = 0;
- strbuf_release(&cb.buf);
return retval;
}
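
The "~<n>"/"^<n>" suffix is now accumulated with the overflow-check macros from git-compat-util.h, so a pathological revision such as HEAD~999999999999999999999 yields MISSING_OBJECT instead of wrapping around. The checked-accumulation pattern, pulled out into a hypothetical helper for illustration:

static int parse_suffix_count(const char *cp, const char *end, unsigned int *out)
{
	unsigned int num = 0;

	while (cp < end) {
		unsigned int digit = *cp++ - '0';

		if (unsigned_mult_overflows(num, 10))
			return -1;	/* num * 10 would wrap */
		num *= 10;
		if (unsigned_add_overflows(num, digit))
			return -1;	/* num + digit would wrap */
		num += digit;
	}
	*out = num;
	return 0;
}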
diff --git a/shallow.c b/shallow.c
index 5fa2b15d37..7fd04afed1 100644
--- a/shallow.c
+++ b/shallow.c
@@ -12,10 +12,7 @@
#include "diff.h"
#include "revision.h"
#include "commit-slab.h"
-#include "revision.h"
#include "list-objects.h"
-#include "commit-slab.h"
-#include "repository.h"
#include "commit-reach.h"
void set_alternate_shallow_file(struct repository *r, const char *path, int override)
@@ -156,6 +153,8 @@ struct commit_list *get_shallow_commits(struct object_array *heads, int depth,
for (i = 0; i < depths.slab_count; i++) {
int j;
+ if (!depths.slab[i])
+ continue;
for (j = 0; j < depths.slab_size; j++)
free(depths.slab[i][j]);
}
diff --git a/compat/qsort.c b/stable-qsort.c
index 7d071afb70..6cbaf39f7b 100644
--- a/compat/qsort.c
+++ b/stable-qsort.c
@@ -1,4 +1,4 @@
-#include "../git-compat-util.h"
+#include "git-compat-util.h"
/*
* A merge sort implementation, simplified from the qsort implementation
@@ -44,8 +44,8 @@ static void msort_with_tmp(void *b, size_t n, size_t s,
memcpy(b, t, (n - n2) * s);
}
-void git_qsort(void *b, size_t n, size_t s,
- int (*cmp)(const void *, const void *))
+void git_stable_qsort(void *b, size_t n, size_t s,
+ int (*cmp)(const void *, const void *))
{
const size_t size = st_mult(n, s);
char buf[1024];
diff --git a/strbuf.c b/strbuf.c
index d30f916858..aa48d179a9 100644
--- a/strbuf.c
+++ b/strbuf.c
@@ -774,8 +774,10 @@ void strbuf_addstr_xml_quoted(struct strbuf *buf, const char *s)
}
}
-static int is_rfc3986_reserved(char ch)
+int is_rfc3986_reserved_or_unreserved(char ch)
{
+ if (is_rfc3986_unreserved(ch))
+ return 1;
switch (ch) {
case '!': case '*': case '\'': case '(': case ')': case ';':
case ':': case '@': case '&': case '=': case '+': case '$':
@@ -785,20 +787,19 @@ static int is_rfc3986_reserved(char ch)
return 0;
}
-static int is_rfc3986_unreserved(char ch)
+int is_rfc3986_unreserved(char ch)
{
return isalnum(ch) ||
ch == '-' || ch == '_' || ch == '.' || ch == '~';
}
static void strbuf_add_urlencode(struct strbuf *sb, const char *s, size_t len,
- int reserved)
+ char_predicate allow_unencoded_fn)
{
strbuf_grow(sb, len);
while (len--) {
char ch = *s++;
- if (is_rfc3986_unreserved(ch) ||
- (!reserved && is_rfc3986_reserved(ch)))
+ if (allow_unencoded_fn(ch))
strbuf_addch(sb, ch);
else
strbuf_addf(sb, "%%%02x", (unsigned char)ch);
@@ -806,9 +807,9 @@ static void strbuf_add_urlencode(struct strbuf *sb, const char *s, size_t len,
}
void strbuf_addstr_urlencode(struct strbuf *sb, const char *s,
- int reserved)
+ char_predicate allow_unencoded_fn)
{
- strbuf_add_urlencode(sb, s, strlen(s), reserved);
+ strbuf_add_urlencode(sb, s, strlen(s), allow_unencoded_fn);
}
static void strbuf_humanise(struct strbuf *buf, off_t bytes,
diff --git a/strbuf.h b/strbuf.h
index f62278a0be..84cf969721 100644
--- a/strbuf.h
+++ b/strbuf.h
@@ -672,8 +672,13 @@ void strbuf_branchname(struct strbuf *sb, const char *name,
*/
int strbuf_check_branch_ref(struct strbuf *sb, const char *name);
+typedef int (*char_predicate)(char ch);
+
+int is_rfc3986_unreserved(char ch);
+int is_rfc3986_reserved_or_unreserved(char ch);
+
void strbuf_addstr_urlencode(struct strbuf *sb, const char *name,
- int reserved);
+ char_predicate allow_unencoded_fn);
__attribute__((format (printf,1,2)))
int printf_ln(const char *fmt, ...);
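
With the char_predicate parameter, callers choose which bytes may stay unencoded instead of passing an opaque "reserved" flag. A usage sketch with the two predicates exported above:

	struct strbuf buf = STRBUF_INIT;

	/* encode everything except RFC 3986 unreserved characters ([A-Za-z0-9._~-]) */
	strbuf_addstr_urlencode(&buf, "a b/c", is_rfc3986_unreserved);

	/* additionally leave RFC 3986 reserved characters (':', '/', ...) unencoded */
	strbuf_reset(&buf);
	strbuf_addstr_urlencode(&buf, "a b/c", is_rfc3986_reserved_or_unreserved);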
diff --git a/sub-process.c b/sub-process.c
index 3f4af93555..1b1af9dcbd 100644
--- a/sub-process.c
+++ b/sub-process.c
@@ -6,12 +6,14 @@
#include "pkt-line.h"
int cmd2process_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct subprocess_entry *e1 = entry;
- const struct subprocess_entry *e2 = entry_or_key;
+ const struct subprocess_entry *e1, *e2;
+
+ e1 = container_of(eptr, const struct subprocess_entry, ent);
+ e2 = container_of(entry_or_key, const struct subprocess_entry, ent);
return strcmp(e1->cmd, e2->cmd);
}
@@ -20,9 +22,9 @@ struct subprocess_entry *subprocess_find_entry(struct hashmap *hashmap, const ch
{
struct subprocess_entry key;
- hashmap_entry_init(&key, strhash(cmd));
+ hashmap_entry_init(&key.ent, strhash(cmd));
key.cmd = cmd;
- return hashmap_get(hashmap, &key, NULL);
+ return hashmap_get_entry(hashmap, &key, ent, NULL);
}
int subprocess_read_status(int fd, struct strbuf *status)
@@ -58,7 +60,7 @@ void subprocess_stop(struct hashmap *hashmap, struct subprocess_entry *entry)
kill(entry->process.pid, SIGTERM);
finish_command(&entry->process);
- hashmap_remove(hashmap, entry, NULL);
+ hashmap_remove(hashmap, &entry->ent, NULL);
}
static void subprocess_exit_handler(struct child_process *process)
@@ -96,7 +98,7 @@ int subprocess_start(struct hashmap *hashmap, struct subprocess_entry *entry, co
return err;
}
- hashmap_entry_init(entry, strhash(cmd));
+ hashmap_entry_init(&entry->ent, strhash(cmd));
err = startfn(entry);
if (err) {
@@ -105,7 +107,7 @@ int subprocess_start(struct hashmap *hashmap, struct subprocess_entry *entry, co
return err;
}
- hashmap_add(hashmap, entry);
+ hashmap_add(hashmap, &entry->ent);
return 0;
}
diff --git a/sub-process.h b/sub-process.h
index 5c182fad98..e85f21fa1a 100644
--- a/sub-process.h
+++ b/sub-process.h
@@ -24,7 +24,7 @@
/* Members should not be accessed directly. */
struct subprocess_entry {
- struct hashmap_entry ent; /* must be the first member! */
+ struct hashmap_entry ent;
const char *cmd;
struct child_process process;
};
@@ -43,8 +43,8 @@ struct subprocess_capability {
/* Function to test two subprocess hashmap entries for equality. */
int cmd2process_cmp(const void *unused_cmp_data,
- const void *e1,
- const void *e2,
+ const struct hashmap_entry *e,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata);
/*
diff --git a/submodule-config.c b/submodule-config.c
index 4264ee216f..401a9b2382 100644
--- a/submodule-config.c
+++ b/submodule-config.c
@@ -38,24 +38,28 @@ enum lookup_type {
};
static int config_path_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct submodule_entry *a = entry;
- const struct submodule_entry *b = entry_or_key;
+ const struct submodule_entry *a, *b;
+
+ a = container_of(eptr, const struct submodule_entry, ent);
+ b = container_of(entry_or_key, const struct submodule_entry, ent);
return strcmp(a->config->path, b->config->path) ||
!oideq(&a->config->gitmodules_oid, &b->config->gitmodules_oid);
}
static int config_name_cmp(const void *unused_cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *unused_keydata)
{
- const struct submodule_entry *a = entry;
- const struct submodule_entry *b = entry_or_key;
+ const struct submodule_entry *a, *b;
+
+ a = container_of(eptr, const struct submodule_entry, ent);
+ b = container_of(entry_or_key, const struct submodule_entry, ent);
return strcmp(a->config->name, b->config->name) ||
!oideq(&a->config->gitmodules_oid, &b->config->gitmodules_oid);
@@ -95,12 +99,12 @@ static void submodule_cache_clear(struct submodule_cache *cache)
* allocation of struct submodule entries. Each is allocated by
* their .gitmodules blob sha1 and submodule name.
*/
- hashmap_iter_init(&cache->for_name, &iter);
- while ((entry = hashmap_iter_next(&iter)))
+ hashmap_for_each_entry(&cache->for_name, &iter, entry,
+ ent /* member name */)
free_one_config(entry);
- hashmap_free(&cache->for_path, 1);
- hashmap_free(&cache->for_name, 1);
+ hashmap_free_entries(&cache->for_path, struct submodule_entry, ent);
+ hashmap_free_entries(&cache->for_name, struct submodule_entry, ent);
cache->initialized = 0;
cache->gitmodules_read = 0;
}
@@ -123,9 +127,9 @@ static void cache_put_path(struct submodule_cache *cache,
unsigned int hash = hash_oid_string(&submodule->gitmodules_oid,
submodule->path);
struct submodule_entry *e = xmalloc(sizeof(*e));
- hashmap_entry_init(e, hash);
+ hashmap_entry_init(&e->ent, hash);
e->config = submodule;
- hashmap_put(&cache->for_path, e);
+ hashmap_put(&cache->for_path, &e->ent);
}
static void cache_remove_path(struct submodule_cache *cache,
@@ -135,9 +139,9 @@ static void cache_remove_path(struct submodule_cache *cache,
submodule->path);
struct submodule_entry e;
struct submodule_entry *removed;
- hashmap_entry_init(&e, hash);
+ hashmap_entry_init(&e.ent, hash);
e.config = submodule;
- removed = hashmap_remove(&cache->for_path, &e, NULL);
+ removed = hashmap_remove_entry(&cache->for_path, &e, ent, NULL);
free(removed);
}
@@ -147,9 +151,9 @@ static void cache_add(struct submodule_cache *cache,
unsigned int hash = hash_oid_string(&submodule->gitmodules_oid,
submodule->name);
struct submodule_entry *e = xmalloc(sizeof(*e));
- hashmap_entry_init(e, hash);
+ hashmap_entry_init(&e->ent, hash);
e->config = submodule;
- hashmap_add(&cache->for_name, e);
+ hashmap_add(&cache->for_name, &e->ent);
}
static const struct submodule *cache_lookup_path(struct submodule_cache *cache,
@@ -163,10 +167,10 @@ static const struct submodule *cache_lookup_path(struct submodule_cache *cache,
oidcpy(&key_config.gitmodules_oid, gitmodules_oid);
key_config.path = path;
- hashmap_entry_init(&key, hash);
+ hashmap_entry_init(&key.ent, hash);
key.config = &key_config;
- entry = hashmap_get(&cache->for_path, &key, NULL);
+ entry = hashmap_get_entry(&cache->for_path, &key, ent, NULL);
if (entry)
return entry->config;
return NULL;
@@ -183,10 +187,10 @@ static struct submodule *cache_lookup_name(struct submodule_cache *cache,
oidcpy(&key_config.gitmodules_oid, gitmodules_oid);
key_config.name = name;
- hashmap_entry_init(&key, hash);
+ hashmap_entry_init(&key.ent, hash);
key.config = &key_config;
- entry = hashmap_get(&cache->for_name, &key, NULL);
+ entry = hashmap_get_entry(&cache->for_name, &key, ent, NULL);
if (entry)
return entry->config;
return NULL;
@@ -550,7 +554,9 @@ static const struct submodule *config_from(struct submodule_cache *cache,
struct hashmap_iter iter;
struct submodule_entry *entry;
- entry = hashmap_iter_first(&cache->for_name, &iter);
+ entry = hashmap_iter_first_entry(&cache->for_name, &iter,
+ struct submodule_entry,
+ ent /* member name */);
if (!entry)
return NULL;
return entry->config;
diff --git a/t/helper/.gitignore b/t/helper/.gitignore
index 2bad28af92..48c7bb0bbb 100644
--- a/t/helper/.gitignore
+++ b/t/helper/.gitignore
@@ -1,5 +1,4 @@
-*
-!*.sh
-!*.[ch]
-!*.gitignore
-
+/test-tool
+/test-fake-ssh
+/test-line-buffer
+/test-svn-fe
diff --git a/t/helper/test-date.c b/t/helper/test-date.c
index 585347ea48..099eff4f0f 100644
--- a/t/helper/test-date.c
+++ b/t/helper/test-date.c
@@ -12,13 +12,13 @@ static const char *usage_msg = "\n"
" test-tool date is64bit\n"
" test-tool date time_t-is64bit\n";
-static void show_relative_dates(const char **argv, struct timeval *now)
+static void show_relative_dates(const char **argv)
{
struct strbuf buf = STRBUF_INIT;
for (; *argv; argv++) {
time_t t = atoi(*argv);
- show_date_relative(t, now, &buf);
+ show_date_relative(t, &buf);
printf("%s -> %s\n", *argv, buf.buf);
}
strbuf_release(&buf);
@@ -74,20 +74,20 @@ static void parse_dates(const char **argv)
strbuf_release(&result);
}
-static void parse_approxidate(const char **argv, struct timeval *now)
+static void parse_approxidate(const char **argv)
{
for (; *argv; argv++) {
timestamp_t t;
- t = approxidate_relative(*argv, now);
+ t = approxidate_relative(*argv);
printf("%s -> %s\n", *argv, show_date(t, 0, DATE_MODE(ISO8601)));
}
}
-static void parse_approx_timestamp(const char **argv, struct timeval *now)
+static void parse_approx_timestamp(const char **argv)
{
for (; *argv; argv++) {
timestamp_t t;
- t = approxidate_relative(*argv, now);
+ t = approxidate_relative(*argv);
printf("%s -> %"PRItime"\n", *argv, t);
}
}
@@ -103,22 +103,13 @@ static void getnanos(const char **argv)
int cmd__date(int argc, const char **argv)
{
- struct timeval now;
const char *x;
- x = getenv("GIT_TEST_DATE_NOW");
- if (x) {
- now.tv_sec = atoi(x);
- now.tv_usec = 0;
- }
- else
- gettimeofday(&now, NULL);
-
argv++;
if (!*argv)
usage(usage_msg);
if (!strcmp(*argv, "relative"))
- show_relative_dates(argv+1, &now);
+ show_relative_dates(argv+1);
else if (!strcmp(*argv, "human"))
show_human_dates(argv+1);
else if (skip_prefix(*argv, "show:", &x))
@@ -126,9 +117,9 @@ int cmd__date(int argc, const char **argv)
else if (!strcmp(*argv, "parse"))
parse_dates(argv+1);
else if (!strcmp(*argv, "approxidate"))
- parse_approxidate(argv+1, &now);
+ parse_approxidate(argv+1);
else if (!strcmp(*argv, "timestamp"))
- parse_approx_timestamp(argv+1, &now);
+ parse_approx_timestamp(argv+1);
else if (!strcmp(*argv, "getnanos"))
getnanos(argv+1);
else if (!strcmp(*argv, "is64bit"))
diff --git a/t/helper/test-hashmap.c b/t/helper/test-hashmap.c
index aaf17b0ddf..f38706216f 100644
--- a/t/helper/test-hashmap.c
+++ b/t/helper/test-hashmap.c
@@ -5,6 +5,7 @@
struct test_entry
{
+ int padding; /* hashmap entry no longer needs to be the first member */
struct hashmap_entry ent;
/* key and value as two \0-terminated strings */
char key[FLEX_ARRAY];
@@ -16,15 +17,17 @@ static const char *get_value(const struct test_entry *e)
}
static int test_entry_cmp(const void *cmp_data,
- const void *entry,
- const void *entry_or_key,
+ const struct hashmap_entry *eptr,
+ const struct hashmap_entry *entry_or_key,
const void *keydata)
{
const int ignore_case = cmp_data ? *((int *)cmp_data) : 0;
- const struct test_entry *e1 = entry;
- const struct test_entry *e2 = entry_or_key;
+ const struct test_entry *e1, *e2;
const char *key = keydata;
+ e1 = container_of(eptr, const struct test_entry, ent);
+ e2 = container_of(entry_or_key, const struct test_entry, ent);
+
if (ignore_case)
return strcasecmp(e1->key, key ? key : e2->key);
else
@@ -37,7 +40,7 @@ static struct test_entry *alloc_test_entry(unsigned int hash,
size_t klen = strlen(key);
size_t vlen = strlen(value);
struct test_entry *entry = xmalloc(st_add4(sizeof(*entry), klen, vlen, 2));
- hashmap_entry_init(entry, hash);
+ hashmap_entry_init(&entry->ent, hash);
memcpy(entry->key, key, klen + 1);
memcpy(entry->key + klen + 1, value, vlen + 1);
return entry;
@@ -103,11 +106,11 @@ static void perf_hashmap(unsigned int method, unsigned int rounds)
/* add entries */
for (i = 0; i < TEST_SIZE; i++) {
- hashmap_entry_init(entries[i], hashes[i]);
- hashmap_add(&map, entries[i]);
+ hashmap_entry_init(&entries[i]->ent, hashes[i]);
+ hashmap_add(&map, &entries[i]->ent);
}
- hashmap_free(&map, 0);
+ hashmap_free(&map);
}
} else {
/* test map lookups */
@@ -116,8 +119,8 @@ static void perf_hashmap(unsigned int method, unsigned int rounds)
/* fill the map (sparsely if specified) */
j = (method & TEST_SPARSE) ? TEST_SIZE / 10 : TEST_SIZE;
for (i = 0; i < j; i++) {
- hashmap_entry_init(entries[i], hashes[i]);
- hashmap_add(&map, entries[i]);
+ hashmap_entry_init(&entries[i]->ent, hashes[i]);
+ hashmap_add(&map, &entries[i]->ent);
}
for (j = 0; j < rounds; j++) {
@@ -127,7 +130,7 @@ static void perf_hashmap(unsigned int method, unsigned int rounds)
}
}
- hashmap_free(&map, 0);
+ hashmap_free(&map);
}
}
@@ -179,7 +182,7 @@ int cmd__hashmap(int argc, const char **argv)
entry = alloc_test_entry(hash, p1, p2);
/* add to hashmap */
- hashmap_add(&map, entry);
+ hashmap_add(&map, &entry->ent);
} else if (!strcmp("put", cmd) && p1 && p2) {
@@ -187,43 +190,44 @@ int cmd__hashmap(int argc, const char **argv)
entry = alloc_test_entry(hash, p1, p2);
/* add / replace entry */
- entry = hashmap_put(&map, entry);
+ entry = hashmap_put_entry(&map, entry, ent);
/* print and free replaced entry, if any */
puts(entry ? get_value(entry) : "NULL");
free(entry);
} else if (!strcmp("get", cmd) && p1) {
-
/* lookup entry in hashmap */
- entry = hashmap_get_from_hash(&map, hash, p1);
+ entry = hashmap_get_entry_from_hash(&map, hash, p1,
+ struct test_entry, ent);
/* print result */
if (!entry)
puts("NULL");
- while (entry) {
+ hashmap_for_each_entry_from(&map, entry, ent)
puts(get_value(entry));
- entry = hashmap_get_next(&map, entry);
- }
} else if (!strcmp("remove", cmd) && p1) {
/* setup static key */
struct hashmap_entry key;
+ struct hashmap_entry *rm;
hashmap_entry_init(&key, hash);
/* remove entry from hashmap */
- entry = hashmap_remove(&map, &key, p1);
+ rm = hashmap_remove(&map, &key, p1);
+ entry = rm ? container_of(rm, struct test_entry, ent)
+ : NULL;
/* print result and free entry*/
puts(entry ? get_value(entry) : "NULL");
free(entry);
} else if (!strcmp("iterate", cmd)) {
-
struct hashmap_iter iter;
- hashmap_iter_init(&map, &iter);
- while ((entry = hashmap_iter_next(&iter)))
+
+ hashmap_for_each_entry(&map, &iter, entry,
+ ent /* member name */)
printf("%s %s\n", entry->key, get_value(entry));
} else if (!strcmp("size", cmd)) {
@@ -258,6 +262,6 @@ int cmd__hashmap(int argc, const char **argv)
}
strbuf_release(&line);
- hashmap_free(&map, 1);
+ hashmap_free_entries(&map, struct test_entry, ent);
return 0;
}
diff --git a/t/helper/test-lazy-init-name-hash.c b/t/helper/test-lazy-init-name-hash.c
index b99a37080d..cd1b4c9736 100644
--- a/t/helper/test-lazy-init-name-hash.c
+++ b/t/helper/test-lazy-init-name-hash.c
@@ -41,17 +41,13 @@ static void dump_run(void)
die("non-threaded code path used");
}
- dir = hashmap_iter_first(&the_index.dir_hash, &iter_dir);
- while (dir) {
+ hashmap_for_each_entry(&the_index.dir_hash, &iter_dir, dir,
+ ent /* member name */)
printf("dir %08x %7d %s\n", dir->ent.hash, dir->nr, dir->name);
- dir = hashmap_iter_next(&iter_dir);
- }
- ce = hashmap_iter_first(&the_index.name_hash, &iter_cache);
- while (ce) {
+ hashmap_for_each_entry(&the_index.name_hash, &iter_cache, ce,
+ ent /* member name */)
printf("name %08x %s\n", ce->ent.hash, ce->name);
- ce = hashmap_iter_next(&iter_cache);
- }
discard_cache();
}
diff --git a/t/helper/test-progress.c b/t/helper/test-progress.c
new file mode 100644
index 0000000000..4e9f7fafdf
--- /dev/null
+++ b/t/helper/test-progress.c
@@ -0,0 +1,81 @@
+/*
+ * A test helper to exercise the progress display.
+ *
+ * Reads instructions from standard input, one instruction per line:
+ *
+ * "progress <items>" - Call display_progress() with the given item count
+ * as parameter.
+ * "throughput <bytes> <millis> - Call display_throughput() with the given
+ * byte count as parameter. The 'millis'
+ * specify the time elapsed since the
+ * start_progress() call.
+ * "update" - Set the 'progress_update' flag.
+ *
+ * See 't0500-progress-display.sh' for examples.
+ */
+#include "test-tool.h"
+#include "gettext.h"
+#include "parse-options.h"
+#include "progress.h"
+#include "strbuf.h"
+
+/*
+ * These are defined in 'progress.c', but are not exposed in 'progress.h',
+ * because they are exclusively for testing.
+ */
+extern int progress_testing;
+extern uint64_t progress_test_ns;
+void progress_test_force_update(void);
+
+int cmd__progress(int argc, const char **argv)
+{
+ uint64_t total = 0;
+ const char *title;
+ struct strbuf line = STRBUF_INIT;
+ struct progress *progress;
+
+ const char *usage[] = {
+ "test-tool progress [--total=<n>] <progress-title>",
+ NULL
+ };
+ struct option options[] = {
+ OPT_INTEGER(0, "total", &total, "total number of items"),
+ OPT_END(),
+ };
+
+ argc = parse_options(argc, argv, NULL, options, usage, 0);
+ if (argc != 1)
+ die("need a title for the progress output");
+ title = argv[0];
+
+ progress_testing = 1;
+ progress = start_progress(title, total);
+ while (strbuf_getline(&line, stdin) != EOF) {
+ char *end;
+
+ if (skip_prefix(line.buf, "progress ", (const char **) &end)) {
+ uint64_t item_count = strtoull(end, &end, 10);
+ if (*end != '\0')
+ die("invalid input: '%s'\n", line.buf);
+ display_progress(progress, item_count);
+ } else if (skip_prefix(line.buf, "throughput ",
+ (const char **) &end)) {
+ uint64_t byte_count, test_ms;
+
+ byte_count = strtoull(end, &end, 10);
+ if (*end != ' ')
+ die("invalid input: '%s'\n", line.buf);
+ test_ms = strtoull(end + 1, &end, 10);
+ if (*end != '\0')
+ die("invalid input: '%s'\n", line.buf);
+ progress_test_ns = test_ms * 1000 * 1000;
+ display_throughput(progress, byte_count);
+ } else if (!strcmp(line.buf, "update"))
+ progress_test_force_update();
+ else
+ die("invalid input: '%s'\n", line.buf);
+ }
+ stop_progress(&progress);
+
+ return 0;
+}
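
The helper above is a thin driver for the progress API in progress.h; for reference, a typical caller looks roughly like this (title and counts invented):

	struct progress *p = start_progress("Working hard", 3 /* total, 0 if unknown */);

	display_progress(p, 1);
	display_throughput(p, 102400);	/* bytes processed so far */
	display_progress(p, 3);
	stop_progress(&p);		/* emits the final ", done." line */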
diff --git a/t/helper/test-read-cache.c b/t/helper/test-read-cache.c
index 7e79b555de..244977a29b 100644
--- a/t/helper/test-read-cache.c
+++ b/t/helper/test-read-cache.c
@@ -4,11 +4,10 @@
int cmd__read_cache(int argc, const char **argv)
{
- int i, cnt = 1, namelen;
+ int i, cnt = 1;
const char *name = NULL;
if (argc > 1 && skip_prefix(argv[1], "--print-and-refresh=", &name)) {
- namelen = strlen(name);
argc--;
argv++;
}
@@ -24,7 +23,7 @@ int cmd__read_cache(int argc, const char **argv)
refresh_index(&the_index, REFRESH_QUIET,
NULL, NULL, NULL);
- pos = index_name_pos(&the_index, name, namelen);
+ pos = index_name_pos(&the_index, name, strlen(name));
if (pos < 0)
die("%s not in index", name);
printf("%s is%s up to date\n", name,
diff --git a/t/helper/test-run-command.c b/t/helper/test-run-command.c
index 2cc93bb69c..ead6dc611a 100644
--- a/t/helper/test-run-command.c
+++ b/t/helper/test-run-command.c
@@ -10,9 +10,14 @@
#include "test-tool.h"
#include "git-compat-util.h"
+#include "cache.h"
#include "run-command.h"
#include "argv-array.h"
#include "strbuf.h"
+#include "parse-options.h"
+#include "string-list.h"
+#include "thread-utils.h"
+#include "wildmatch.h"
#include <string.h>
#include <errno.h>
@@ -50,11 +55,159 @@ static int task_finished(int result,
return 1;
}
+struct testsuite {
+ struct string_list tests, failed;
+ int next;
+ int quiet, immediate, verbose, verbose_log, trace, write_junit_xml;
+};
+#define TESTSUITE_INIT \
+ { STRING_LIST_INIT_DUP, STRING_LIST_INIT_DUP, -1, 0, 0, 0, 0, 0, 0 }
+
+static int next_test(struct child_process *cp, struct strbuf *err, void *cb,
+ void **task_cb)
+{
+ struct testsuite *suite = cb;
+ const char *test;
+ if (suite->next >= suite->tests.nr)
+ return 0;
+
+ test = suite->tests.items[suite->next++].string;
+ argv_array_pushl(&cp->args, "sh", test, NULL);
+ if (suite->quiet)
+ argv_array_push(&cp->args, "--quiet");
+ if (suite->immediate)
+ argv_array_push(&cp->args, "-i");
+ if (suite->verbose)
+ argv_array_push(&cp->args, "-v");
+ if (suite->verbose_log)
+ argv_array_push(&cp->args, "-V");
+ if (suite->trace)
+ argv_array_push(&cp->args, "-x");
+ if (suite->write_junit_xml)
+ argv_array_push(&cp->args, "--write-junit-xml");
+
+ strbuf_addf(err, "Output of '%s':\n", test);
+ *task_cb = (void *)test;
+
+ return 1;
+}
+
+static int test_finished(int result, struct strbuf *err, void *cb,
+ void *task_cb)
+{
+ struct testsuite *suite = cb;
+ const char *name = (const char *)task_cb;
+
+ if (result)
+ string_list_append(&suite->failed, name);
+
+ strbuf_addf(err, "%s: '%s'\n", result ? "FAIL" : "SUCCESS", name);
+
+ return 0;
+}
+
+static int test_failed(struct strbuf *out, void *cb, void *task_cb)
+{
+ struct testsuite *suite = cb;
+ const char *name = (const char *)task_cb;
+
+ string_list_append(&suite->failed, name);
+ strbuf_addf(out, "FAILED TO START: '%s'\n", name);
+
+ return 0;
+}
+
+static const char * const testsuite_usage[] = {
+ "test-run-command testsuite [<options>] [<pattern>...]",
+ NULL
+};
+
+static int testsuite(int argc, const char **argv)
+{
+ struct testsuite suite = TESTSUITE_INIT;
+ int max_jobs = 1, i, ret;
+ DIR *dir;
+ struct dirent *d;
+ struct option options[] = {
+ OPT_BOOL('i', "immediate", &suite.immediate,
+ "stop at first failed test case(s)"),
+ OPT_INTEGER('j', "jobs", &max_jobs, "run <N> jobs in parallel"),
+ OPT_BOOL('q', "quiet", &suite.quiet, "be terse"),
+ OPT_BOOL('v', "verbose", &suite.verbose, "be verbose"),
+ OPT_BOOL('V', "verbose-log", &suite.verbose_log,
+ "be verbose, redirected to a file"),
+ OPT_BOOL('x', "trace", &suite.trace, "trace shell commands"),
+ OPT_BOOL(0, "write-junit-xml", &suite.write_junit_xml,
+ "write JUnit-style XML files"),
+ OPT_END()
+ };
+
+ memset(&suite, 0, sizeof(suite));
+ suite.tests.strdup_strings = suite.failed.strdup_strings = 1;
+
+ argc = parse_options(argc, argv, NULL, options,
+ testsuite_usage, PARSE_OPT_STOP_AT_NON_OPTION);
+
+ if (max_jobs <= 0)
+ max_jobs = online_cpus();
+
+ dir = opendir(".");
+ if (!dir)
+ die("Could not open the current directory");
+ while ((d = readdir(dir))) {
+ const char *p = d->d_name;
+
+ if (*p != 't' || !isdigit(p[1]) || !isdigit(p[2]) ||
+ !isdigit(p[3]) || !isdigit(p[4]) || p[5] != '-' ||
+ !ends_with(p, ".sh"))
+ continue;
+
+ /* No pattern: match all */
+ if (!argc) {
+ string_list_append(&suite.tests, p);
+ continue;
+ }
+
+ for (i = 0; i < argc; i++)
+ if (!wildmatch(argv[i], p, 0)) {
+ string_list_append(&suite.tests, p);
+ break;
+ }
+ }
+ closedir(dir);
+
+ if (!suite.tests.nr)
+ die("No tests match!");
+ if (max_jobs > suite.tests.nr)
+ max_jobs = suite.tests.nr;
+
+ fprintf(stderr, "Running %d tests (%d at a time)\n",
+ suite.tests.nr, max_jobs);
+
+ ret = run_processes_parallel(max_jobs, next_test, test_failed,
+ test_finished, &suite);
+
+ if (suite.failed.nr > 0) {
+ ret = 1;
+ fprintf(stderr, "%d tests failed:\n\n", suite.failed.nr);
+ for (i = 0; i < suite.failed.nr; i++)
+ fprintf(stderr, "\t%s\n", suite.failed.items[i].string);
+ }
+
+ string_list_clear(&suite.tests, 0);
+ string_list_clear(&suite.failed, 0);
+
+ return !!ret;
+}
+
int cmd__run_command(int argc, const char **argv)
{
struct child_process proc = CHILD_PROCESS_INIT;
int jobs;
+ if (argc > 1 && !strcmp(argv[1], "testsuite"))
+ exit(testsuite(argc - 1, argv + 1));
+
if (argc < 3)
return 1;
while (!strcmp(argv[1], "env")) {
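
The new "testsuite" subcommand drives run_processes_parallel() from run-command.h; the callback contract it relies on is sketched below (function and variable names are illustrative):

static int get_next(struct child_process *cp, struct strbuf *err,
		    void *cb, void **task_cb)
{
	/*
	 * Fill cp->args (and optionally *task_cb with per-task state),
	 * then return 1 to schedule the child; return 0 when no work
	 * is left.
	 */
	return 0;
}

static int on_finished(int result, struct strbuf *err, void *cb, void *task_cb)
{
	/* 'result' is the child's exit status; return 0 to keep going */
	return 0;
}

void run_all(void *state)
{
	run_processes_parallel(4 /* max jobs */, get_next,
			       NULL /* fall back to the default start-failure handler */,
			       on_finished, state);
}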
diff --git a/t/helper/test-tool.c b/t/helper/test-tool.c
index ce7e89028c..19ee26d931 100644
--- a/t/helper/test-tool.c
+++ b/t/helper/test-tool.c
@@ -42,6 +42,7 @@ static struct test_cmd cmds[] = {
{ "path-utils", cmd__path_utils },
{ "pkt-line", cmd__pkt_line },
{ "prio-queue", cmd__prio_queue },
+ { "progress", cmd__progress },
{ "reach", cmd__reach },
{ "read-cache", cmd__read_cache },
{ "read-midx", cmd__read_midx },
diff --git a/t/helper/test-tool.h b/t/helper/test-tool.h
index f805bb39ae..c2aa56ef50 100644
--- a/t/helper/test-tool.h
+++ b/t/helper/test-tool.h
@@ -32,6 +32,7 @@ int cmd__parse_options(int argc, const char **argv);
int cmd__path_utils(int argc, const char **argv);
int cmd__pkt_line(int argc, const char **argv);
int cmd__prio_queue(int argc, const char **argv);
+int cmd__progress(int argc, const char **argv);
int cmd__reach(int argc, const char **argv);
int cmd__read_cache(int argc, const char **argv);
int cmd__read_midx(int argc, const char **argv);
diff --git a/t/lib-git-svn.sh b/t/lib-git-svn.sh
index 5d4ae629e1..bc0b9c71f8 100644
--- a/t/lib-git-svn.sh
+++ b/t/lib-git-svn.sh
@@ -69,7 +69,7 @@ svn_cmd () {
maybe_start_httpd () {
loc=${1-svn}
- if git env--helper --type=bool --default=false --exit-code GIT_TEST_HTTPD
+ if git env--helper --type=bool --default=false --exit-code GIT_TEST_SVN_HTTPD
then
. "$TEST_DIRECTORY"/lib-httpd.sh
LIB_HTTPD_SVN="$loc"
diff --git a/t/lib-rebase.sh b/t/lib-rebase.sh
index 7ea30e5006..6d87961e41 100644
--- a/t/lib-rebase.sh
+++ b/t/lib-rebase.sh
@@ -44,10 +44,10 @@ set_fake_editor () {
rm -f "$1"
echo 'rebase -i script before editing:'
cat "$1".tmp
- action=pick
+ action=\&
for line in $FAKE_LINES; do
case $line in
- pick|p|squash|s|fixup|f|edit|e|reword|r|drop|d)
+ pick|p|squash|s|fixup|f|edit|e|reword|r|drop|d|label|l|reset|r|merge|m)
action="$line";;
exec_*|x_*|break|b)
echo "$line" | sed 's/_/ /g' >> "$1";;
@@ -58,11 +58,12 @@ set_fake_editor () {
bad)
action="badcmd";;
fakesha)
+ test \& != "$action" || action=pick
echo "$action XXXXXXX False commit" >> "$1"
action=pick;;
*)
- sed -n "${line}s/^pick/$action/p" < "$1".tmp >> "$1"
- action=pick;;
+ sed -n "${line}s/^[a-z][a-z]*/$action/p" < "$1".tmp >> "$1"
+ action=\&;;
esac
done
echo 'rebase -i script after editing:'
diff --git a/t/perf/p5600-clone-reference.sh b/t/perf/p5601-clone-reference.sh
index 68fed66347..68fed66347 100755
--- a/t/perf/p5600-clone-reference.sh
+++ b/t/perf/p5601-clone-reference.sh
diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh
index 9ca0818cbe..4d3f7ba295 100755
--- a/t/t0000-basic.sh
+++ b/t/t0000-basic.sh
@@ -274,23 +274,23 @@ test_expect_success 'pretend we have a mix of all possible results' "
test_expect_success C_LOCALE_OUTPUT 'test --verbose' '
test_must_fail run_sub_test_lib_test \
- test-verbose "test verbose" --verbose <<-\EOF &&
+ t1234-verbose "test verbose" --verbose <<-\EOF &&
test_expect_success "passing test" true
test_expect_success "test with output" "echo foo"
test_expect_success "failing test" false
test_done
EOF
- mv test-verbose/out test-verbose/out+ &&
- grep -v "^Initialized empty" test-verbose/out+ >test-verbose/out &&
- check_sub_test_lib_test test-verbose <<-\EOF
- > expecting success: true
+ mv t1234-verbose/out t1234-verbose/out+ &&
+ grep -v "^Initialized empty" t1234-verbose/out+ >t1234-verbose/out &&
+ check_sub_test_lib_test t1234-verbose <<-\EOF
+ > expecting success of 1234.1 '\''passing test'\'': true
> ok 1 - passing test
> Z
- > expecting success: echo foo
+ > expecting success of 1234.2 '\''test with output'\'': echo foo
> foo
> ok 2 - test with output
> Z
- > expecting success: false
+ > expecting success of 1234.3 '\''failing test'\'': false
> not ok 3 - failing test
> # false
> Z
@@ -301,17 +301,17 @@ test_expect_success C_LOCALE_OUTPUT 'test --verbose' '
test_expect_success 'test --verbose-only' '
test_must_fail run_sub_test_lib_test \
- test-verbose-only-2 "test verbose-only=2" \
+ t2345-verbose-only-2 "test verbose-only=2" \
--verbose-only=2 <<-\EOF &&
test_expect_success "passing test" true
test_expect_success "test with output" "echo foo"
test_expect_success "failing test" false
test_done
EOF
- check_sub_test_lib_test test-verbose-only-2 <<-\EOF
+ check_sub_test_lib_test t2345-verbose-only-2 <<-\EOF
> ok 1 - passing test
> Z
- > expecting success: echo foo
+ > expecting success of 2345.2 '\''test with output'\'': echo foo
> foo
> ok 2 - test with output
> Z
@@ -391,6 +391,44 @@ test_expect_success 'GIT_SKIP_TESTS sh pattern' "
)
"
+test_expect_success 'GIT_SKIP_TESTS entire suite' "
+ (
+ GIT_SKIP_TESTS='git' && export GIT_SKIP_TESTS &&
+ run_sub_test_lib_test git-skip-tests-entire-suite \
+ 'GIT_SKIP_TESTS entire suite' <<-\\EOF &&
+ for i in 1 2 3
+ do
+ test_expect_success \"passing test #\$i\" 'true'
+ done
+ test_done
+ EOF
+ check_sub_test_lib_test git-skip-tests-entire-suite <<-\\EOF
+ > 1..0 # SKIP skip all tests in git
+ EOF
+ )
+"
+
+test_expect_success 'GIT_SKIP_TESTS does not skip unmatched suite' "
+ (
+ GIT_SKIP_TESTS='notgit' && export GIT_SKIP_TESTS &&
+ run_sub_test_lib_test git-skip-tests-unmatched-suite \
+ 'GIT_SKIP_TESTS does not skip unmatched suite' <<-\\EOF &&
+ for i in 1 2 3
+ do
+ test_expect_success \"passing test #\$i\" 'true'
+ done
+ test_done
+ EOF
+ check_sub_test_lib_test git-skip-tests-unmatched-suite <<-\\EOF
+ > ok 1 - passing test #1
+ > ok 2 - passing test #2
+ > ok 3 - passing test #3
+ > # passed all 3 test(s)
+ > 1..3
+ EOF
+ )
+"
+
test_expect_success '--run basic' "
run_sub_test_lib_test run-basic \
'--run basic' --run='1 3 5' <<-\\EOF &&
diff --git a/t/t0014-alias.sh b/t/t0014-alias.sh
index a070e645d7..2694c81afd 100755
--- a/t/t0014-alias.sh
+++ b/t/t0014-alias.sh
@@ -37,4 +37,11 @@ test_expect_success 'looping aliases - internal execution' '
# test_i18ngrep "^fatal: alias loop detected: expansion of" output
#'
+test_expect_success 'run-command formats empty args properly' '
+ GIT_TRACE=1 git frotz a "" b " " c 2>&1 |
+ sed -ne "/run_command:/s/.*trace: run_command: //p" >actual &&
+ echo "git-frotz a '\'''\'' b '\'' '\'' c" >expect &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t0021-conversion.sh b/t/t0021-conversion.sh
index e10f5f787f..c954c709ad 100755
--- a/t/t0021-conversion.sh
+++ b/t/t0021-conversion.sh
@@ -390,6 +390,9 @@ test_expect_success PERL 'required process filter should filter data' '
EOF
test_cmp_exclude_clean expected.log debug.log &&
+ # Make sure that the file appears dirty, so checkout below has to
+ # run the configured filter.
+ touch test.r &&
filter_git checkout --quiet --no-progress empty-branch &&
cat >expected.log <<-EOF &&
START
diff --git a/t/t0028-working-tree-encoding.sh b/t/t0028-working-tree-encoding.sh
index 1090e650ed..7aa0945d8d 100755
--- a/t/t0028-working-tree-encoding.sh
+++ b/t/t0028-working-tree-encoding.sh
@@ -40,7 +40,7 @@ test_expect_success 'setup test files' '
printf "$text" | write_utf16 >test.utf16.raw &&
printf "$text" | write_utf32 >test.utf32.raw &&
printf "\377\376" >test.utf16lebom.raw &&
- printf "$text" | iconv -f UTF-8 -t UTF-32LE >>test.utf16lebom.raw &&
+ printf "$text" | iconv -f UTF-8 -t UTF-16LE >>test.utf16lebom.raw &&
# Line ending tests
printf "one\ntwo\nthree\n" >lf.utf8.raw &&
@@ -280,4 +280,43 @@ test_expect_success ICONV_SHIFT_JIS 'check roundtrip encoding' '
git reset
'
+# $1: checkout encoding
+# $2: test string
+# $3: binary test string in checkout encoding
+test_commit_utf8_checkout_other () {
+ encoding="$1"
+ orig_string="$2"
+ expect_bytes="$3"
+
+ test_expect_success "Commit UTF-8, checkout $encoding" '
+ test_when_finished "git checkout HEAD -- .gitattributes" &&
+
+ test_ext="commit_utf8_checkout_$encoding" &&
+ test_file="test.$test_ext" &&
+
+ # Commit as UTF-8
+ echo "*.$test_ext text working-tree-encoding=UTF-8" >.gitattributes &&
+ printf "$orig_string" >$test_file &&
+ git add $test_file &&
+ git commit -m "Test data" &&
+
+ # Checkout in tested encoding
+ rm $test_file &&
+ echo "*.$test_ext text working-tree-encoding=$encoding" >.gitattributes &&
+ git checkout HEAD -- $test_file &&
+
+ # Test
+ printf $expect_bytes >$test_file.raw &&
+ test_cmp_bin $test_file.raw $test_file
+ '
+}
+
+test_commit_utf8_checkout_other "UTF-8" "Test Тест" "\124\145\163\164\040\320\242\320\265\321\201\321\202"
+test_commit_utf8_checkout_other "UTF-16LE" "Test Тест" "\124\000\145\000\163\000\164\000\040\000\042\004\065\004\101\004\102\004"
+test_commit_utf8_checkout_other "UTF-16BE" "Test Тест" "\000\124\000\145\000\163\000\164\000\040\004\042\004\065\004\101\004\102"
+test_commit_utf8_checkout_other "UTF-16LE-BOM" "Test Тест" "\377\376\124\000\145\000\163\000\164\000\040\000\042\004\065\004\101\004\102\004"
+test_commit_utf8_checkout_other "UTF-16BE-BOM" "Test Тест" "\376\377\000\124\000\145\000\163\000\164\000\040\004\042\004\065\004\101\004\102"
+test_commit_utf8_checkout_other "UTF-32LE" "Test Тест" "\124\000\000\000\145\000\000\000\163\000\000\000\164\000\000\000\040\000\000\000\042\004\000\000\065\004\000\000\101\004\000\000\102\004\000\000"
+test_commit_utf8_checkout_other "UTF-32BE" "Test Тест" "\000\000\000\124\000\000\000\145\000\000\000\163\000\000\000\164\000\000\000\040\000\000\004\042\000\000\004\065\000\000\004\101\000\000\004\102"
+
test_done
diff --git a/t/t0040-parse-options.sh b/t/t0040-parse-options.sh
index cebc77fab0..705a136ed9 100755
--- a/t/t0040-parse-options.sh
+++ b/t/t0040-parse-options.sh
@@ -399,4 +399,11 @@ test_expect_success 'GIT_TEST_DISALLOW_ABBREVIATED_OPTIONS works' '
test-tool parse-options --ye
'
+test_expect_success '--end-of-options treats remainder as args' '
+ test-tool parse-options \
+ --expect="verbose: -1" \
+ --expect="arg 00: --verbose" \
+ --end-of-options --verbose
+'
+
test_done
diff --git a/t/t0050-filesystem.sh b/t/t0050-filesystem.sh
index 192c94eccd..608673fb77 100755
--- a/t/t0050-filesystem.sh
+++ b/t/t0050-filesystem.sh
@@ -131,4 +131,24 @@ $test_unicode 'merge (silent unicode normalization)' '
git merge topic
'
+test_expect_success CASE_INSENSITIVE_FS 'checkout with no pathspec and a case insensitive fs' '
+ git init repo &&
+ (
+ cd repo &&
+
+ >Gitweb &&
+ git add Gitweb &&
+ git commit -m "add Gitweb" &&
+
+ git checkout --orphan todo &&
+ git reset --hard &&
+ mkdir -p gitweb/subdir &&
+ >gitweb/subdir/file &&
+ git add gitweb &&
+ git commit -m "add gitweb/subdir/file" &&
+
+ git checkout master
+ )
+'
+
test_done
diff --git a/t/t0061-run-command.sh b/t/t0061-run-command.sh
index 015fac8b5d..17c9c0f3bb 100755
--- a/t/t0061-run-command.sh
+++ b/t/t0061-run-command.sh
@@ -210,10 +210,23 @@ test_expect_success MINGW 'verify curlies are quoted properly' '
test_cmp expect actual
'
-test_expect_success MINGW 'can spawn with argv[0] containing spaces' '
- cp "$GIT_BUILD_DIR/t/helper/test-fake-ssh$X" ./ &&
- test_must_fail "$PWD/test-fake-ssh$X" 2>err &&
- grep TRASH_DIRECTORY err
+test_expect_success MINGW 'can spawn .bat with argv[0] containing spaces' '
+ bat="$TRASH_DIRECTORY/bat with spaces in name.bat" &&
+
+ # Every .bat invocation will log its arguments to file "out"
+ rm -f out &&
+ echo "echo %* >>out" >"$bat" &&
+
+ # Ask git to invoke .bat; clone will fail due to fake SSH helper
+ test_must_fail env GIT_SSH="$bat" git clone myhost:src ssh-clone &&
+
+ # Spawning .bat can fail if there are two quoted cmd.exe arguments.
+ # .bat itself is first (due to spaces in name), so just one more is
+ # needed to verify. GIT_SSH will invoke .bat multiple times:
+ # 1) -G myhost
+ # 2) myhost "git-upload-pack src"
+ # First invocation will always succeed. Test the second one.
+ grep "git-upload-pack" out
'
test_done
diff --git a/t/t0211-trace2-perf.sh b/t/t0211-trace2-perf.sh
index 2c3ad6e8c1..6ee8ee3b67 100755
--- a/t/t0211-trace2-perf.sh
+++ b/t/t0211-trace2-perf.sh
@@ -130,11 +130,11 @@ test_expect_success 'perf stream, child processes' '
d0|main|version|||||$V
d0|main|start||_T_ABS_|||_EXE_ trace2 004child test-tool trace2 004child test-tool trace2 001return 0
d0|main|cmd_name|||||trace2 (trace2)
- d0|main|child_start||_T_ABS_|||[ch0] class:? argv: test-tool trace2 004child test-tool trace2 001return 0
+ d0|main|child_start||_T_ABS_|||[ch0] class:? argv:[test-tool trace2 004child test-tool trace2 001return 0]
d1|main|version|||||$V
d1|main|start||_T_ABS_|||_EXE_ trace2 004child test-tool trace2 001return 0
d1|main|cmd_name|||||trace2 (trace2/trace2)
- d1|main|child_start||_T_ABS_|||[ch0] class:? argv: test-tool trace2 001return 0
+ d1|main|child_start||_T_ABS_|||[ch0] class:? argv:[test-tool trace2 001return 0]
d2|main|version|||||$V
d2|main|start||_T_ABS_|||_EXE_ trace2 001return 0
d2|main|cmd_name|||||trace2 (trace2/trace2/trace2)
diff --git a/t/t0212-trace2-event.sh b/t/t0212-trace2-event.sh
index ff5b9cc729..7065a1b937 100755
--- a/t/t0212-trace2-event.sh
+++ b/t/t0212-trace2-event.sh
@@ -265,4 +265,23 @@ test_expect_success JSON_PP 'using global config, event stream, error event' '
test_cmp expect actual
'
+test_expect_success 'discard traces when there are too many files' '
+ mkdir trace_target_dir &&
+ test_when_finished "rm -r trace_target_dir" &&
+ (
+ GIT_TRACE2_MAX_FILES=5 &&
+ export GIT_TRACE2_MAX_FILES &&
+ cd trace_target_dir &&
+ test_seq $GIT_TRACE2_MAX_FILES >../expected_filenames.txt &&
+ xargs touch <../expected_filenames.txt &&
+ cd .. &&
+ GIT_TRACE2_EVENT="$(pwd)/trace_target_dir" test-tool trace2 001return 0
+ ) &&
+ echo git-trace2-discard >>expected_filenames.txt &&
+ ls trace_target_dir >ls_output.txt &&
+ test_cmp expected_filenames.txt ls_output.txt &&
+ head -n1 trace_target_dir/git-trace2-discard | grep \"event\":\"version\" &&
+ head -n2 trace_target_dir/git-trace2-discard | tail -n1 | grep \"event\":\"too_many_files\"
+'
+
test_done
diff --git a/t/t0410-partial-clone.sh b/t/t0410-partial-clone.sh
index 5bd892f2f7..a3988bd4b8 100755
--- a/t/t0410-partial-clone.sh
+++ b/t/t0410-partial-clone.sh
@@ -26,7 +26,7 @@ promise_and_delete () {
test_expect_success 'extensions.partialclone without filter' '
test_create_repo server &&
git clone --filter="blob:none" "file://$(pwd)/server" client &&
- git -C client config --unset core.partialclonefilter &&
+ git -C client config --unset remote.origin.partialclonefilter &&
git -C client fetch origin
'
@@ -166,8 +166,9 @@ test_expect_success 'fetching of missing objects' '
# associated packfile contains the object
ls repo/.git/objects/pack/pack-*.promisor >promisorlist &&
test_line_count = 1 promisorlist &&
- IDX=$(cat promisorlist | sed "s/promisor$/idx/") &&
- git verify-pack --verbose "$IDX" | grep "$HASH"
+ IDX=$(sed "s/promisor$/idx/" promisorlist) &&
+ git verify-pack --verbose "$IDX" >out &&
+ grep "$HASH" out
'
test_expect_success 'fetching of missing objects works with ref-in-want enabled' '
@@ -182,8 +183,55 @@ test_expect_success 'fetching of missing objects works with ref-in-want enabled'
grep "git< fetch=.*ref-in-want" trace
'
+test_expect_success 'fetching of missing objects from another promisor remote' '
+ git clone "file://$(pwd)/server" server2 &&
+ test_commit -C server2 bar &&
+ git -C server2 repack -a -d --write-bitmap-index &&
+ HASH2=$(git -C server2 rev-parse bar) &&
+
+ git -C repo remote add server2 "file://$(pwd)/server2" &&
+ git -C repo config remote.server2.promisor true &&
+ git -C repo cat-file -p "$HASH2" &&
+
+ git -C repo fetch server2 &&
+ rm -rf repo/.git/objects/* &&
+ git -C repo cat-file -p "$HASH2" &&
+
+ # Ensure that the .promisor file is written, and check that its
+ # associated packfile contains the object
+ ls repo/.git/objects/pack/pack-*.promisor >promisorlist &&
+ test_line_count = 1 promisorlist &&
+ IDX=$(sed "s/promisor$/idx/" promisorlist) &&
+ git verify-pack --verbose "$IDX" >out &&
+ grep "$HASH2" out
+'
+
+test_expect_success 'fetching of missing objects configures a promisor remote' '
+ git clone "file://$(pwd)/server" server3 &&
+ test_commit -C server3 baz &&
+ git -C server3 repack -a -d --write-bitmap-index &&
+ HASH3=$(git -C server3 rev-parse baz) &&
+ git -C server3 config uploadpack.allowfilter 1 &&
+
+ rm repo/.git/objects/pack/pack-*.promisor &&
+
+ git -C repo remote add server3 "file://$(pwd)/server3" &&
+ git -C repo fetch --filter="blob:none" server3 $HASH3 &&
+
+ test_cmp_config -C repo true remote.server3.promisor &&
+
+ # Ensure that the .promisor file is written, and check that its
+ # associated packfile contains the object
+ ls repo/.git/objects/pack/pack-*.promisor >promisorlist &&
+ test_line_count = 1 promisorlist &&
+ IDX=$(sed "s/promisor$/idx/" promisorlist) &&
+ git verify-pack --verbose "$IDX" >out &&
+ grep "$HASH3" out
+'
+
test_expect_success 'fetching of missing blobs works' '
- rm -rf server repo &&
+ rm -rf server server2 repo &&
+ rm -rf server server3 repo &&
test_create_repo server &&
test_commit -C server foo &&
git -C server repack -a -d --write-bitmap-index &&
@@ -234,7 +282,7 @@ test_expect_success 'rev-list stops traversal at missing and promised commit' '
git -C repo config core.repositoryformatversion 1 &&
git -C repo config extensions.partialclone "arbitrary string" &&
- GIT_TEST_COMMIT_GRAPH=0 git -C repo rev-list --exclude-promisor-objects --objects bar >out &&
+ GIT_TEST_COMMIT_GRAPH=0 git -C repo -c core.commitGraph=false rev-list --exclude-promisor-objects --objects bar >out &&
grep $(git -C repo rev-parse bar) out &&
! grep $FOO out
'
@@ -381,6 +429,19 @@ test_expect_success 'rev-list dies for missing objects on cmd line' '
done
'
+test_expect_success 'single promisor remote can be re-initialized gracefully' '
+ # ensure one promisor is in the promisors list
+ rm -rf repo &&
+ test_create_repo repo &&
+ test_create_repo other &&
+ git -C repo remote add foo "file://$(pwd)/other" &&
+ git -C repo config remote.foo.promisor true &&
+ git -C repo config extensions.partialclone foo &&
+
+ # reinitialize the promisors list
+ git -C repo fetch --filter=blob:none foo
+'
+
test_expect_success 'gc repacks promisor objects separately from non-promisor objects' '
rm -rf repo &&
test_create_repo repo &&
@@ -492,6 +553,20 @@ test_expect_success 'gc stops traversal when a missing but promised object is re
! grep "$TREE_HASH" out
'
+test_expect_success 'do not fetch when checking existence of tree we construct ourselves' '
+ rm -rf repo &&
+ test_create_repo repo &&
+ test_commit -C repo base &&
+ test_commit -C repo side1 &&
+ git -C repo checkout base &&
+ test_commit -C repo side2 &&
+
+ git -C repo config core.repositoryformatversion 1 &&
+ git -C repo config extensions.partialclone "arbitrary string" &&
+
+ git -C repo cherry-pick side1
+'
+
. "$TEST_DIRECTORY"/lib-httpd.sh
start_httpd
@@ -514,8 +589,12 @@ test_expect_success 'fetching of missing objects from an HTTP server' '
# associated packfile contains the object
ls repo/.git/objects/pack/pack-*.promisor >promisorlist &&
test_line_count = 1 promisorlist &&
- IDX=$(cat promisorlist | sed "s/promisor$/idx/") &&
- git verify-pack --verbose "$IDX" | grep "$HASH"
+ IDX=$(sed "s/promisor$/idx/" promisorlist) &&
+ git verify-pack --verbose "$IDX" >out &&
+ grep "$HASH" out
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t0500-progress-display.sh b/t/t0500-progress-display.sh
new file mode 100755
index 0000000000..24ccbd8d3b
--- /dev/null
+++ b/t/t0500-progress-display.sh
@@ -0,0 +1,286 @@
+#!/bin/sh
+
+test_description='progress display'
+
+. ./test-lib.sh
+
+show_cr () {
+ tr '\015' Q | sed -e "s/Q/<CR>\\$LF/g"
+}
+
+test_expect_success 'simple progress display' '
+ cat >expect <<-\EOF &&
+ Working hard: 1<CR>
+ Working hard: 2<CR>
+ Working hard: 5<CR>
+ Working hard: 5, done.
+ EOF
+
+ cat >in <<-\EOF &&
+ update
+ progress 1
+ update
+ progress 2
+ progress 3
+ progress 4
+ update
+ progress 5
+ EOF
+ test-tool progress "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display with total' '
+ cat >expect <<-\EOF &&
+ Working hard: 33% (1/3)<CR>
+ Working hard: 66% (2/3)<CR>
+ Working hard: 100% (3/3)<CR>
+ Working hard: 100% (3/3), done.
+ EOF
+
+ cat >in <<-\EOF &&
+ progress 1
+ progress 2
+ progress 3
+ EOF
+ test-tool progress --total=3 "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display breaks long lines #1' '
+ sed -e "s/Z$//" >expect <<\EOF &&
+Working hard.......2.........3.........4.........5.........6: 0% (100/100000)<CR>
+Working hard.......2.........3.........4.........5.........6: 1% (1000/100000)<CR>
+Working hard.......2.........3.........4.........5.........6: Z
+ 10% (10000/100000)<CR>
+ 100% (100000/100000)<CR>
+ 100% (100000/100000), done.
+EOF
+
+ cat >in <<-\EOF &&
+ progress 100
+ progress 1000
+ progress 10000
+ progress 100000
+ EOF
+ test-tool progress --total=100000 \
+ "Working hard.......2.........3.........4.........5.........6" \
+ <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display breaks long lines #2' '
+ # Note: we dont need that many spaces after the title to cover up
+ # the last line before breaking the progress line.
+ sed -e "s/Z$//" >expect <<\EOF &&
+Working hard.......2.........3.........4.........5.........6: 0% (1/100000)<CR>
+Working hard.......2.........3.........4.........5.........6: 0% (2/100000)<CR>
+Working hard.......2.........3.........4.........5.........6: Z
+ 10% (10000/100000)<CR>
+ 100% (100000/100000)<CR>
+ 100% (100000/100000), done.
+EOF
+
+ cat >in <<-\EOF &&
+ update
+ progress 1
+ update
+ progress 2
+ progress 10000
+ progress 100000
+ EOF
+ test-tool progress --total=100000 \
+ "Working hard.......2.........3.........4.........5.........6" \
+ <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display breaks long lines #3 - even the first is too long' '
+ # Note: we dont actually need any spaces at the end of the title
+ # line, because there is no previous progress line to cover up.
+ sed -e "s/Z$//" >expect <<\EOF &&
+Working hard.......2.........3.........4.........5.........6: Z
+ 25% (25000/100000)<CR>
+ 50% (50000/100000)<CR>
+ 75% (75000/100000)<CR>
+ 100% (100000/100000)<CR>
+ 100% (100000/100000), done.
+EOF
+
+ cat >in <<-\EOF &&
+ progress 25000
+ progress 50000
+ progress 75000
+ progress 100000
+ EOF
+ test-tool progress --total=100000 \
+ "Working hard.......2.........3.........4.........5.........6" \
+ <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display breaks long lines #4 - title line matches terminal width' '
+ cat >expect <<\EOF &&
+Working hard.......2.........3.........4.........5.........6.........7.........:
+ 25% (25000/100000)<CR>
+ 50% (50000/100000)<CR>
+ 75% (75000/100000)<CR>
+ 100% (100000/100000)<CR>
+ 100% (100000/100000), done.
+EOF
+
+ cat >in <<-\EOF &&
+ progress 25000
+ progress 50000
+ progress 75000
+ progress 100000
+ EOF
+ test-tool progress --total=100000 \
+ "Working hard.......2.........3.........4.........5.........6.........7........." \
+ <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+# Progress counter goes backwards; this should not happen in practice.
+test_expect_success 'progress shortens - crazy caller' '
+ cat >expect <<-\EOF &&
+ Working hard: 10% (100/1000)<CR>
+ Working hard: 20% (200/1000)<CR>
+ Working hard: 0% (1/1000) <CR>
+ Working hard: 100% (1000/1000)<CR>
+ Working hard: 100% (1000/1000), done.
+ EOF
+
+ cat >in <<-\EOF &&
+ progress 100
+ progress 200
+ progress 1
+ progress 1000
+ EOF
+ test-tool progress --total=1000 "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display with throughput' '
+ cat >expect <<-\EOF &&
+ Working hard: 10<CR>
+ Working hard: 20, 200.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 30, 300.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 40, 400.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 40, 400.00 KiB | 100.00 KiB/s, done.
+ EOF
+
+ cat >in <<-\EOF &&
+ throughput 102400 1000
+ update
+ progress 10
+ throughput 204800 2000
+ update
+ progress 20
+ throughput 307200 3000
+ update
+ progress 30
+ throughput 409600 4000
+ update
+ progress 40
+ EOF
+ test-tool progress "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'progress display with throughput and total' '
+ cat >expect <<-\EOF &&
+ Working hard: 25% (10/40)<CR>
+ Working hard: 50% (20/40), 200.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 75% (30/40), 300.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 100% (40/40), 400.00 KiB | 100.00 KiB/s<CR>
+ Working hard: 100% (40/40), 400.00 KiB | 100.00 KiB/s, done.
+ EOF
+
+ cat >in <<-\EOF &&
+ throughput 102400 1000
+ progress 10
+ throughput 204800 2000
+ progress 20
+ throughput 307200 3000
+ progress 30
+ throughput 409600 4000
+ progress 40
+ EOF
+ test-tool progress --total=40 "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'cover up after throughput shortens' '
+ cat >expect <<-\EOF &&
+ Working hard: 1<CR>
+ Working hard: 2, 800.00 KiB | 400.00 KiB/s<CR>
+ Working hard: 3, 1.17 MiB | 400.00 KiB/s <CR>
+ Working hard: 4, 1.56 MiB | 400.00 KiB/s<CR>
+ Working hard: 4, 1.56 MiB | 400.00 KiB/s, done.
+ EOF
+
+ cat >in <<-\EOF &&
+ throughput 409600 1000
+ update
+ progress 1
+ throughput 819200 2000
+ update
+ progress 2
+ throughput 1228800 3000
+ update
+ progress 3
+ throughput 1638400 4000
+ update
+ progress 4
+ EOF
+ test-tool progress "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_expect_success 'cover up after throughput shortens a lot' '
+ cat >expect <<-\EOF &&
+ Working hard: 1<CR>
+ Working hard: 2, 1000.00 KiB | 1000.00 KiB/s<CR>
+ Working hard: 3, 3.00 MiB | 1.50 MiB/s <CR>
+ Working hard: 3, 3.00 MiB | 1024.00 KiB/s, done.
+ EOF
+
+ cat >in <<-\EOF &&
+ throughput 1 1000
+ update
+ progress 1
+ throughput 1024000 2000
+ update
+ progress 2
+ throughput 3145728 3000
+ update
+ progress 3
+ EOF
+ test-tool progress "Working hard" <in 2>stderr &&
+
+ show_cr <stderr >out &&
+ test_i18ncmp expect out
+'
+
+test_done
diff --git a/t/t1300-config.sh b/t/t1300-config.sh
index 428177c390..983a0a1583 100755
--- a/t/t1300-config.sh
+++ b/t/t1300-config.sh
@@ -1294,26 +1294,25 @@ test_expect_success 'git -c is not confused by empty environment' '
GIT_CONFIG_PARAMETERS="" git -c x.one=1 config --list
'
-sq="'"
test_expect_success 'detect bogus GIT_CONFIG_PARAMETERS' '
cat >expect <<-\EOF &&
env.one one
env.two two
EOF
- GIT_CONFIG_PARAMETERS="${sq}env.one=one${sq} ${sq}env.two=two${sq}" \
+ GIT_CONFIG_PARAMETERS="${SQ}env.one=one${SQ} ${SQ}env.two=two${SQ}" \
git config --get-regexp "env.*" >actual &&
test_cmp expect actual &&
cat >expect <<-EOF &&
- env.one one${sq}
+ env.one one${SQ}
env.two two
EOF
- GIT_CONFIG_PARAMETERS="${sq}env.one=one${sq}\\$sq$sq$sq ${sq}env.two=two${sq}" \
+ GIT_CONFIG_PARAMETERS="${SQ}env.one=one${SQ}\\$SQ$SQ$SQ ${SQ}env.two=two${SQ}" \
git config --get-regexp "env.*" >actual &&
test_cmp expect actual &&
test_must_fail env \
- GIT_CONFIG_PARAMETERS="${sq}env.one=one${sq}\\$sq ${sq}env.two=two${sq}" \
+ GIT_CONFIG_PARAMETERS="${SQ}env.one=one${SQ}\\$SQ ${SQ}env.two=two${SQ}" \
git config --get-regexp "env.*"
'
diff --git a/t/t1309-early-config.sh b/t/t1309-early-config.sh
index 0c37e7180d..3a0de0ddaa 100755
--- a/t/t1309-early-config.sh
+++ b/t/t1309-early-config.sh
@@ -91,7 +91,12 @@ test_expect_failure 'ignore .git/ with invalid config' '
test_expect_success 'early config and onbranch' '
echo "[broken" >broken &&
- test_with_config "[includeif \"onbranch:refs/heads/master\"]path=../broken"
+ test_with_config "[includeif \"onbranch:master\"]path=../broken"
+'
+
+test_expect_success 'onbranch config outside of git repo' '
+ test_config_global includeIf.onbranch:master.path non-existent &&
+ nongit git help
'
test_done
diff --git a/t/t1404-update-ref-errors.sh b/t/t1404-update-ref-errors.sh
index 970c5c36b9..2d142e5535 100755
--- a/t/t1404-update-ref-errors.sh
+++ b/t/t1404-update-ref-errors.sh
@@ -32,8 +32,6 @@ test_update_rejected () {
test_cmp unchanged actual
}
-Q="'"
-
# Test adding and deleting D/F-conflicting references in a single
# transaction.
df_test() {
@@ -93,7 +91,7 @@ df_test() {
delname="$delref"
fi &&
cat >expected-err <<-EOF &&
- fatal: cannot lock ref $Q$addname$Q: $Q$delref$Q exists; cannot create $Q$addref$Q
+ fatal: cannot lock ref $SQ$addname$SQ: $SQ$delref$SQ exists; cannot create $SQ$addref$SQ
EOF
$pack &&
if $add_del
@@ -123,7 +121,7 @@ test_expect_success 'existing loose ref is a simple prefix of new' '
prefix=refs/1l &&
test_update_rejected "a c e" false "b c/x d" \
- "$Q$prefix/c$Q exists; cannot create $Q$prefix/c/x$Q"
+ "$SQ$prefix/c$SQ exists; cannot create $SQ$prefix/c/x$SQ"
'
@@ -131,7 +129,7 @@ test_expect_success 'existing packed ref is a simple prefix of new' '
prefix=refs/1p &&
test_update_rejected "a c e" true "b c/x d" \
- "$Q$prefix/c$Q exists; cannot create $Q$prefix/c/x$Q"
+ "$SQ$prefix/c$SQ exists; cannot create $SQ$prefix/c/x$SQ"
'
@@ -139,7 +137,7 @@ test_expect_success 'existing loose ref is a deeper prefix of new' '
prefix=refs/2l &&
test_update_rejected "a c e" false "b c/x/y d" \
- "$Q$prefix/c$Q exists; cannot create $Q$prefix/c/x/y$Q"
+ "$SQ$prefix/c$SQ exists; cannot create $SQ$prefix/c/x/y$SQ"
'
@@ -147,7 +145,7 @@ test_expect_success 'existing packed ref is a deeper prefix of new' '
prefix=refs/2p &&
test_update_rejected "a c e" true "b c/x/y d" \
- "$Q$prefix/c$Q exists; cannot create $Q$prefix/c/x/y$Q"
+ "$SQ$prefix/c$SQ exists; cannot create $SQ$prefix/c/x/y$SQ"
'
@@ -155,7 +153,7 @@ test_expect_success 'new ref is a simple prefix of existing loose' '
prefix=refs/3l &&
test_update_rejected "a c/x e" false "b c d" \
- "$Q$prefix/c/x$Q exists; cannot create $Q$prefix/c$Q"
+ "$SQ$prefix/c/x$SQ exists; cannot create $SQ$prefix/c$SQ"
'
@@ -163,7 +161,7 @@ test_expect_success 'new ref is a simple prefix of existing packed' '
prefix=refs/3p &&
test_update_rejected "a c/x e" true "b c d" \
- "$Q$prefix/c/x$Q exists; cannot create $Q$prefix/c$Q"
+ "$SQ$prefix/c/x$SQ exists; cannot create $SQ$prefix/c$SQ"
'
@@ -171,7 +169,7 @@ test_expect_success 'new ref is a deeper prefix of existing loose' '
prefix=refs/4l &&
test_update_rejected "a c/x/y e" false "b c d" \
- "$Q$prefix/c/x/y$Q exists; cannot create $Q$prefix/c$Q"
+ "$SQ$prefix/c/x/y$SQ exists; cannot create $SQ$prefix/c$SQ"
'
@@ -179,7 +177,7 @@ test_expect_success 'new ref is a deeper prefix of existing packed' '
prefix=refs/4p &&
test_update_rejected "a c/x/y e" true "b c d" \
- "$Q$prefix/c/x/y$Q exists; cannot create $Q$prefix/c$Q"
+ "$SQ$prefix/c/x/y$SQ exists; cannot create $SQ$prefix/c$SQ"
'
@@ -187,7 +185,7 @@ test_expect_success 'one new ref is a simple prefix of another' '
prefix=refs/5 &&
test_update_rejected "a e" false "b c c/x d" \
- "cannot process $Q$prefix/c$Q and $Q$prefix/c/x$Q at the same time"
+ "cannot process $SQ$prefix/c$SQ and $SQ$prefix/c/x$SQ at the same time"
'
@@ -334,7 +332,7 @@ test_expect_success 'D/F conflict prevents indirect delete long packed + indirec
test_expect_success 'missing old value blocks update' '
prefix=refs/missing-update &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: unable to resolve reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: unable to resolve reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/foo $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -345,7 +343,7 @@ test_expect_success 'incorrect old value blocks update' '
prefix=refs/incorrect-update &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: is at $C but expected $D
EOF
printf "%s\n" "update $prefix/foo $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -356,7 +354,7 @@ test_expect_success 'existing old value blocks create' '
prefix=refs/existing-create &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: reference already exists
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: reference already exists
EOF
printf "%s\n" "create $prefix/foo $E" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -367,7 +365,7 @@ test_expect_success 'incorrect old value blocks delete' '
prefix=refs/incorrect-delete &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: is at $C but expected $D
EOF
printf "%s\n" "delete $prefix/foo $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -378,7 +376,7 @@ test_expect_success 'missing old value blocks indirect update' '
prefix=refs/missing-indirect-update &&
git symbolic-ref $prefix/symref $prefix/foo &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: unable to resolve reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: unable to resolve reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/symref $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -390,7 +388,7 @@ test_expect_success 'incorrect old value blocks indirect update' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: is at $C but expected $D
EOF
printf "%s\n" "update $prefix/symref $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -402,7 +400,7 @@ test_expect_success 'existing old value blocks indirect create' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: reference already exists
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: reference already exists
EOF
printf "%s\n" "create $prefix/symref $E" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -414,7 +412,7 @@ test_expect_success 'incorrect old value blocks indirect delete' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: is at $C but expected $D
EOF
printf "%s\n" "delete $prefix/symref $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -425,7 +423,7 @@ test_expect_success 'missing old value blocks indirect no-deref update' '
prefix=refs/missing-noderef-update &&
git symbolic-ref $prefix/symref $prefix/foo &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: reference is missing but expected $D
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: reference is missing but expected $D
EOF
printf "%s\n" "option no-deref" "update $prefix/symref $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -437,7 +435,7 @@ test_expect_success 'incorrect old value blocks indirect no-deref update' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: is at $C but expected $D
EOF
printf "%s\n" "option no-deref" "update $prefix/symref $E $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -449,7 +447,7 @@ test_expect_success 'existing old value blocks indirect no-deref create' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: reference already exists
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: reference already exists
EOF
printf "%s\n" "option no-deref" "create $prefix/symref $E" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -461,7 +459,7 @@ test_expect_success 'incorrect old value blocks indirect no-deref delete' '
git symbolic-ref $prefix/symref $prefix/foo &&
git update-ref $prefix/foo $C &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: is at $C but expected $D
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: is at $C but expected $D
EOF
printf "%s\n" "option no-deref" "delete $prefix/symref $D" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -474,13 +472,13 @@ test_expect_success 'non-empty directory blocks create' '
: >.git/$prefix/foo/bar/baz.lock &&
test_when_finished "rm -f .git/$prefix/foo/bar/baz.lock" &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: there is a non-empty directory $Q.git/$prefix/foo$Q blocking reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: there is a non-empty directory $SQ.git/$prefix/foo$SQ blocking reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/foo $C" |
test_must_fail git update-ref --stdin 2>output.err &&
test_cmp expected output.err &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: unable to resolve reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: unable to resolve reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/foo $D $C" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -493,13 +491,13 @@ test_expect_success 'broken reference blocks create' '
echo "gobbledigook" >.git/$prefix/foo &&
test_when_finished "rm -f .git/$prefix/foo" &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: unable to resolve reference $Q$prefix/foo$Q: reference broken
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: unable to resolve reference $SQ$prefix/foo$SQ: reference broken
EOF
printf "%s\n" "update $prefix/foo $C" |
test_must_fail git update-ref --stdin 2>output.err &&
test_cmp expected output.err &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/foo$Q: unable to resolve reference $Q$prefix/foo$Q: reference broken
+ fatal: cannot lock ref $SQ$prefix/foo$SQ: unable to resolve reference $SQ$prefix/foo$SQ: reference broken
EOF
printf "%s\n" "update $prefix/foo $D $C" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -513,13 +511,13 @@ test_expect_success 'non-empty directory blocks indirect create' '
: >.git/$prefix/foo/bar/baz.lock &&
test_when_finished "rm -f .git/$prefix/foo/bar/baz.lock" &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: there is a non-empty directory $Q.git/$prefix/foo$Q blocking reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: there is a non-empty directory $SQ.git/$prefix/foo$SQ blocking reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/symref $C" |
test_must_fail git update-ref --stdin 2>output.err &&
test_cmp expected output.err &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: unable to resolve reference $Q$prefix/foo$Q
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: unable to resolve reference $SQ$prefix/foo$SQ
EOF
printf "%s\n" "update $prefix/symref $D $C" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -532,13 +530,13 @@ test_expect_success 'broken reference blocks indirect create' '
echo "gobbledigook" >.git/$prefix/foo &&
test_when_finished "rm -f .git/$prefix/foo" &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: unable to resolve reference $Q$prefix/foo$Q: reference broken
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: unable to resolve reference $SQ$prefix/foo$SQ: reference broken
EOF
printf "%s\n" "update $prefix/symref $C" |
test_must_fail git update-ref --stdin 2>output.err &&
test_cmp expected output.err &&
cat >expected <<-EOF &&
- fatal: cannot lock ref $Q$prefix/symref$Q: unable to resolve reference $Q$prefix/foo$Q: reference broken
+ fatal: cannot lock ref $SQ$prefix/symref$SQ: unable to resolve reference $SQ$prefix/foo$SQ: reference broken
EOF
printf "%s\n" "update $prefix/symref $D $C" |
test_must_fail git update-ref --stdin 2>output.err &&
@@ -614,7 +612,7 @@ test_expect_success 'delete fails cleanly if packed-refs file is locked' '
test_when_finished "rm -f .git/packed-refs.lock" &&
test_must_fail git update-ref -d $prefix/foo >out 2>err &&
git for-each-ref $prefix >actual &&
- test_i18ngrep "Unable to create $Q.*packed-refs.lock$Q: " err &&
+ test_i18ngrep "Unable to create $SQ.*packed-refs.lock$SQ: " err &&
test_cmp unchanged actual
'
diff --git a/t/t1414-reflog-walk.sh b/t/t1414-reflog-walk.sh
index feb1efd8ff..1181a9fb28 100755
--- a/t/t1414-reflog-walk.sh
+++ b/t/t1414-reflog-walk.sh
@@ -18,10 +18,9 @@ do_walk () {
git log -g --format="%gd %gs" "$@"
}
-sq="'"
test_expect_success 'set up expected reflog' '
cat >expect.all <<-EOF
- HEAD@{0} commit (merge): Merge branch ${sq}master${sq} into side
+ HEAD@{0} commit (merge): Merge branch ${SQ}master${SQ} into side
HEAD@{1} commit: three
HEAD@{2} checkout: moving from master to side
HEAD@{3} commit: two
diff --git a/t/t1450-fsck.sh b/t/t1450-fsck.sh
index b36e0528d0..50d28e6fdb 100755
--- a/t/t1450-fsck.sh
+++ b/t/t1450-fsck.sh
@@ -70,7 +70,6 @@ test_expect_success 'object with bad sha1' '
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "$sha.*corrupt" out
'
@@ -78,7 +77,6 @@ test_expect_success 'branch pointing to non-commit' '
git rev-parse HEAD^{tree} >.git/refs/heads/invalid &&
test_when_finished "git update-ref -d refs/heads/invalid" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "not a commit" out
'
@@ -88,7 +86,6 @@ test_expect_success 'HEAD link pointing at a funny object' '
echo $ZERO_OID >.git/HEAD &&
# avoid corrupt/broken HEAD from interfering with repo discovery
test_must_fail env GIT_DIR=.git git fsck 2>out &&
- cat out &&
test_i18ngrep "detached HEAD points" out
'
@@ -98,7 +95,6 @@ test_expect_success 'HEAD link pointing at a funny place' '
echo "ref: refs/funny/place" >.git/HEAD &&
# avoid corrupt/broken HEAD from interfering with repo discovery
test_must_fail env GIT_DIR=.git git fsck 2>out &&
- cat out &&
test_i18ngrep "HEAD points to something strange" out
'
@@ -157,7 +153,6 @@ test_expect_success 'email with embedded > is not okay' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new" out
'
@@ -169,7 +164,6 @@ test_expect_success 'missing < email delimiter is reported nicely' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new.* - bad name" out
'
@@ -181,7 +175,6 @@ test_expect_success 'missing email is reported nicely' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new.* - missing email" out
'
@@ -193,7 +186,6 @@ test_expect_success '> in name is reported' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new" out
'
@@ -207,7 +199,6 @@ test_expect_success 'integer overflow in timestamps is reported' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new.*integer overflow" out
'
@@ -219,7 +210,6 @@ test_expect_success 'commit with NUL in header' '
git update-ref refs/heads/bogus "$new" &&
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_must_fail git fsck 2>out &&
- cat out &&
test_i18ngrep "error in commit $new.*unterminated header: NUL at offset" out
'
@@ -297,7 +287,6 @@ test_expect_success 'tag pointing to nonexistent' '
echo $tag >.git/refs/tags/invalid &&
test_when_finished "git update-ref -d refs/tags/invalid" &&
test_must_fail git fsck --tags >out &&
- cat out &&
test_i18ngrep "broken link" out
'
@@ -378,7 +367,6 @@ test_expect_success 'tag with NUL in header' '
echo $tag >.git/refs/tags/wrong &&
test_when_finished "git update-ref -d refs/tags/wrong" &&
test_must_fail git fsck --tags 2>out &&
- cat out &&
test_i18ngrep "error in tag $tag.*unterminated header: NUL at offset" out
'
@@ -409,7 +397,6 @@ test_expect_success 'rev-list --verify-objects with bad sha1' '
test_when_finished "git update-ref -d refs/heads/bogus" &&
test_might_fail git rev-list --verify-objects refs/heads/bogus >/dev/null 2>out &&
- cat out &&
test_i18ngrep -q "error: hash mismatch $(dirname $new)$(test_oid ff_2)" out
'
@@ -433,7 +420,6 @@ test_expect_success 'fsck notices blob entry pointing to null sha1' '
sha=$(printf "100644 file$_bz$_bzoid" |
git hash-object -w --stdin -t tree) &&
git fsck 2>out &&
- cat out &&
test_i18ngrep "warning.*null sha1" out
)
'
@@ -444,7 +430,6 @@ test_expect_success 'fsck notices submodule entry pointing to null sha1' '
sha=$(printf "160000 submodule$_bz$_bzoid" |
git hash-object -w --stdin -t tree) &&
git fsck 2>out &&
- cat out &&
test_i18ngrep "warning.*null sha1" out
)
'
@@ -465,7 +450,6 @@ while read name path pretty; do
printf "$mode $type %s\t%s" "$value" "$path" >bad &&
bad_tree=$(git mktree <bad) &&
git fsck 2>out &&
- cat out &&
test_i18ngrep "warning.*tree $bad_tree" out
)'
done <<-\EOF
diff --git a/t/t1506-rev-parse-diagnosis.sh b/t/t1506-rev-parse-diagnosis.sh
index 4ee009da66..624d0a588f 100755
--- a/t/t1506-rev-parse-diagnosis.sh
+++ b/t/t1506-rev-parse-diagnosis.sh
@@ -8,10 +8,9 @@ exec </dev/null
test_did_you_mean ()
{
- sq="'" &&
cat >expected <<-EOF &&
- fatal: Path '$2$3' $4, but not ${5:-$sq$3$sq}.
- Did you mean '$1:$2$3'${2:+ aka $sq$1:./$3$sq}?
+ fatal: Path '$2$3' $4, but not ${5:-$SQ$3$SQ}.
+ Did you mean '$1:$2$3'${2:+ aka $SQ$1:./$3$SQ}?
EOF
test_cmp expected error
}
@@ -215,4 +214,12 @@ test_expect_success 'arg before dashdash must be a revision (ambiguous)' '
test_cmp expect actual
'
+test_expect_success 'reject Nth parent if N is too high' '
+ test_must_fail git rev-parse HEAD^100000000000000000000000000000000
+'
+
+test_expect_success 'reject Nth ancestor if N is too high' '
+ test_must_fail git rev-parse HEAD~100000000000000000000000000000000
+'
+
test_done
diff --git a/t/t1507-rev-parse-upstream.sh b/t/t1507-rev-parse-upstream.sh
index fa3e499641..8b4cf8a6e3 100755
--- a/t/t1507-rev-parse-upstream.sh
+++ b/t/t1507-rev-parse-upstream.sh
@@ -28,8 +28,6 @@ test_expect_success 'setup' '
)
'
-sq="'"
-
full_name () {
(cd clone &&
git rev-parse --symbolic-full-name "$@")
@@ -129,7 +127,7 @@ test_expect_success 'merge my-side@{u} records the correct name' '
git branch -t new my-side@{u} &&
git merge -s ours new@{u} &&
git show -s --pretty=tformat:%s >actual &&
- echo "Merge remote-tracking branch ${sq}origin/side${sq}" >expect &&
+ echo "Merge remote-tracking branch ${SQ}origin/side${SQ}" >expect &&
test_cmp expect actual
)
'
@@ -156,7 +154,7 @@ test_expect_success 'branch@{u} works when tracking a local branch' '
test_expect_success 'branch@{u} error message when no upstream' '
cat >expect <<-EOF &&
- fatal: no upstream configured for branch ${sq}non-tracking${sq}
+ fatal: no upstream configured for branch ${SQ}non-tracking${SQ}
EOF
error_message non-tracking@{u} &&
test_i18ncmp expect error
@@ -164,7 +162,7 @@ test_expect_success 'branch@{u} error message when no upstream' '
test_expect_success '@{u} error message when no upstream' '
cat >expect <<-EOF &&
- fatal: no upstream configured for branch ${sq}master${sq}
+ fatal: no upstream configured for branch ${SQ}master${SQ}
EOF
test_must_fail git rev-parse --verify @{u} 2>actual &&
test_i18ncmp expect actual
@@ -172,7 +170,7 @@ test_expect_success '@{u} error message when no upstream' '
test_expect_success 'branch@{u} error message with misspelt branch' '
cat >expect <<-EOF &&
- fatal: no such branch: ${sq}no-such-branch${sq}
+ fatal: no such branch: ${SQ}no-such-branch${SQ}
EOF
error_message no-such-branch@{u} &&
test_i18ncmp expect error
@@ -189,7 +187,7 @@ test_expect_success '@{u} error message when not on a branch' '
test_expect_success 'branch@{u} error message if upstream branch not fetched' '
cat >expect <<-EOF &&
- fatal: upstream branch ${sq}refs/heads/side${sq} not stored as a remote-tracking branch
+ fatal: upstream branch ${SQ}refs/heads/side${SQ} not stored as a remote-tracking branch
EOF
error_message bad-upstream@{u} &&
test_i18ncmp expect error
diff --git a/t/t1600-index.sh b/t/t1600-index.sh
index 42962ed7d4..c77721b580 100755
--- a/t/t1600-index.sh
+++ b/t/t1600-index.sh
@@ -59,17 +59,38 @@ test_expect_success 'out of bounds index.version issues warning' '
)
'
-test_expect_success 'GIT_INDEX_VERSION takes precedence over config' '
+test_index_version () {
+ INDEX_VERSION_CONFIG=$1 &&
+ FEATURE_MANY_FILES=$2 &&
+	ENV_VAR_VERSION=$3 &&
+ EXPECTED_OUTPUT_VERSION=$4 &&
(
rm -f .git/index &&
- GIT_INDEX_VERSION=4 &&
- export GIT_INDEX_VERSION &&
- git config --add index.version 2 &&
+ rm -f .git/config &&
+ if test "$INDEX_VERSION_CONFIG" -ne 0
+ then
+ git config --add index.version $INDEX_VERSION_CONFIG
+ fi &&
+	git config --add feature.manyFiles $FEATURE_MANY_FILES &&
+ if test "$ENV_VAR_VERSION" -ne 0
+ then
+ GIT_INDEX_VERSION=$ENV_VAR_VERSION &&
+ export GIT_INDEX_VERSION
+ else
+ unset GIT_INDEX_VERSION
+ fi &&
git add a 2>&1 &&
- echo 4 >expect &&
+ echo $EXPECTED_OUTPUT_VERSION >expect &&
test-tool index-version <.git/index >actual &&
test_cmp expect actual
)
+}
+
+test_expect_success 'index version config precedence' '
+ test_index_version 2 false 4 4 &&
+ test_index_version 2 true 0 2 &&
+ test_index_version 0 true 0 4 &&
+ test_index_version 0 true 2 2
'
test_done
diff --git a/t/t2022-checkout-paths.sh b/t/t2022-checkout-paths.sh
index fc3eb43b89..6844afafc0 100755
--- a/t/t2022-checkout-paths.sh
+++ b/t/t2022-checkout-paths.sh
@@ -78,4 +78,15 @@ test_expect_success 'do not touch files that are already up-to-date' '
test_cmp expect actual
'
+test_expect_success 'checkout HEAD adds deleted intent-to-add file back to index' '
+ echo "nonempty" >nonempty &&
+ >empty &&
+ git add nonempty empty &&
+ git commit -m "create files to be deleted" &&
+ git rm --cached nonempty empty &&
+ git add -N nonempty empty &&
+ git checkout HEAD nonempty empty &&
+ git diff --cached --exit-code
+'
+
test_done
diff --git a/t/t2070-restore.sh b/t/t2070-restore.sh
index 2650df1966..21c3f84459 100755
--- a/t/t2070-restore.sh
+++ b/t/t2070-restore.sh
@@ -95,4 +95,15 @@ test_expect_success 'restore --ignore-unmerged ignores unmerged entries' '
)
'
+test_expect_success 'restore --staged adds deleted intent-to-add file back to index' '
+ echo "nonempty" >nonempty &&
+ >empty &&
+ git add nonempty empty &&
+ git commit -m "create files to be deleted" &&
+ git rm --cached nonempty empty &&
+ git add -N nonempty empty &&
+ git restore --staged nonempty empty &&
+ git diff --cached --exit-code
+'
+
test_done
diff --git a/t/t3005-ls-files-relative.sh b/t/t3005-ls-files-relative.sh
index 209b4c7cd8..2ec69a8a26 100755
--- a/t/t3005-ls-files-relative.sh
+++ b/t/t3005-ls-files-relative.sh
@@ -7,10 +7,6 @@ This test runs git ls-files with various relative path arguments.
. ./test-lib.sh
-new_line='
-'
-sq=\'
-
test_expect_success 'prepare' '
: >never-mind-me &&
git add never-mind-me &&
@@ -44,9 +40,9 @@ test_expect_success 'ls-files -c' '
cd top/sub &&
for f in ../y*
do
- echo "error: pathspec $sq$f$sq did not match any file(s) known to git"
+ echo "error: pathspec $SQ$f$SQ did not match any file(s) known to git"
done >expect.err &&
- echo "Did you forget to ${sq}git add${sq}?" >>expect.err &&
+ echo "Did you forget to ${SQ}git add${SQ}?" >>expect.err &&
ls ../x* >expect.out &&
test_must_fail git ls-files -c --error-unmatch ../[xy]* >actual.out 2>actual.err &&
test_cmp expect.out actual.out &&
@@ -59,9 +55,9 @@ test_expect_success 'ls-files -o' '
cd top/sub &&
for f in ../x*
do
- echo "error: pathspec $sq$f$sq did not match any file(s) known to git"
+ echo "error: pathspec $SQ$f$SQ did not match any file(s) known to git"
done >expect.err &&
- echo "Did you forget to ${sq}git add${sq}?" >>expect.err &&
+ echo "Did you forget to ${SQ}git add${SQ}?" >>expect.err &&
ls ../y* >expect.out &&
test_must_fail git ls-files -o --error-unmatch ../[xy]* >actual.out 2>actual.err &&
test_cmp expect.out actual.out &&
diff --git a/t/t3030-merge-recursive.sh b/t/t3030-merge-recursive.sh
index ff641b348a..2170758e38 100755
--- a/t/t3030-merge-recursive.sh
+++ b/t/t3030-merge-recursive.sh
@@ -452,6 +452,34 @@ test_expect_success 'merge-recursive d/f conflict result' '
'
+test_expect_success SYMLINKS 'dir in working tree with symlink ancestor does not produce d/f conflict' '
+ git init sym &&
+ (
+ cd sym &&
+ ln -s . foo &&
+ mkdir bar &&
+ >bar/file &&
+ git add foo bar/file &&
+ git commit -m "foo symlink" &&
+
+ git checkout -b branch1 &&
+ git commit --allow-empty -m "empty commit" &&
+
+ git checkout master &&
+ git rm foo &&
+ mkdir foo &&
+ >foo/bar &&
+ git add foo/bar &&
+ git commit -m "replace foo symlink with real foo dir and foo/bar file" &&
+
+ git checkout branch1 &&
+
+ git cherry-pick master &&
+ test_path_is_dir foo &&
+ test_path_is_file foo/bar
+ )
+'
+
test_expect_success 'reset and 3-way merge' '
git reset --hard "$c2" &&
@@ -667,15 +695,22 @@ test_expect_success 'merging with triple rename across D/F conflict' '
test_expect_success 'merge-recursive remembers the names of all base trees' '
git reset --hard HEAD &&
+ # make the index match $c1 so that merge-recursive below does not
+ # fail early
+ git diff --binary HEAD $c1 -- | git apply --cached &&
+
# more trees than static slots used by oid_to_hex()
for commit in $c0 $c2 $c4 $c5 $c6 $c7
do
git rev-parse "$commit^{tree}"
done >trees &&
- # ignore the return code -- it only fails because the input is weird
+ # ignore the return code; it only fails because the input is weird...
test_must_fail git -c merge.verbosity=5 merge-recursive $(cat trees) -- $c1 $c3 >out &&
+ # ...but make sure it fails in the expected way
+ test_i18ngrep CONFLICT.*rename/rename out &&
+
# merge-recursive prints in reverse order, but we do not care
sort <trees >expect &&
sed -n "s/^virtual //p" out | sort >actual &&
diff --git a/t/t3201-branch-contains.sh b/t/t3201-branch-contains.sh
index 0ea4fc4694..40251c9f8f 100755
--- a/t/t3201-branch-contains.sh
+++ b/t/t3201-branch-contains.sh
@@ -192,10 +192,10 @@ test_expect_success 'branch --merged with --verbose' '
EOF
test_cmp expect actual &&
git branch --verbose --merged topic >actual &&
- cat >expect <<-\EOF &&
- master c77a0a9 second on master
- * topic 2c939f4 [ahead 1] foo
- zzz c77a0a9 second on master
+ cat >expect <<-EOF &&
+ master $(git rev-parse --short master) second on master
+ * topic $(git rev-parse --short topic ) [ahead 1] foo
+ zzz $(git rev-parse --short zzz ) second on master
EOF
test_i18ncmp expect actual
'
diff --git a/t/t3206-range-diff.sh b/t/t3206-range-diff.sh
index ec548654ce..0579cd9969 100755
--- a/t/t3206-range-diff.sh
+++ b/t/t3206-range-diff.sh
@@ -8,17 +8,124 @@ test_description='range-diff tests'
# harm than good. We need some real history.
test_expect_success 'setup' '
- git fast-import < "$TEST_DIRECTORY"/t3206/history.export
+ git fast-import < "$TEST_DIRECTORY"/t3206/history.export &&
+ test_oid_cache <<-EOF
+ # topic
+ t1 sha1:4de457d
+ t2 sha1:fccce22
+ t3 sha1:147e64e
+ t4 sha1:a63e992
+ t1 sha256:b89f8b9
+ t2 sha256:5f12aad
+ t3 sha256:ea8b273
+ t4 sha256:14b7336
+
+ # unmodified
+ u1 sha1:35b9b25
+ u2 sha1:de345ab
+ u3 sha1:9af6654
+ u4 sha1:2901f77
+ u1 sha256:e3731be
+ u2 sha256:14fadf8
+ u3 sha256:736c4bc
+ u4 sha256:673e77d
+
+ # reordered
+ r1 sha1:aca177a
+ r2 sha1:14ad629
+ r3 sha1:ee58208
+ r4 sha1:307b27a
+ r1 sha256:f59d3aa
+ r2 sha256:fb261a8
+ r3 sha256:cb2649b
+ r4 sha256:958577e
+
+ # removed (deleted)
+ d1 sha1:7657159
+ d2 sha1:43d84d3
+ d3 sha1:a740396
+ d1 sha256:e312513
+ d2 sha256:eb19258
+ d3 sha256:1ccb3c1
+
+ # added
+ a1 sha1:2716022
+ a2 sha1:b62accd
+ a3 sha1:df46cfa
+ a4 sha1:3e64548
+ a5 sha1:12b4063
+ a1 sha256:d724f4d
+ a2 sha256:1de7762
+ a3 sha256:e159431
+ a4 sha256:b3e483c
+ a5 sha256:90866a7
+
+ # rebased
+ b1 sha1:cc9c443
+ b2 sha1:c5d9641
+ b3 sha1:28cc2b6
+ b4 sha1:5628ab7
+ b5 sha1:a31b12e
+ b1 sha256:a1a8717
+ b2 sha256:20a5862
+ b3 sha256:587172a
+ b4 sha256:2721c5d
+ b5 sha256:7b57864
+
+ # changed
+ c1 sha1:a4b3333
+ c2 sha1:f51d370
+ c3 sha1:0559556
+ c4 sha1:d966c5c
+ c1 sha256:f8c2b9d
+ c2 sha256:3fb6318
+ c3 sha256:168ab68
+ c4 sha256:3526539
+
+ # changed-message
+ m1 sha1:f686024
+ m2 sha1:4ab067d
+ m3 sha1:b9cb956
+ m4 sha1:8add5f1
+ m1 sha256:31e6281
+ m2 sha256:a06bf1b
+ m3 sha256:82dc654
+ m4 sha256:48470c5
+
+ # renamed
+ n1 sha1:f258d75
+ n2 sha1:017b62d
+ n3 sha1:3ce7af6
+ n4 sha1:1e6226b
+ n1 sha256:ad52114
+ n2 sha256:3b54c8f
+ n3 sha256:3b0a644
+ n4 sha256:e461653
+
+ # added and removed
+ s1 sha1:096b1ba
+ s2 sha1:d92e698
+ s3 sha1:9a1db4d
+ s4 sha1:fea3b5c
+ s1 sha256:a7f9134
+ s2 sha256:b4c2580
+ s3 sha256:1d62aa2
+ s4 sha256:48160e8
+
+ # Empty delimiter (included so lines match neatly)
+ __ sha1:-------
+ __ sha256:-------
+ EOF
'
test_expect_success 'simple A..B A..C (unmodified)' '
git range-diff --no-color master..topic master..unmodified \
>actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: 35b9b25 s/5/A/
- 2: fccce22 = 2: de345ab s/4/A/
- 3: 147e64e = 3: 9af6654 s/11/B/
- 4: a63e992 = 4: 2901f77 s/12/B/
+ 1: $(test_oid t1) = 1: $(test_oid u1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid u2) s/4/A/
+ 3: $(test_oid t3) = 3: $(test_oid u3) s/11/B/
+ 4: $(test_oid t4) = 4: $(test_oid u4) s/12/B/
EOF
test_cmp expected actual
'
@@ -38,10 +145,10 @@ test_expect_success 'simple A B C (unmodified)' '
test_expect_success 'trivial reordering' '
git range-diff --no-color master topic reordered >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: aca177a s/5/A/
- 3: 147e64e = 2: 14ad629 s/11/B/
- 4: a63e992 = 3: ee58208 s/12/B/
- 2: fccce22 = 4: 307b27a s/4/A/
+ 1: $(test_oid t1) = 1: $(test_oid r1) s/5/A/
+ 3: $(test_oid t3) = 2: $(test_oid r2) s/11/B/
+ 4: $(test_oid t4) = 3: $(test_oid r3) s/12/B/
+ 2: $(test_oid t2) = 4: $(test_oid r4) s/4/A/
EOF
test_cmp expected actual
'
@@ -49,10 +156,10 @@ test_expect_success 'trivial reordering' '
test_expect_success 'removed a commit' '
git range-diff --no-color master topic removed >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: 7657159 s/5/A/
- 2: fccce22 < -: ------- s/4/A/
- 3: 147e64e = 2: 43d84d3 s/11/B/
- 4: a63e992 = 3: a740396 s/12/B/
+ 1: $(test_oid t1) = 1: $(test_oid d1) s/5/A/
+ 2: $(test_oid t2) < -: $(test_oid __) s/4/A/
+ 3: $(test_oid t3) = 2: $(test_oid d2) s/11/B/
+ 4: $(test_oid t4) = 3: $(test_oid d3) s/12/B/
EOF
test_cmp expected actual
'
@@ -60,11 +167,11 @@ test_expect_success 'removed a commit' '
test_expect_success 'added a commit' '
git range-diff --no-color master topic added >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: 2716022 s/5/A/
- 2: fccce22 = 2: b62accd s/4/A/
- -: ------- > 3: df46cfa s/6/A/
- 3: 147e64e = 4: 3e64548 s/11/B/
- 4: a63e992 = 5: 12b4063 s/12/B/
+ 1: $(test_oid t1) = 1: $(test_oid a1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid a2) s/4/A/
+ -: $(test_oid __) > 3: $(test_oid a3) s/6/A/
+ 3: $(test_oid t3) = 4: $(test_oid a4) s/11/B/
+ 4: $(test_oid t4) = 5: $(test_oid a5) s/12/B/
EOF
test_cmp expected actual
'
@@ -72,10 +179,10 @@ test_expect_success 'added a commit' '
test_expect_success 'new base, A B C' '
git range-diff --no-color master topic rebased >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: cc9c443 s/5/A/
- 2: fccce22 = 2: c5d9641 s/4/A/
- 3: 147e64e = 3: 28cc2b6 s/11/B/
- 4: a63e992 = 4: 5628ab7 s/12/B/
+ 1: $(test_oid t1) = 1: $(test_oid b1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid b2) s/4/A/
+ 3: $(test_oid t3) = 3: $(test_oid b3) s/11/B/
+ 4: $(test_oid t4) = 4: $(test_oid b4) s/12/B/
EOF
test_cmp expected actual
'
@@ -84,11 +191,11 @@ test_expect_success 'new base, B...C' '
# this syntax includes the commits from master!
git range-diff --no-color topic...rebased >actual &&
cat >expected <<-EOF &&
- -: ------- > 1: a31b12e unrelated
- 1: 4de457d = 2: cc9c443 s/5/A/
- 2: fccce22 = 3: c5d9641 s/4/A/
- 3: 147e64e = 4: 28cc2b6 s/11/B/
- 4: a63e992 = 5: 5628ab7 s/12/B/
+ -: $(test_oid __) > 1: $(test_oid b5) unrelated
+ 1: $(test_oid t1) = 2: $(test_oid b1) s/5/A/
+ 2: $(test_oid t2) = 3: $(test_oid b2) s/4/A/
+ 3: $(test_oid t3) = 4: $(test_oid b3) s/11/B/
+ 4: $(test_oid t4) = 5: $(test_oid b4) s/12/B/
EOF
test_cmp expected actual
'
@@ -96,9 +203,9 @@ test_expect_success 'new base, B...C' '
test_expect_success 'changed commit' '
git range-diff --no-color topic...changed >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: a4b3333 s/5/A/
- 2: fccce22 = 2: f51d370 s/4/A/
- 3: 147e64e ! 3: 0559556 s/11/B/
+ 1: $(test_oid t1) = 1: $(test_oid c1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid c2) s/4/A/
+ 3: $(test_oid t3) ! 3: $(test_oid c3) s/11/B/
@@ file: A
9
10
@@ -108,7 +215,7 @@ test_expect_success 'changed commit' '
12
13
14
- 4: a63e992 ! 4: d966c5c s/12/B/
+ 4: $(test_oid t4) ! 4: $(test_oid c4) s/12/B/
@@ file
@@ file: A
9
@@ -125,10 +232,10 @@ test_expect_success 'changed commit' '
test_expect_success 'changed commit with --no-patch diff option' '
git range-diff --no-color --no-patch topic...changed >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: a4b3333 s/5/A/
- 2: fccce22 = 2: f51d370 s/4/A/
- 3: 147e64e ! 3: 0559556 s/11/B/
- 4: a63e992 ! 4: d966c5c s/12/B/
+ 1: $(test_oid t1) = 1: $(test_oid c1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid c2) s/4/A/
+ 3: $(test_oid t3) ! 3: $(test_oid c3) s/11/B/
+ 4: $(test_oid t4) ! 4: $(test_oid c4) s/12/B/
EOF
test_cmp expected actual
'
@@ -136,16 +243,16 @@ test_expect_success 'changed commit with --no-patch diff option' '
test_expect_success 'changed commit with --stat diff option' '
git range-diff --no-color --stat topic...changed >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: a4b3333 s/5/A/
+ 1: $(test_oid t1) = 1: $(test_oid c1) s/5/A/
a => b | 0
1 file changed, 0 insertions(+), 0 deletions(-)
- 2: fccce22 = 2: f51d370 s/4/A/
+ 2: $(test_oid t2) = 2: $(test_oid c2) s/4/A/
a => b | 0
1 file changed, 0 insertions(+), 0 deletions(-)
- 3: 147e64e ! 3: 0559556 s/11/B/
+ 3: $(test_oid t3) ! 3: $(test_oid c3) s/11/B/
a => b | 0
1 file changed, 0 insertions(+), 0 deletions(-)
- 4: a63e992 ! 4: d966c5c s/12/B/
+ 4: $(test_oid t4) ! 4: $(test_oid c4) s/12/B/
a => b | 0
1 file changed, 0 insertions(+), 0 deletions(-)
EOF
@@ -155,9 +262,9 @@ test_expect_success 'changed commit with --stat diff option' '
test_expect_success 'changed commit with sm config' '
git range-diff --no-color --submodule=log topic...changed >actual &&
cat >expected <<-EOF &&
- 1: 4de457d = 1: a4b3333 s/5/A/
- 2: fccce22 = 2: f51d370 s/4/A/
- 3: 147e64e ! 3: 0559556 s/11/B/
+ 1: $(test_oid t1) = 1: $(test_oid c1) s/5/A/
+ 2: $(test_oid t2) = 2: $(test_oid c2) s/4/A/
+ 3: $(test_oid t3) ! 3: $(test_oid c3) s/11/B/
@@ file: A
9
10
@@ -167,7 +274,7 @@ test_expect_success 'changed commit with sm config' '
12
13
14
- 4: a63e992 ! 4: d966c5c s/12/B/
+ 4: $(test_oid t4) ! 4: $(test_oid c4) s/12/B/
@@ file
@@ file: A
9
@@ -184,8 +291,8 @@ test_expect_success 'changed commit with sm config' '
test_expect_success 'renamed file' '
git range-diff --no-color --submodule=log topic...renamed-file >actual &&
sed s/Z/\ /g >expected <<-EOF &&
- 1: 4de457d = 1: f258d75 s/5/A/
- 2: fccce22 ! 2: 017b62d s/4/A/
+ 1: $(test_oid t1) = 1: $(test_oid n1) s/5/A/
+ 2: $(test_oid t2) ! 2: $(test_oid n2) s/4/A/
@@ Metadata
ZAuthor: Thomas Rast <trast@inf.ethz.ch>
Z
@@ -198,7 +305,7 @@ test_expect_success 'renamed file' '
Z@@
Z 1
Z 2
- 3: 147e64e ! 3: 3ce7af6 s/11/B/
+ 3: $(test_oid t3) ! 3: $(test_oid n3) s/11/B/
@@ Metadata
Z ## Commit message ##
Z s/11/B/
@@ -210,7 +317,7 @@ test_expect_success 'renamed file' '
Z 8
Z 9
Z 10
- 4: a63e992 ! 4: 1e6226b s/12/B/
+ 4: $(test_oid t4) ! 4: $(test_oid n4) s/12/B/
@@ Metadata
Z ## Commit message ##
Z s/12/B/
@@ -226,11 +333,51 @@ test_expect_success 'renamed file' '
test_cmp expected actual
'
+test_expect_success 'file with mode only change' '
+ git range-diff --no-color --submodule=log topic...mode-only-change >actual &&
+ sed s/Z/\ /g >expected <<-EOF &&
+ 1: fccce22 ! 1: 4d39cb3 s/4/A/
+ @@ Metadata
+ ZAuthor: Thomas Rast <trast@inf.ethz.ch>
+ Z
+ Z ## Commit message ##
+ - s/4/A/
+ + s/4/A/ + add other-file
+ Z
+ Z ## file ##
+ Z@@
+ @@ file
+ Z A
+ Z 6
+ Z 7
+ +
+ + ## other-file (new) ##
+ 2: 147e64e ! 2: 26c107f s/11/B/
+ @@ Metadata
+ ZAuthor: Thomas Rast <trast@inf.ethz.ch>
+ Z
+ Z ## Commit message ##
+ - s/11/B/
+ + s/11/B/ + mode change other-file
+ Z
+ Z ## file ##
+ Z@@ file: A
+ @@ file: A
+ Z 12
+ Z 13
+ Z 14
+ +
+ + ## other-file (mode change 100644 => 100755) ##
+ 3: a63e992 = 3: 4c1e0f5 s/12/B/
+ EOF
+ test_cmp expected actual
+'
+
test_expect_success 'file added and later removed' '
git range-diff --no-color --submodule=log topic...added-removed >actual &&
sed s/Z/\ /g >expected <<-EOF &&
- 1: 4de457d = 1: 096b1ba s/5/A/
- 2: fccce22 ! 2: d92e698 s/4/A/
+ 1: $(test_oid t1) = 1: $(test_oid s1) s/5/A/
+ 2: $(test_oid t2) ! 2: $(test_oid s2) s/4/A/
@@ Metadata
ZAuthor: Thomas Rast <trast@inf.ethz.ch>
Z
@@ -246,7 +393,7 @@ test_expect_success 'file added and later removed' '
Z 7
+
+ ## new-file (new) ##
- 3: 147e64e ! 3: 9a1db4d s/11/B/
+ 3: $(test_oid t3) ! 3: $(test_oid s3) s/11/B/
@@ Metadata
ZAuthor: Thomas Rast <trast@inf.ethz.ch>
Z
@@ -262,7 +409,7 @@ test_expect_success 'file added and later removed' '
Z 14
+
+ ## new-file (deleted) ##
- 4: a63e992 = 4: fea3b5c s/12/B/
+ 4: $(test_oid t4) = 4: $(test_oid s4) s/12/B/
EOF
test_cmp expected actual
'
@@ -275,8 +422,8 @@ test_expect_success 'no commits on one side' '
test_expect_success 'changed message' '
git range-diff --no-color topic...changed-message >actual &&
sed s/Z/\ /g >expected <<-EOF &&
- 1: 4de457d = 1: f686024 s/5/A/
- 2: fccce22 ! 2: 4ab067d s/4/A/
+ 1: $(test_oid t1) = 1: $(test_oid m1) s/5/A/
+ 2: $(test_oid t2) ! 2: $(test_oid m2) s/4/A/
@@ Metadata
Z ## Commit message ##
Z s/4/A/
@@ -286,16 +433,16 @@ test_expect_success 'changed message' '
Z ## file ##
Z@@
Z 1
- 3: 147e64e = 3: b9cb956 s/11/B/
- 4: a63e992 = 4: 8add5f1 s/12/B/
+ 3: $(test_oid t3) = 3: $(test_oid m3) s/11/B/
+ 4: $(test_oid t4) = 4: $(test_oid m4) s/12/B/
EOF
test_cmp expected actual
'
test_expect_success 'dual-coloring' '
- sed -e "s|^:||" >expect <<-\EOF &&
- :<YELLOW>1: a4b3333 = 1: f686024 s/5/A/<RESET>
- :<RED>2: f51d370 <RESET><YELLOW>!<RESET><GREEN> 2: 4ab067d<RESET><YELLOW> s/4/A/<RESET>
+ sed -e "s|^:||" >expect <<-EOF &&
+ :<YELLOW>1: $(test_oid c1) = 1: $(test_oid m1) s/5/A/<RESET>
+ :<RED>2: $(test_oid c2) <RESET><YELLOW>!<RESET><GREEN> 2: $(test_oid m2)<RESET><YELLOW> s/4/A/<RESET>
: <REVERSE><CYAN>@@<RESET> <RESET>Metadata<RESET>
: ## Commit message ##<RESET>
: s/4/A/<RESET>
@@ -305,7 +452,7 @@ test_expect_success 'dual-coloring' '
: ## file ##<RESET>
: <CYAN> @@<RESET>
: 1<RESET>
- :<RED>3: 0559556 <RESET><YELLOW>!<RESET><GREEN> 3: b9cb956<RESET><YELLOW> s/11/B/<RESET>
+ :<RED>3: $(test_oid c3) <RESET><YELLOW>!<RESET><GREEN> 3: $(test_oid m3)<RESET><YELLOW> s/11/B/<RESET>
: <REVERSE><CYAN>@@<RESET> <RESET>file: A<RESET>
: 9<RESET>
: 10<RESET>
@@ -315,7 +462,7 @@ test_expect_success 'dual-coloring' '
: 12<RESET>
: 13<RESET>
: 14<RESET>
- :<RED>4: d966c5c <RESET><YELLOW>!<RESET><GREEN> 4: 8add5f1<RESET><YELLOW> s/12/B/<RESET>
+ :<RED>4: $(test_oid c4) <RESET><YELLOW>!<RESET><GREEN> 4: $(test_oid m4)<RESET><YELLOW> s/12/B/<RESET>
: <REVERSE><CYAN>@@<RESET> <RESET>file<RESET>
: <CYAN> @@ file: A<RESET>
: 9<RESET>
@@ -354,4 +501,8 @@ test_expect_success 'format-patch --range-diff as commentary' '
grep "> 1: .* new message" 0001-*
'
+test_expect_success 'range-diff overrides diff.noprefix internally' '
+ git -c diff.noprefix=true range-diff HEAD^...
+'
+
test_done
diff --git a/t/t3206/history.export b/t/t3206/history.export
index 7bb3814962..4c808e5b3b 100644
--- a/t/t3206/history.export
+++ b/t/t3206/history.export
@@ -55,7 +55,7 @@ A
19
20
-commit refs/heads/topic
+commit refs/heads/mode-only-change
mark :4
author Thomas Rast <trast@inf.ethz.ch> 1374485014 +0200
committer Thomas Rast <trast@inf.ethz.ch> 1374485014 +0200
@@ -678,3 +678,32 @@ s/12/B/
from :55
M 100644 :9 renamed-file
+commit refs/heads/mode-only-change
+mark :57
+author Thomas Rast <trast@inf.ethz.ch> 1374485024 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1570473767 +0100
+data 24
+s/4/A/ + add other-file
+from :4
+M 100644 :5 file
+M 100644 :49 other-file
+
+commit refs/heads/mode-only-change
+mark :58
+author Thomas Rast <trast@inf.ethz.ch> 1374485036 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1570473768 +0100
+data 33
+s/11/B/ + mode change other-file
+from :57
+M 100644 :7 file
+M 100755 :49 other-file
+
+commit refs/heads/mode-only-change
+mark :59
+author Thomas Rast <trast@inf.ethz.ch> 1374485044 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1570473768 +0100
+data 8
+s/12/B/
+from :58
+M 100644 :9 file
+
diff --git a/t/t3301-notes.sh b/t/t3301-notes.sh
index 704bbc6541..d3fa298c6a 100755
--- a/t/t3301-notes.sh
+++ b/t/t3301-notes.sh
@@ -66,8 +66,9 @@ test_expect_success 'show notes entry with %N' '
'
test_expect_success 'create reflog entry' '
+ ref=$(git rev-parse --short refs/notes/commits) &&
cat <<-EOF >expect &&
- a1d8fa6 refs/notes/commits@{0}: notes: Notes added by '\''git notes add'\''
+ $ref refs/notes/commits@{0}: notes: Notes added by '\''git notes add'\''
EOF
git reflog show refs/notes/commits >actual &&
test_cmp expect actual
@@ -134,8 +135,9 @@ test_expect_success 'can overwrite existing note with "git notes add -f"' '
'
test_expect_success 'show notes' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 7a4ca6ee52a974a66cbaa78e33214535dff1d691
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:14:13 2005 -0700
@@ -152,8 +154,9 @@ test_expect_success 'show notes' '
test_expect_success 'show multi-line notes' '
test_commit 3rd &&
MSG="b3${LF}c3c3c3c3${LF}d3d3d3" git notes add &&
+ commit=$(git rev-parse HEAD) &&
cat >expect-multiline <<-EOF &&
- commit d07d62e5208f22eb5695e7eb47667dc8b9860290
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:15:13 2005 -0700
@@ -174,8 +177,9 @@ test_expect_success 'show -F notes' '
test_commit 4th &&
echo "xyzzy" >note5 &&
git notes add -F note5 &&
+ commit=$(git rev-parse HEAD) &&
cat >expect-F <<-EOF &&
- commit 0f7aa3ec6325aeb88b910453bb3eb37c49d75c11
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:16:13 2005 -0700
@@ -198,10 +202,13 @@ test_expect_success 'Re-adding -F notes without -f fails' '
'
test_expect_success 'git log --pretty=raw does not show notes' '
+ commit=$(git rev-parse HEAD) &&
+ tree=$(git rev-parse HEAD^{tree}) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect <<-EOF &&
- commit 0f7aa3ec6325aeb88b910453bb3eb37c49d75c11
- tree 05ac65288c4c4b3b709a020ae94b2ece2f2201ae
- parent d07d62e5208f22eb5695e7eb47667dc8b9860290
+ commit $commit
+ tree $tree
+ parent $parent
author A U Thor <author@example.com> 1112912173 -0700
committer C O Mitter <committer@example.com> 1112912173 -0700
@@ -291,8 +298,9 @@ test_expect_success 'git log --no-notes resets ref list' '
test_expect_success 'show -m notes' '
test_commit 5th &&
git notes add -m spam -m "foo${LF}bar${LF}baz" &&
+ commit=$(git rev-parse HEAD) &&
cat >expect-m <<-EOF &&
- commit 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:17:13 2005 -0700
@@ -313,8 +321,9 @@ test_expect_success 'show -m notes' '
test_expect_success 'remove note with add -f -F /dev/null' '
git notes add -f -F /dev/null &&
+ commit=$(git rev-parse HEAD) &&
cat >expect-rm-F <<-EOF &&
- commit 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:17:13 2005 -0700
@@ -356,14 +365,16 @@ test_expect_success 'create note with combination of -m and -F' '
test_expect_success 'remove note with "git notes remove"' '
git notes remove HEAD^ &&
git notes remove &&
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect-rm-remove <<-EOF &&
- commit 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:17:13 2005 -0700
${indent}5th
- commit 0f7aa3ec6325aeb88b910453bb3eb37c49d75c11
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:16:13 2005 -0700
@@ -459,9 +470,11 @@ test_expect_success 'removing with --stdin --ignore-missing' '
'
test_expect_success 'list notes with "git notes list"' '
- cat >expect <<-EOF &&
- c9c6af7f78bc47490dbf3e822cf2f3c24d4b9061 7a4ca6ee52a974a66cbaa78e33214535dff1d691
- c18dc024e14f08d18d14eea0d747ff692d66d6a3 d07d62e5208f22eb5695e7eb47667dc8b9860290
+ commit_2=$(git rev-parse 2nd) &&
+ commit_3=$(git rev-parse 3rd) &&
+ sort -t" " -k2 >expect <<-EOF &&
+ $(git rev-parse refs/notes/commits:$commit_2) $commit_2
+ $(git rev-parse refs/notes/commits:$commit_3) $commit_3
EOF
git notes list >actual &&
test_cmp expect actual
@@ -474,7 +487,7 @@ test_expect_success 'list notes with "git notes"' '
test_expect_success 'list specific note with "git notes list <object>"' '
cat >expect <<-EOF &&
- c18dc024e14f08d18d14eea0d747ff692d66d6a3
+ $(git rev-parse refs/notes/commits:$commit_3)
EOF
git notes list HEAD^^ >actual &&
test_cmp expect actual
@@ -498,10 +511,11 @@ test_expect_success 'append to existing note with "git notes append"' '
'
test_expect_success '"git notes list" does not expand to "git notes list HEAD"' '
- cat >expect_list <<-EOF &&
- c9c6af7f78bc47490dbf3e822cf2f3c24d4b9061 7a4ca6ee52a974a66cbaa78e33214535dff1d691
- 4b6ad22357cc8a1296720574b8d2fbc22fab0671 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
- c18dc024e14f08d18d14eea0d747ff692d66d6a3 d07d62e5208f22eb5695e7eb47667dc8b9860290
+ commit_5=$(git rev-parse 5th) &&
+ sort -t" " -k2 >expect_list <<-EOF &&
+ $(git rev-parse refs/notes/commits:$commit_2) $commit_2
+ $(git rev-parse refs/notes/commits:$commit_3) $commit_3
+ $(git rev-parse refs/notes/commits:$commit_5) $commit_5
EOF
git notes list >actual &&
test_cmp expect_list actual
@@ -531,8 +545,9 @@ test_expect_success 'appending empty string to non-existing note does not create
test_expect_success 'create other note on a different notes ref (setup)' '
test_commit 6th &&
GIT_NOTES_REF="refs/notes/other" git notes add -m "other note" &&
+ commit=$(git rev-parse HEAD) &&
cat >expect-not-other <<-EOF &&
- commit 2c125331118caba0ff8238b7f4958ac6e93fe39c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:18:13 2005 -0700
@@ -569,8 +584,10 @@ test_expect_success 'Do not show note when core.notesRef is overridden' '
'
test_expect_success 'Show all notes when notes.displayRef=refs/notes/*' '
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect-both <<-EOF &&
- commit 2c125331118caba0ff8238b7f4958ac6e93fe39c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:18:13 2005 -0700
@@ -582,7 +599,7 @@ test_expect_success 'Show all notes when notes.displayRef=refs/notes/*' '
Notes (other):
${indent}other note
- commit 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:17:13 2005 -0700
@@ -616,8 +633,9 @@ test_expect_success 'notes.displayRef can be given more than once' '
'
test_expect_success 'notes.displayRef respects order' '
+ commit=$(git rev-parse HEAD) &&
cat >expect-both-reversed <<-EOF &&
- commit 2c125331118caba0ff8238b7f4958ac6e93fe39c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:18:13 2005 -0700
@@ -642,14 +660,16 @@ test_expect_success 'GIT_NOTES_DISPLAY_REF works' '
'
test_expect_success 'GIT_NOTES_DISPLAY_REF overrides config' '
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect-none <<-EOF &&
- commit 2c125331118caba0ff8238b7f4958ac6e93fe39c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:18:13 2005 -0700
${indent}6th
- commit 7f9ad8836c775acb134c0a055fc55fb4cd1ba361
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:17:13 2005 -0700
@@ -666,8 +686,9 @@ test_expect_success '--show-notes=* adds to GIT_NOTES_DISPLAY_REF' '
'
test_expect_success '--no-standard-notes' '
+ commit=$(git rev-parse HEAD) &&
cat >expect-commits <<-EOF &&
- commit 2c125331118caba0ff8238b7f4958ac6e93fe39c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:18:13 2005 -0700
@@ -712,8 +733,10 @@ test_expect_success 'Allow notes on non-commits (trees, blobs, tags)' '
'
test_expect_success 'create note from other note with "git notes add -C"' '
+ test_commit 7th &&
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit fb01e0ca8c33b6cc0c6451dde747f97df567cb5c
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:19:13 2005 -0700
@@ -722,7 +745,6 @@ test_expect_success 'create note from other note with "git notes add -C"' '
Notes:
${indent}order test
EOF
- test_commit 7th &&
git notes add -C $(git notes list HEAD^) &&
git log -1 >actual &&
test_cmp expect actual &&
@@ -744,8 +766,9 @@ test_expect_success 'create note from non-blob with "git notes add -C" fails' '
'
test_expect_success 'create note from blob with "git notes add -C" reuses blob id' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 9a4c31c7f722b5d517e92c64e932dd751e1413bf
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:20:13 2005 -0700
@@ -762,8 +785,10 @@ test_expect_success 'create note from blob with "git notes add -C" reuses blob i
'
test_expect_success 'create note from other note with "git notes add -c"' '
+ test_commit 9th &&
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 2e0db4bc649e174d667a1cde19e725cf897a5bd2
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:21:13 2005 -0700
@@ -772,7 +797,6 @@ test_expect_success 'create note from other note with "git notes add -c"' '
Notes:
${indent}yet another note
EOF
- test_commit 9th &&
MSG="yet another note" git notes add -c $(git notes list HEAD^^) &&
git log -1 >actual &&
test_cmp expect actual
@@ -785,8 +809,9 @@ test_expect_success 'create note from non-existing note with "git notes add -c"
'
test_expect_success 'append to note from other note with "git notes append -C"' '
+ commit=$(git rev-parse HEAD^) &&
cat >expect <<-EOF &&
- commit 2e0db4bc649e174d667a1cde19e725cf897a5bd2
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:21:13 2005 -0700
@@ -803,8 +828,9 @@ test_expect_success 'append to note from other note with "git notes append -C"'
'
test_expect_success 'create note from other note with "git notes append -c"' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 7c3b87ab368f81e11b1ea87b2ab99a71ccd25406
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:22:13 2005 -0700
@@ -819,8 +845,9 @@ test_expect_success 'create note from other note with "git notes append -c"' '
'
test_expect_success 'append to note from other note with "git notes append -c"' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 7c3b87ab368f81e11b1ea87b2ab99a71ccd25406
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:22:13 2005 -0700
@@ -837,8 +864,10 @@ test_expect_success 'append to note from other note with "git notes append -c"'
'
test_expect_success 'copy note with "git notes copy"' '
+ test_commit 11th &&
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit a446fff8777efdc6eb8f4b7c8a5ff699484df0d5
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:23:13 2005 -0700
@@ -849,7 +878,6 @@ test_expect_success 'copy note with "git notes copy"' '
${indent}
${indent}yet another note
EOF
- test_commit 11th &&
git notes copy HEAD^ HEAD &&
git log -1 >actual &&
test_cmp expect actual &&
@@ -864,8 +892,9 @@ test_expect_success 'prevent overwrite with "git notes copy"' '
'
test_expect_success 'allow overwrite with "git notes copy -f"' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit a446fff8777efdc6eb8f4b7c8a5ff699484df0d5
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:23:13 2005 -0700
@@ -889,8 +918,10 @@ test_expect_success 'cannot copy note from object without notes' '
'
test_expect_success 'git notes copy --stdin' '
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect <<-EOF &&
- commit e871aa61182b1d95d0a6fb75445d891722863b6b
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:25:13 2005 -0700
@@ -901,7 +932,7 @@ test_expect_success 'git notes copy --stdin' '
${indent}
${indent}yet another note
- commit 65e263ded02ae4e8839bc151095113737579dc12
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:24:13 2005 -0700
@@ -922,21 +953,23 @@ test_expect_success 'git notes copy --stdin' '
'
test_expect_success 'git notes copy --for-rewrite (unconfigured)' '
+ test_commit 14th &&
+ test_commit 15th &&
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
${indent}15th
- commit 07c85d77059393ed0154b8c96906547a59dfcddd
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:26:13 2005 -0700
${indent}14th
EOF
- test_commit 14th &&
- test_commit 15th &&
(echo $(git rev-parse HEAD~3) $(git rev-parse HEAD^) &&
echo $(git rev-parse HEAD~2) $(git rev-parse HEAD)) |
git notes copy --for-rewrite=foo &&
@@ -945,8 +978,10 @@ test_expect_success 'git notes copy --for-rewrite (unconfigured)' '
'
test_expect_success 'git notes copy --for-rewrite (enabled)' '
+ commit=$(git rev-parse HEAD) &&
+ parent=$(git rev-parse HEAD^) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
@@ -957,7 +992,7 @@ test_expect_success 'git notes copy --for-rewrite (enabled)' '
${indent}
${indent}yet another note
- commit 07c85d77059393ed0154b8c96906547a59dfcddd
+ commit $parent
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:26:13 2005 -0700
@@ -986,8 +1021,9 @@ test_expect_success 'git notes copy --for-rewrite (disabled)' '
'
test_expect_success 'git notes copy --for-rewrite (overwrite)' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
@@ -1015,8 +1051,9 @@ test_expect_success 'git notes copy --for-rewrite (ignore)' '
'
test_expect_success 'git notes copy --for-rewrite (append)' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
@@ -1037,8 +1074,9 @@ test_expect_success 'git notes copy --for-rewrite (append)' '
'
test_expect_success 'git notes copy --for-rewrite (append two to one)' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
@@ -1075,8 +1113,9 @@ test_expect_success 'git notes copy --for-rewrite (append empty)' '
'
test_expect_success 'GIT_NOTES_REWRITE_MODE works' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
@@ -1095,8 +1134,9 @@ test_expect_success 'GIT_NOTES_REWRITE_MODE works' '
'
test_expect_success 'GIT_NOTES_REWRITE_REF works' '
+ commit=$(git rev-parse HEAD) &&
cat >expect <<-EOF &&
- commit 4acf42e847e7fffbbf89ee365c20ac7caf40de89
+ commit $commit
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:27:13 2005 -0700
diff --git a/t/t3305-notes-fanout.sh b/t/t3305-notes-fanout.sh
index 54460beec4..831f83d211 100755
--- a/t/t3305-notes-fanout.sh
+++ b/t/t3305-notes-fanout.sh
@@ -35,15 +35,10 @@ test_expect_success 'many notes created with git-notes triggers fanout' '
git ls-tree -r --name-only refs/notes/commits |
while read path
do
- case "$path" in
- ??/??????????????????????????????????????)
- : true
- ;;
- *)
+ echo $path | grep "^../[0-9a-f]*$" || {
echo "Invalid path \"$path\"" &&
- return 1
- ;;
- esac
+ return 1;
+ }
done
'
@@ -77,15 +72,10 @@ test_expect_success 'deleting most notes triggers fanout consolidation' '
git ls-tree -r --name-only refs/notes/commits |
while read path
do
- case "$path" in
- ????????????????????????????????????????)
- : true
- ;;
- *)
+ echo $path | grep -v "^../.*" || {
echo "Invalid path \"$path\"" &&
- return 1
- ;;
- esac
+ return 1;
+ }
done
'
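
A notes tree stores one entry per annotated object, either under the full object name or "fanned out" into a two-hex-digit directory plus the remainder; the rewritten checks above use grep so they no longer hard-code a SHA-1 length. A minimal sketch of inspecting that layout (not part of the patch), assuming a repository where "git notes add" has been run at least once:

	git ls-tree -r --name-only refs/notes/commits |
	while read path
	do
		case "$path" in
		*/*) echo "fanned out: $path" ;;	# e.g. ab/cdef... (two hex digits, then the rest)
		*)   echo "flat: $path" ;;		# full object name, no fanout
		esac
	done
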
diff --git a/t/t3306-notes-prune.sh b/t/t3306-notes-prune.sh
index 61748088eb..8f4102ff9e 100755
--- a/t/t3306-notes-prune.sh
+++ b/t/t3306-notes-prune.sh
@@ -11,23 +11,26 @@ test_expect_success 'setup: create a few commits with notes' '
test_tick &&
git commit -m 1st &&
git notes add -m "Note #1" &&
+ first=$(git rev-parse HEAD) &&
: > file2 &&
git add file2 &&
test_tick &&
git commit -m 2nd &&
git notes add -m "Note #2" &&
+ second=$(git rev-parse HEAD) &&
: > file3 &&
git add file3 &&
test_tick &&
git commit -m 3rd &&
- COMMIT_FILE=.git/objects/5e/e1c35e83ea47cd3cc4f8cbee0568915fbbbd29 &&
+ third=$(git rev-parse HEAD) &&
+ COMMIT_FILE=$(echo $third | sed "s!^..!.git/objects/&/!") &&
test -f $COMMIT_FILE &&
test-tool chmtime =+0 $COMMIT_FILE &&
git notes add -m "Note #3"
'
cat > expect <<END_OF_LOG
-commit 5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29
+commit $third
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:15:13 2005 -0700
@@ -36,7 +39,7 @@ Date: Thu Apr 7 15:15:13 2005 -0700
Notes:
Note #3
-commit 08341ad9e94faa089d60fd3f523affb25c6da189
+commit $second
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:14:13 2005 -0700
@@ -45,7 +48,7 @@ Date: Thu Apr 7 15:14:13 2005 -0700
Notes:
Note #2
-commit ab5f302035f2e7aaf04265f08b42034c23256e1f
+commit $first
Author: A U Thor <author@example.com>
Date: Thu Apr 7 15:13:13 2005 -0700
@@ -70,16 +73,16 @@ test_expect_success 'remove some commits' '
test_expect_success 'verify that commits are gone' '
- test_must_fail git cat-file -p 5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29 &&
- git cat-file -p 08341ad9e94faa089d60fd3f523affb25c6da189 &&
- git cat-file -p ab5f302035f2e7aaf04265f08b42034c23256e1f
+ test_must_fail git cat-file -p $third &&
+ git cat-file -p $second &&
+ git cat-file -p $first
'
test_expect_success 'verify that notes are still present' '
- git notes show 5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29 &&
- git notes show 08341ad9e94faa089d60fd3f523affb25c6da189 &&
- git notes show ab5f302035f2e7aaf04265f08b42034c23256e1f
+ git notes show $third &&
+ git notes show $second &&
+ git notes show $first
'
test_expect_success 'prune -n does not remove notes' '
@@ -90,13 +93,10 @@ test_expect_success 'prune -n does not remove notes' '
test_cmp expect actual
'
-cat > expect <<EOF
-5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29
-EOF
test_expect_success 'prune -n lists prunable notes' '
-
+ echo $third >expect &&
git notes prune -n > actual &&
test_cmp expect actual
'
@@ -109,9 +109,9 @@ test_expect_success 'prune notes' '
test_expect_success 'verify that notes are gone' '
- test_must_fail git notes show 5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29 &&
- git notes show 08341ad9e94faa089d60fd3f523affb25c6da189 &&
- git notes show ab5f302035f2e7aaf04265f08b42034c23256e1f
+ test_must_fail git notes show $third &&
+ git notes show $second &&
+ git notes show $first
'
test_expect_success 'remove some commits' '
@@ -121,21 +121,18 @@ test_expect_success 'remove some commits' '
git gc --prune=now
'
-cat > expect <<EOF
-08341ad9e94faa089d60fd3f523affb25c6da189
-EOF
-
test_expect_success 'prune -v notes' '
+ echo $second >expect &&
git notes prune -v > actual &&
test_cmp expect actual
'
test_expect_success 'verify that notes are gone' '
- test_must_fail git notes show 5ee1c35e83ea47cd3cc4f8cbee0568915fbbbd29 &&
- test_must_fail git notes show 08341ad9e94faa089d60fd3f523affb25c6da189 &&
- git notes show ab5f302035f2e7aaf04265f08b42034c23256e1f
+ test_must_fail git notes show $third &&
+ test_must_fail git notes show $second &&
+ git notes show $first
'
test_done
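
The setup above now derives the loose-object path from the freshly created commit instead of hard-coding it. A small sketch of that derivation (not part of the patch), assuming the default .git layout:

	third=$(git rev-parse HEAD) &&
	COMMIT_FILE=$(echo "$third" | sed "s!^..!.git/objects/&/!") &&
	echo "$COMMIT_FILE"	# e.g. .git/objects/5e/e1c35e83ea... for SHA-1; longer for SHA-256
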
diff --git a/t/t3400-rebase.sh b/t/t3400-rebase.sh
index 80b23fd326..ab18ac5f28 100755
--- a/t/t3400-rebase.sh
+++ b/t/t3400-rebase.sh
@@ -295,12 +295,48 @@ test_expect_success 'rebase --am and --show-current-patch' '
echo two >>init.t &&
git commit -a -m two &&
git tag two &&
- test_must_fail git rebase --onto init HEAD^ &&
+ test_must_fail git rebase -f --onto init HEAD^ &&
GIT_TRACE=1 git rebase --show-current-patch >/dev/null 2>stderr &&
grep "show.*$(git rev-parse two)" stderr
)
'
+test_expect_success 'rebase --am and .gitattributes' '
+ test_create_repo attributes &&
+ (
+ cd attributes &&
+ test_commit init &&
+ git config filter.test.clean "sed -e '\''s/smudged/clean/g'\''" &&
+ git config filter.test.smudge "sed -e '\''s/clean/smudged/g'\''" &&
+
+ test_commit second &&
+ git checkout -b test HEAD^ &&
+
+ echo "*.txt filter=test" >.gitattributes &&
+ git add .gitattributes &&
+ test_commit third &&
+
+ echo "This text is smudged." >a.txt &&
+ git add a.txt &&
+ test_commit fourth &&
+
+ git checkout -b removal HEAD^ &&
+ git rm .gitattributes &&
+ git add -u &&
+ test_commit fifth &&
+ git cherry-pick test &&
+
+ git checkout test &&
+ git rebase master &&
+ grep "smudged" a.txt &&
+
+ git checkout removal &&
+ git reset --hard &&
+ git rebase master &&
+ grep "clean" a.txt
+ )
+'
+
test_expect_success 'rebase--merge.sh and --show-current-patch' '
test_create_repo conflict-merge &&
(
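
The new .gitattributes test above drives a clean/smudge filter pair. A minimal sketch of that configuration (not part of the patch; "test" is simply the filter name the test picks):

	git config filter.test.clean  "sed -e 's/smudged/clean/g'"
	git config filter.test.smudge "sed -e 's/clean/smudged/g'"
	echo "*.txt filter=test" >.gitattributes
	# *.txt files now pass through the smudge filter on checkout and the clean
	# filter when staged, which is what the rebase is expected to honour.
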
diff --git a/t/t3404-rebase-interactive.sh b/t/t3404-rebase-interactive.sh
index 461dd539ff..d2dfbe46b9 100755
--- a/t/t3404-rebase-interactive.sh
+++ b/t/t3404-rebase-interactive.sh
@@ -29,9 +29,6 @@ Initial setup:
. "$TEST_DIRECTORY"/lib-rebase.sh
-# WARNING: Modifications to the initial repository can change the SHA ID used
-# in the expect2 file for the 'stop on conflicting pick' test.
-
test_expect_success 'setup' '
test_commit A file1 &&
test_commit B file1 &&
@@ -155,8 +152,6 @@ test_expect_success 'rebase -x with empty command fails' '
test_i18ncmp expected actual
'
-LF='
-'
test_expect_success 'rebase -x with newline in command fails' '
test_when_finished "git rebase --abort ||:" &&
test_must_fail env git rebase -x "a${LF}b" @ 2>actual &&
@@ -233,25 +228,28 @@ test_expect_success 'exchange two commits' '
set_fake_editor &&
FAKE_LINES="2 1" git rebase -i HEAD~2 &&
test H = $(git cat-file commit HEAD^ | sed -ne \$p) &&
- test G = $(git cat-file commit HEAD | sed -ne \$p)
+ test G = $(git cat-file commit HEAD | sed -ne \$p) &&
+ blob1=$(git rev-parse --short HEAD^:file1) &&
+ blob2=$(git rev-parse --short HEAD:file1) &&
+ commit=$(git rev-parse --short HEAD)
'
test_expect_success 'stop on conflicting pick' '
- cat >expect <<-\EOF &&
+ cat >expect <<-EOF &&
diff --git a/file1 b/file1
- index f70f10e..fd79235 100644
+ index $blob1..$blob2 100644
--- a/file1
+++ b/file1
@@ -1 +1 @@
-A
+G
EOF
- cat >expect2 <<-\EOF &&
+ cat >expect2 <<-EOF &&
<<<<<<< HEAD
D
=======
G
- >>>>>>> 5d18e54... G
+ >>>>>>> $commit... G
EOF
git tag new-branch1 &&
set_fake_editor &&
@@ -1003,7 +1001,7 @@ test_expect_success 'rebase -i --root temporary sentinel commit' '
git checkout B &&
set_fake_editor &&
test_must_fail env FAKE_LINES="2" git rebase -i --root &&
- git cat-file commit HEAD | grep "^tree 4b825dc642cb" &&
+ git cat-file commit HEAD | grep "^tree $EMPTY_TREE" &&
git rebase --abort
'
@@ -1016,9 +1014,9 @@ test_expect_success 'rebase -i --root fixup root commit' '
test 0 = $(git cat-file commit HEAD | grep -c ^parent\ )
'
-test_expect_success 'rebase -i --root reword root commit' '
+test_expect_success 'rebase -i --root reword original root commit' '
test_when_finished "test_might_fail git rebase --abort" &&
- git checkout -b reword-root-branch master &&
+ git checkout -b reword-original-root-branch master &&
set_fake_editor &&
FAKE_LINES="reword 1 2" FAKE_COMMIT_MESSAGE="A changed" \
git rebase -i --root &&
@@ -1026,6 +1024,16 @@ test_expect_success 'rebase -i --root reword root commit' '
test -z "$(git show -s --format=%p HEAD^)"
'
+test_expect_success 'rebase -i --root reword new root commit' '
+ test_when_finished "test_might_fail git rebase --abort" &&
+ git checkout -b reword-now-root-branch master &&
+ set_fake_editor &&
+ FAKE_LINES="reword 3 1" FAKE_COMMIT_MESSAGE="C changed" \
+ git rebase -i --root &&
+ git show HEAD^ | grep "C changed" &&
+ test -z "$(git show -s --format=%p HEAD^)"
+'
+
test_expect_success 'rebase -i --root when root has untracked file conflict' '
test_when_finished "reset_rebase" &&
git checkout -b failing-root-pick A &&
@@ -1054,11 +1062,11 @@ test_expect_success 'rebase -i --root reword root when root has untracked file c
'
test_expect_success C_LOCALE_OUTPUT 'rebase --edit-todo does not work on non-interactive rebase' '
- git checkout reword-root-branch &&
+ git checkout reword-original-root-branch &&
git reset --hard &&
git checkout conflict-branch &&
set_fake_editor &&
- test_must_fail git rebase --onto HEAD~2 HEAD~ &&
+ test_must_fail git rebase -f --onto HEAD~2 HEAD~ &&
test_must_fail git rebase --edit-todo &&
git rebase --abort
'
@@ -1161,7 +1169,7 @@ test_expect_success 'rebase -i error on commits with \ in message' '
test_expect_code 1 grep " emp" error
'
-test_expect_success 'short SHA-1 setup' '
+test_expect_success SHA1 'short SHA-1 setup' '
test_when_finished "git checkout master" &&
git checkout --orphan collide &&
git rm -rf . &&
@@ -1173,7 +1181,7 @@ test_expect_success 'short SHA-1 setup' '
)
'
-test_expect_success 'short SHA-1 collide' '
+test_expect_success SHA1 'short SHA-1 collide' '
test_when_finished "reset_rebase && git checkout master" &&
git checkout collide &&
(
@@ -1419,7 +1427,6 @@ test_expect_success 'editor saves as CR/LF' '
)
'
-SQ="'"
test_expect_success 'rebase -i --gpg-sign=<key-id>' '
test_when_finished "test_might_fail git rebase --abort" &&
set_fake_editor &&
diff --git a/t/t3416-rebase-onto-threedots.sh b/t/t3416-rebase-onto-threedots.sh
index ddf2f64853..9c2548423b 100755
--- a/t/t3416-rebase-onto-threedots.sh
+++ b/t/t3416-rebase-onto-threedots.sh
@@ -99,7 +99,64 @@ test_expect_success 'rebase -i --onto master...side' '
git checkout side &&
git reset --hard K &&
+ set_fake_editor &&
test_must_fail git rebase -i --onto master...side J
'
+test_expect_success 'rebase --keep-base --onto incompatible' '
+ test_must_fail git rebase --keep-base --onto master...
+'
+
+test_expect_success 'rebase --keep-base --root incompatible' '
+ test_must_fail git rebase --keep-base --root
+'
+
+test_expect_success 'rebase --keep-base master from topic' '
+ git reset --hard &&
+ git checkout topic &&
+ git reset --hard G &&
+
+ git rebase --keep-base master &&
+ git rev-parse C >base.expect &&
+ git merge-base master HEAD >base.actual &&
+ test_cmp base.expect base.actual &&
+
+ git rev-parse HEAD~2 >actual &&
+ git rev-parse C^0 >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'rebase --keep-base master from side' '
+ git reset --hard &&
+ git checkout side &&
+ git reset --hard K &&
+
+ test_must_fail git rebase --keep-base master
+'
+
+test_expect_success 'rebase -i --keep-base master from topic' '
+ git reset --hard &&
+ git checkout topic &&
+ git reset --hard G &&
+
+ set_fake_editor &&
+ EXPECT_COUNT=2 git rebase -i --keep-base master &&
+ git rev-parse C >base.expect &&
+ git merge-base master HEAD >base.actual &&
+ test_cmp base.expect base.actual &&
+
+ git rev-parse HEAD~2 >actual &&
+ git rev-parse C^0 >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'rebase -i --keep-base master from side' '
+ git reset --hard &&
+ git checkout side &&
+ git reset --hard K &&
+
+ set_fake_editor &&
+ test_must_fail git rebase -i --keep-base master
+'
+
test_done
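
For the new --keep-base cases above: --keep-base rebases onto the existing merge base of the upstream and the branch rather than onto the upstream tip. A rough sketch (not part of the patch), assuming a branch "topic" forked from "master":

	git rebase --keep-base master topic
	# is roughly equivalent to
	git rebase --onto "$(git merge-base master topic)" master topic
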
diff --git a/t/t3418-rebase-continue.sh b/t/t3418-rebase-continue.sh
index 4eff14dae5..7a2da972fd 100755
--- a/t/t3418-rebase-continue.sh
+++ b/t/t3418-rebase-continue.sh
@@ -120,6 +120,20 @@ test_expect_success REBASE_P 'rebase passes merge strategy options correctly' '
git rebase --continue
'
+test_expect_success 'rebase -r passes merge strategy options correctly' '
+ rm -fr .git/rebase-* &&
+ git reset --hard commit-new-file-F3-on-topic-branch &&
+ test_commit merge-theirs &&
+ git reset --hard HEAD^ &&
+ test_commit some-other-commit &&
+ test_tick &&
+ git merge --no-ff merge-theirs &&
+ FAKE_LINES="1 3 edit 4 5 7 8 9" git rebase -i -f -r -m \
+ -s recursive --strategy-option=theirs HEAD~2 &&
+ test_commit force-change-ours &&
+ git rebase --continue
+'
+
test_expect_success '--skip after failed fixup cleans commit message' '
test_when_finished "test_might_fail git rebase --abort" &&
git checkout -b with-conflicting-fixup &&
diff --git a/t/t3420-rebase-autostash.sh b/t/t3420-rebase-autostash.sh
index b8f4d03467..5f7e73cf83 100755
--- a/t/t3420-rebase-autostash.sh
+++ b/t/t3420-rebase-autostash.sh
@@ -37,7 +37,6 @@ test_expect_success setup '
create_expected_success_am () {
cat >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
- HEAD is now at $(git rev-parse --short feature-branch) third commit
First, rewinding head to replay your work on top of it...
Applying: second commit
Applying: third commit
@@ -48,7 +47,6 @@ create_expected_success_am () {
create_expected_success_interactive () {
q_to_cr >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
- HEAD is now at $(git rev-parse --short feature-branch) third commit
Applied autostash.
Successfully rebased and updated refs/heads/rebased-feature-branch.
EOF
@@ -57,7 +55,6 @@ create_expected_success_interactive () {
create_expected_failure_am () {
cat >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
- HEAD is now at $(git rev-parse --short feature-branch) third commit
First, rewinding head to replay your work on top of it...
Applying: second commit
Applying: third commit
@@ -70,7 +67,6 @@ create_expected_failure_am () {
create_expected_failure_interactive () {
cat >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
- HEAD is now at $(git rev-parse --short feature-branch) third commit
Applying autostash resulted in conflicts.
Your changes are safe in the stash.
You can run "git stash pop" or "git stash drop" at any time.
@@ -306,4 +302,12 @@ test_expect_success 'branch is left alone when possible' '
test unchanged-branch = "$(git rev-parse --abbrev-ref HEAD)"
'
+test_expect_success 'never change active branch' '
+ git checkout -b not-the-feature-branch unrelated-onto-branch &&
+ test_when_finished "git reset --hard && git checkout master" &&
+ echo changed >file0 &&
+ git rebase --autostash not-the-feature-branch feature-branch &&
+ test_cmp_rev not-the-feature-branch unrelated-onto-branch
+'
+
test_done
diff --git a/t/t3421-rebase-topology-linear.sh b/t/t3421-rebase-topology-linear.sh
index 7274dca40b..b847064f91 100755
--- a/t/t3421-rebase-topology-linear.sh
+++ b/t/t3421-rebase-topology-linear.sh
@@ -31,6 +31,16 @@ test_run_rebase success -m
test_run_rebase success -i
test_have_prereq !REBASE_P || test_run_rebase success -p
+test_expect_success 'setup branches and remote tracking' '
+ git tag -l >tags &&
+ for tag in $(cat tags)
+ do
+ git branch branch-$tag $tag || return 1
+ done &&
+ git remote add origin "file://$PWD" &&
+ git fetch origin
+'
+
test_run_rebase () {
result=$1
shift
@@ -57,6 +67,7 @@ test_run_rebase () {
"
}
test_run_rebase success ''
+test_run_rebase success --fork-point
test_run_rebase success -m
test_run_rebase success -i
test_have_prereq !REBASE_P || test_run_rebase failure -p
@@ -64,6 +75,23 @@ test_have_prereq !REBASE_P || test_run_rebase failure -p
test_run_rebase () {
result=$1
shift
+ test_expect_$result "rebase $* -f rewrites even if remote upstream is an ancestor" "
+ reset_rebase &&
+ git rebase $* -f branch-b branch-e &&
+ ! test_cmp_rev branch-e origin/branch-e &&
+ test_cmp_rev branch-b HEAD~2 &&
+ test_linear_range 'd e' branch-b..
+ "
+}
+test_run_rebase success ''
+test_run_rebase success --fork-point
+test_run_rebase success -m
+test_run_rebase success -i
+test_have_prereq !REBASE_P || test_run_rebase success -p
+
+test_run_rebase () {
+ result=$1
+ shift
test_expect_$result "rebase $* fast-forwards from ancestor of upstream" "
reset_rebase &&
git rebase $* e b &&
@@ -71,6 +99,7 @@ test_run_rebase () {
"
}
test_run_rebase success ''
+test_run_rebase success --fork-point
test_run_rebase success -m
test_run_rebase success -i
test_have_prereq !REBASE_P || test_run_rebase success -p
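
The hunks above extend this script's existing pattern of redefining test_run_rebase and then instantiating it once per rebase flavor. A reduced sketch of that generator pattern in plain shell (not part of the patch):

	run_case () {
		flavor="$1"
		echo "would run: git rebase $flavor ..."
		# a real body performs the rebase and asserts on the resulting history
	}
	for flavor in "" "--fork-point" "-m" "-i"
	do
		run_case "$flavor"
	done
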
diff --git a/t/t3422-rebase-incompatible-options.sh b/t/t3422-rebase-incompatible-options.sh
index a5868ea152..50e7960702 100755
--- a/t/t3422-rebase-incompatible-options.sh
+++ b/t/t3422-rebase-incompatible-options.sh
@@ -76,14 +76,4 @@ test_expect_success REBASE_P \
test_must_fail git rebase --preserve-merges --rebase-merges A
'
-test_expect_success '--rebase-merges incompatible with --strategy' '
- git checkout B^0 &&
- test_must_fail git rebase --rebase-merges -s resolve A
-'
-
-test_expect_success '--rebase-merges incompatible with --strategy-option' '
- git checkout B^0 &&
- test_must_fail git rebase --rebase-merges -Xignore-space-change A
-'
-
test_done
diff --git a/t/t3427-rebase-subtree.sh b/t/t3427-rebase-subtree.sh
index d8640522a0..bec48e6a1f 100755
--- a/t/t3427-rebase-subtree.sh
+++ b/t/t3427-rebase-subtree.sh
@@ -11,113 +11,99 @@ commit_message() {
git log --pretty=format:%s -1 "$1"
}
+# There are a few bugs in the rebase with regards to the subtree strategy, and
+# this test script tries to document them. First, the following commit history
+# is generated (the onelines are shown, time flows from left to right):
+#
+# master1 - master2 - master3
+# \
+# README ---------------------- Add subproject master - master4 - files_subtree/master5
+#
+# Where the merge moves the files master[123].t into the subdirectory
+# files_subtree/ and master4 as well as files_subtree/master5 add files to that
+# directory directly.
+#
+# Then, in subsequent test cases, `git filter-branch` is used to distill just
+# the commits that touch files_subtree/. To give it a final pre-rebase touch,
+# an empty commit is added on top. The pre-rebase commit history looks like
+# this:
+#
+# Add subproject master - master4 - files_subtree/master5 - Empty commit
+#
+# where the root commit adds three files: master1.t, master2.t and master3.t.
+#
+# This commit history is then rebased onto `master3` with the
+# `-Xsubtree=files_subtree` option in three different ways:
+#
+# 1. using `--preserve-merges`
+# 2. using `--preserve-merges` and --keep-empty
+# 3. without specifying a rebase backend
+
test_expect_success 'setup' '
test_commit README &&
- mkdir files &&
- (
- cd files &&
- git init &&
- test_commit master1 &&
- test_commit master2 &&
- test_commit master3
- ) &&
- git fetch files master &&
- git branch files-master FETCH_HEAD &&
- git read-tree --prefix=files_subtree files-master &&
- git checkout -- files_subtree &&
- tree=$(git write-tree) &&
- head=$(git rev-parse HEAD) &&
- rev=$(git rev-parse --verify files-master^0) &&
- commit=$(git commit-tree -p $head -p $rev -m "Add subproject master" $tree) &&
- git update-ref HEAD $commit &&
- (
- cd files_subtree &&
- test_commit master4
- ) &&
- test_commit files_subtree/master5
-'
-# FAILURE: Does not preserve master4.
-test_expect_failure REBASE_P \
- 'Rebase -Xsubtree --preserve-merges --onto commit 4' '
- reset_rebase &&
- git checkout -b rebase-preserve-merges-4 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --preserve-merges --onto files-master master &&
- verbose test "$(commit_message HEAD~)" = "files_subtree/master4"
+ git init files &&
+ test_commit -C files master1 &&
+ test_commit -C files master2 &&
+ test_commit -C files master3 &&
+
+ : perform subtree merge into files_subtree/ &&
+ git fetch files refs/heads/master:refs/heads/files-master &&
+ git merge -s ours --no-commit --allow-unrelated-histories \
+ files-master &&
+ git read-tree --prefix=files_subtree -u files-master &&
+ git commit -m "Add subproject master" &&
+
+ : add two extra commits to rebase &&
+ test_commit -C files_subtree master4 &&
+ test_commit files_subtree/master5 &&
+
+ git checkout -b to-rebase &&
+ git fast-export --no-data HEAD -- files_subtree/ |
+ sed -e "s%\([0-9a-f]\{40\} \)files_subtree/%\1%" |
+ git fast-import --force --quiet &&
+ git reset --hard &&
+ git commit -m "Empty commit" --allow-empty
'
-# FAILURE: Does not preserve master5.
-test_expect_failure REBASE_P \
- 'Rebase -Xsubtree --preserve-merges --onto commit 5' '
+# FAILURE: Does not preserve master4.
+test_expect_failure REBASE_P 'Rebase -Xsubtree --preserve-merges --onto commit' '
reset_rebase &&
- git checkout -b rebase-preserve-merges-5 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
+ git checkout -b rebase-preserve-merges to-rebase &&
git rebase -Xsubtree=files_subtree --preserve-merges --onto files-master master &&
+ verbose test "$(commit_message HEAD~)" = "master4" &&
verbose test "$(commit_message HEAD)" = "files_subtree/master5"
'
# FAILURE: Does not preserve master4.
-test_expect_failure REBASE_P \
- 'Rebase -Xsubtree --keep-empty --preserve-merges --onto commit 4' '
- reset_rebase &&
- git checkout -b rebase-keep-empty-4 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --keep-empty --preserve-merges --onto files-master master &&
- verbose test "$(commit_message HEAD~2)" = "files_subtree/master4"
-'
-
-# FAILURE: Does not preserve master5.
-test_expect_failure REBASE_P \
- 'Rebase -Xsubtree --keep-empty --preserve-merges --onto commit 5' '
- reset_rebase &&
- git checkout -b rebase-keep-empty-5 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --keep-empty --preserve-merges --onto files-master master &&
- verbose test "$(commit_message HEAD~)" = "files_subtree/master5"
-'
-
-# FAILURE: Does not preserve Empty.
-test_expect_failure REBASE_P \
- 'Rebase -Xsubtree --keep-empty --preserve-merges --onto empty commit' '
+test_expect_failure REBASE_P 'Rebase -Xsubtree --keep-empty --preserve-merges --onto commit' '
reset_rebase &&
- git checkout -b rebase-keep-empty-empty master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
+ git checkout -b rebase-keep-empty to-rebase &&
git rebase -Xsubtree=files_subtree --keep-empty --preserve-merges --onto files-master master &&
+ verbose test "$(commit_message HEAD~2)" = "master4" &&
+ verbose test "$(commit_message HEAD~)" = "files_subtree/master5" &&
verbose test "$(commit_message HEAD)" = "Empty commit"
'
-# FAILURE: fatal: Could not parse object
-test_expect_failure 'Rebase -Xsubtree --onto commit 4' '
+test_expect_success 'Rebase -Xsubtree --keep-empty --onto commit' '
reset_rebase &&
- git checkout -b rebase-onto-4 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --onto files-master master &&
- verbose test "$(commit_message HEAD~2)" = "files_subtree/master4"
+ git checkout -b rebase-onto to-rebase &&
+ test_must_fail git rebase -Xsubtree=files_subtree --keep-empty --onto files-master master &&
+ : first pick results in no changes &&
+ git rebase --continue &&
+ verbose test "$(commit_message HEAD~2)" = "master4" &&
+ verbose test "$(commit_message HEAD~)" = "files_subtree/master5" &&
+ verbose test "$(commit_message HEAD)" = "Empty commit"
'
-# FAILURE: fatal: Could not parse object
-test_expect_failure 'Rebase -Xsubtree --onto commit 5' '
- reset_rebase &&
- git checkout -b rebase-onto-5 master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --onto files-master master &&
- verbose test "$(commit_message HEAD~)" = "files_subtree/master5"
-'
-# FAILURE: fatal: Could not parse object
-test_expect_failure 'Rebase -Xsubtree --onto empty commit' '
+test_expect_success 'Rebase -Xsubtree --keep-empty --rebase-merges --onto commit' '
reset_rebase &&
- git checkout -b rebase-onto-empty master &&
- git filter-branch --prune-empty -f --subdirectory-filter files_subtree &&
- git commit -m "Empty commit" --allow-empty &&
- git rebase -Xsubtree=files_subtree --onto files-master master &&
+ git checkout -b rebase-merges-onto to-rebase &&
+ test_must_fail git rebase -Xsubtree=files_subtree --keep-empty --rebase-merges --onto files-master --root &&
+ : first pick results in no changes &&
+ git rebase --continue &&
+ verbose test "$(commit_message HEAD~2)" = "master4" &&
+ verbose test "$(commit_message HEAD~)" = "files_subtree/master5" &&
verbose test "$(commit_message HEAD)" = "Empty commit"
'
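
The rewritten setup above replaces the hand-rolled commit-tree dance with the standard subtree-merge recipe. The core of that recipe, as used in the new setup (assuming a local branch "files-master" to be grafted under files_subtree/):

	git merge -s ours --no-commit --allow-unrelated-histories files-master
	git read-tree --prefix=files_subtree -u files-master
	git commit -m "Add subproject master"
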
diff --git a/t/t3429-rebase-edit-todo.sh b/t/t3429-rebase-edit-todo.sh
index 76f6d306ea..8739cb60a7 100755
--- a/t/t3429-rebase-edit-todo.sh
+++ b/t/t3429-rebase-edit-todo.sh
@@ -3,9 +3,15 @@
test_description='rebase should reread the todo file if an exec modifies it'
. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-rebase.sh
+
+test_expect_success 'setup' '
+ test_commit first file &&
+ test_commit second file &&
+ test_commit third file
+'
test_expect_success 'rebase exec modifies rebase-todo' '
- test_commit initial &&
todo=.git/rebase-merge/git-rebase-todo &&
git rebase HEAD -x "echo exec touch F >>$todo" &&
test -e F
@@ -33,4 +39,17 @@ test_expect_success SHA1 'loose object cache vs re-reading todo list' '
git rebase HEAD -x "./append-todo.sh 5 6"
'
+test_expect_success 'todo is re-read after reword and squash' '
+ write_script reword-editor.sh <<-\EOS &&
+ GIT_SEQUENCE_EDITOR="echo \"exec echo $(cat file) >>actual\" >>" \
+ git rebase --edit-todo
+ EOS
+
+ test_write_lines first third >expected &&
+ set_fake_editor &&
+ GIT_SEQUENCE_EDITOR="$EDITOR" FAKE_LINES="reword 1 squash 2 fixup 3" \
+ GIT_EDITOR=./reword-editor.sh git rebase -i --root third &&
+ test_cmp expected actual
+'
+
test_done
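
Both tests above rely on an exec line being able to append to the todo list that drives the very rebase it runs in. A minimal sketch of that trick (not part of the patch), matching the first test:

	todo=.git/rebase-merge/git-rebase-todo
	git rebase HEAD -x "echo 'exec touch F' >>$todo"
	test -e F && echo "the appended exec ran, so the todo was re-read"
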
diff --git a/t/t3430-rebase-merges.sh b/t/t3430-rebase-merges.sh
index 7b6c4847ad..9efcf4808a 100755
--- a/t/t3430-rebase-merges.sh
+++ b/t/t3430-rebase-merges.sh
@@ -37,20 +37,27 @@ test_expect_success 'setup' '
test_commit A &&
git checkout -b first &&
test_commit B &&
+ b=$(git rev-parse --short HEAD) &&
git checkout master &&
test_commit C &&
+ c=$(git rev-parse --short HEAD) &&
test_commit D &&
+ d=$(git rev-parse --short HEAD) &&
git merge --no-commit B &&
test_tick &&
git commit -m E &&
git tag -m E E &&
+ e=$(git rev-parse --short HEAD) &&
git checkout -b second C &&
test_commit F &&
+ f=$(git rev-parse --short HEAD) &&
test_commit G &&
+ g=$(git rev-parse --short HEAD) &&
git checkout master &&
git merge --no-commit G &&
test_tick &&
git commit -m H &&
+ h=$(git rev-parse --short HEAD) &&
git tag -m H H &&
git checkout A &&
test_commit conflicting-G G.t
@@ -93,24 +100,24 @@ test_expect_success 'create completely different structure' '
'
test_expect_success 'generate correct todo list' '
- cat >expect <<-\EOF &&
+ cat >expect <<-EOF &&
label onto
reset onto
- pick d9df450 B
+ pick $b B
label E
reset onto
- pick 5dee784 C
+ pick $c C
label branch-point
- pick ca2c861 F
- pick 088b00a G
+ pick $f F
+ pick $g G
label H
reset branch-point # C
- pick 12bd07b D
- merge -C 2051b56 E # E
- merge -C 233d48a H # H
+ pick $d D
+ merge -C $e E # E
+ merge -C $h H # H
EOF
@@ -151,7 +158,6 @@ test_expect_success 'failed `merge -C` writes patch (may be rescheduled, too)' '
test_path_is_file .git/rebase-merge/patch
'
-SQ="'"
test_expect_success 'failed `merge <branch>` does not crash' '
test_when_finished "test_might_fail git rebase --abort" &&
git checkout conflicting-G &&
@@ -441,4 +447,25 @@ test_expect_success '--continue after resolving conflicts after a merge' '
test_path_is_missing .git/MERGE_HEAD
'
+test_expect_success '--rebase-merges with strategies' '
+ git checkout -b with-a-strategy F &&
+ test_tick &&
+ git merge -m "Merge conflicting-G" conflicting-G &&
+
+ : first, test with a merge strategy option &&
+ git rebase -ir -Xtheirs G &&
+ echo conflicting-G >expect &&
+ test_cmp expect G.t &&
+
+ : now, try with a merge strategy other than recursive &&
+ git reset --hard @{1} &&
+ write_script git-merge-override <<-\EOF &&
+ echo overridden$1 >>G.t
+ git add G.t
+ EOF
+ PATH="$PWD:$PATH" git rebase -ir -s override -Xxopt G &&
+ test_write_lines G overridden--xopt >expect &&
+ test_cmp expect G.t
+'
+
test_done
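
The updated expectation above spells out the --rebase-merges todo vocabulary with now-computed short hashes. For reference, the general shape of such a todo list (an illustrative excerpt, not part of the patch; <b>, <d>, <e> stand in for abbreviated commit names):

	label onto
	reset onto
	pick <b> B
	label E
	reset onto
	pick <d> D
	merge -C <e> E # E	# merge the branch labelled E, reusing merge <e>'s message
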
diff --git a/t/t3431-rebase-fork-point.sh b/t/t3431-rebase-fork-point.sh
new file mode 100755
index 0000000000..78851b9a2a
--- /dev/null
+++ b/t/t3431-rebase-fork-point.sh
@@ -0,0 +1,57 @@
+#!/bin/sh
+#
+# Copyright (c) 2019 Denton Liu
+#
+
+test_description='git rebase --fork-point test'
+
+. ./test-lib.sh
+
+# A---B---D---E (master)
+# \
+# C*---F---G (side)
+#
+# C was formerly part of master but master was rewound to remove C
+#
+test_expect_success setup '
+ test_commit A &&
+ test_commit B &&
+ test_commit C &&
+ git branch -t side &&
+ git reset --hard HEAD^ &&
+ test_commit D &&
+ test_commit E &&
+ git checkout side &&
+ test_commit F &&
+ test_commit G
+'
+
+test_rebase () {
+ expected="$1" &&
+ shift &&
+ test_expect_success "git rebase $*" "
+ git checkout master &&
+ git reset --hard E &&
+ git checkout side &&
+ git reset --hard G &&
+ git rebase $* &&
+ test_write_lines $expected >expect &&
+ git log --pretty=%s >actual &&
+ test_cmp expect actual
+ "
+}
+
+test_rebase 'G F E D B A'
+test_rebase 'G F D B A' --onto D
+test_rebase 'G F B A' --keep-base
+test_rebase 'G F C E D B A' --no-fork-point
+test_rebase 'G F C D B A' --no-fork-point --onto D
+test_rebase 'G F C B A' --no-fork-point --keep-base
+test_rebase 'G F E D B A' --fork-point refs/heads/master
+test_rebase 'G F D B A' --fork-point --onto D refs/heads/master
+test_rebase 'G F B A' --fork-point --keep-base refs/heads/master
+test_rebase 'G F C E D B A' refs/heads/master
+test_rebase 'G F C D B A' --onto D refs/heads/master
+test_rebase 'G F C B A' --keep-base refs/heads/master
+
+test_done
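
The history comment at the top of this new script is the key to its expectations: C was rewound out of master, so with fork-point handling it must not be replayed. The underlying query rebase consults (not part of the patch):

	git merge-base --fork-point refs/heads/master side
	# reports C, found via master's reflog; rebase then replays only F and G,
	# which is why C is missing from the --fork-point rows above.
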
diff --git a/t/t3432-rebase-fast-forward.sh b/t/t3432-rebase-fast-forward.sh
new file mode 100755
index 0000000000..034ffc7e76
--- /dev/null
+++ b/t/t3432-rebase-fast-forward.sh
@@ -0,0 +1,125 @@
+#!/bin/sh
+#
+# Copyright (c) 2019 Denton Liu
+#
+
+test_description='ensure rebase fast-forwards commits when possible'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+ test_commit A &&
+ test_commit B &&
+ test_commit C &&
+ test_commit D &&
+ git checkout -t -b side
+'
+
+test_rebase_same_head () {
+ status_n="$1" &&
+ shift &&
+ what_n="$1" &&
+ shift &&
+ cmp_n="$1" &&
+ shift &&
+ status_f="$1" &&
+ shift &&
+ what_f="$1" &&
+ shift &&
+ cmp_f="$1" &&
+ shift &&
+ test_rebase_same_head_ $status_n $what_n $cmp_n "" "$*" &&
+ test_rebase_same_head_ $status_f $what_f $cmp_f " --no-ff" "$*"
+}
+
+test_rebase_same_head_ () {
+ status="$1" &&
+ shift &&
+ what="$1" &&
+ shift &&
+ cmp="$1" &&
+ shift &&
+ flag="$1"
+ shift &&
+ test_expect_$status "git rebase$flag $* with $changes is $what with $cmp HEAD" "
+ oldhead=\$(git rev-parse HEAD) &&
+ test_when_finished 'git reset --hard \$oldhead' &&
+ git rebase$flag $* >stdout &&
+ if test $what = work
+ then
+ # Must check this case first, for 'is up to
+ # date, rebase forced[...]rewinding head' cases
+ test_i18ngrep 'rewinding head' stdout
+ elif test $what = noop
+ then
+ test_i18ngrep 'is up to date' stdout &&
+ test_i18ngrep ! 'rebase forced' stdout
+ elif test $what = noop-force
+ then
+ test_i18ngrep 'is up to date, rebase forced' stdout
+ fi &&
+ newhead=\$(git rev-parse HEAD) &&
+ if test $cmp = same
+ then
+ test_cmp_rev \$oldhead \$newhead
+ elif test $cmp = diff
+ then
+ ! test_cmp_rev \$oldhead \$newhead
+ fi
+ "
+}
+
+changes='no changes'
+test_rebase_same_head success noop same success work same
+test_rebase_same_head success noop same success noop-force same master
+test_rebase_same_head success noop same success noop-force diff --onto B B
+test_rebase_same_head success noop same success noop-force diff --onto B... B
+test_rebase_same_head success noop same success noop-force same --onto master... master
+test_rebase_same_head success noop same success noop-force same --keep-base master
+test_rebase_same_head success noop same success noop-force same --keep-base
+test_rebase_same_head success noop same success noop-force same --no-fork-point
+test_rebase_same_head success noop same success noop-force same --keep-base --no-fork-point
+test_rebase_same_head success noop same success work same --fork-point master
+test_rebase_same_head success noop same success work diff --fork-point --onto B B
+test_rebase_same_head success noop same success work diff --fork-point --onto B... B
+test_rebase_same_head success noop same success work same --fork-point --onto master... master
+test_rebase_same_head success noop same success work same --keep-base --keep-base master
+
+test_expect_success 'add work same to side' '
+ test_commit E
+'
+
+changes='our changes'
+test_rebase_same_head success noop same success work same
+test_rebase_same_head success noop same success noop-force same master
+test_rebase_same_head success noop same success noop-force diff --onto B B
+test_rebase_same_head success noop same success noop-force diff --onto B... B
+test_rebase_same_head success noop same success noop-force same --onto master... master
+test_rebase_same_head success noop same success noop-force same --keep-base master
+test_rebase_same_head success noop same success noop-force same --keep-base
+test_rebase_same_head success noop same success noop-force same --no-fork-point
+test_rebase_same_head success noop same success noop-force same --keep-base --no-fork-point
+test_rebase_same_head success noop same success work same --fork-point master
+test_rebase_same_head success noop same success work diff --fork-point --onto B B
+test_rebase_same_head success noop same success work diff --fork-point --onto B... B
+test_rebase_same_head success noop same success work same --fork-point --onto master... master
+test_rebase_same_head success noop same success work same --fork-point --keep-base master
+
+test_expect_success 'add work same to upstream' '
+ git checkout master &&
+ test_commit F &&
+ git checkout side
+'
+
+changes='our and their changes'
+test_rebase_same_head success noop same success noop-force diff --onto B B
+test_rebase_same_head success noop same success noop-force diff --onto B... B
+test_rebase_same_head success noop same success work diff --onto master... master
+test_rebase_same_head success noop same success work diff --keep-base master
+test_rebase_same_head success noop same success work diff --keep-base
+test_rebase_same_head failure work same success work diff --fork-point --onto B B
+test_rebase_same_head failure work same success work diff --fork-point --onto B... B
+test_rebase_same_head success noop same success work diff --fork-point --onto master... master
+test_rebase_same_head success noop same success work diff --fork-point --keep-base master
+
+test_done
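
Each generated test above boils down to comparing HEAD before and after the rebase and inspecting the "is up to date" / "rewinding head" output. The before/after comparison, reduced to plain shell (not part of the patch):

	oldhead=$(git rev-parse HEAD)
	git rebase master >stdout
	newhead=$(git rev-parse HEAD)
	if test "$oldhead" = "$newhead"
	then
		echo "HEAD unchanged: nothing was rewritten"
	else
		echo "HEAD moved: commits were rewritten or rebased onto a new base"
	fi
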
diff --git a/t/t3506-cherry-pick-ff.sh b/t/t3506-cherry-pick-ff.sh
index 127dd0082f..9d5adbc130 100755
--- a/t/t3506-cherry-pick-ff.sh
+++ b/t/t3506-cherry-pick-ff.sh
@@ -16,7 +16,11 @@ test_expect_success setup '
git add file1 &&
test_tick &&
git commit -m "second" &&
- git tag second
+ git tag second &&
+ test_oid_cache <<-EOF
+ cp_ff sha1:1df192cd8bc58a2b275d842cede4d221ad9000d1
+ cp_ff sha256:e70d6b7fc064bddb516b8d512c9057094b96ce6ff08e12080acc4fe7f1d60a1d
+ EOF
'
test_expect_success 'cherry-pick using --ff fast forwards' '
@@ -102,7 +106,7 @@ test_expect_success 'cherry pick a root commit with --ff' '
git add file2 &&
git commit --amend -m "file2" &&
git cherry-pick --ff first &&
- test "$(git rev-parse --verify HEAD)" = "1df192cd8bc58a2b275d842cede4d221ad9000d1"
+ test "$(git rev-parse --verify HEAD)" = "$(test_oid cp_ff)"
'
test_expect_success 'cherry-pick --ff on unborn branch' '
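
test_oid_cache and test_oid come from t/test-lib-functions.sh and let a test look up a precomputed value per hash algorithm instead of hard-coding a SHA-1. A generic sketch with a made-up key, values copied from the cp_ff entry above (not part of the patch):

	test_oid_cache <<-EOF
	my_key sha1:1df192cd8bc58a2b275d842cede4d221ad9000d1
	my_key sha256:e70d6b7fc064bddb516b8d512c9057094b96ce6ff08e12080acc4fe7f1d60a1d
	EOF
	test_oid my_key		# prints whichever entry matches the algorithm under test
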
diff --git a/t/t3600-rm.sh b/t/t3600-rm.sh
index 66282a720e..8c8cca5bfb 100755
--- a/t/t3600-rm.sh
+++ b/t/t3600-rm.sh
@@ -240,12 +240,14 @@ test_expect_success 'refresh index before checking if it is up-to-date' '
'
test_expect_success 'choking "git rm" should not let it die with cruft' '
+ test_oid_init &&
git reset -q --hard &&
test_when_finished "rm -f .git/index.lock && git reset -q --hard" &&
i=0 &&
+ hash=$(test_oid deadbeef) &&
while test $i -lt 12000
do
- echo "100644 1234567890123456789012345678901234567890 0 some-file-$i"
+ echo "100644 $hash 0 some-file-$i"
i=$(( $i + 1 ))
done | git update-index --index-info &&
git rm -n "some-file-*" | : &&
diff --git a/t/t3701-add-interactive.sh b/t/t3701-add-interactive.sh
index 69991a3168..d50e165ca8 100755
--- a/t/t3701-add-interactive.sh
+++ b/t/t3701-add-interactive.sh
@@ -314,7 +314,7 @@ test_expect_success C_LOCALE_OUTPUT 'add first line works' '
git commit -am "clear local changes" &&
git apply patch &&
printf "%s\n" s y y | git add -p file 2>error |
- sed -n -e "s/^Stage this hunk[^@]*\(@@ .*\)/\1/" \
+ sed -n -e "s/^([1-2]\/[1-2]) Stage this hunk[^@]*\(@@ .*\)/\1/" \
-e "/^[-+@ \\\\]"/p >output &&
test_must_be_empty error &&
git diff --cached >diff &&
diff --git a/t/t3800-mktag.sh b/t/t3800-mktag.sh
index 8eb47942e2..64dcc5ec28 100755
--- a/t/t3800-mktag.sh
+++ b/t/t3800-mktag.sh
@@ -23,6 +23,7 @@ check_verify_failure () {
# first create a commit, so we have a valid object/type
# for the tag.
test_expect_success 'setup' '
+ test_oid_init &&
echo Hello >A &&
git update-index --add A &&
git commit -m "Initial commit" &&
@@ -69,28 +70,28 @@ check_verify_failure '"object" line SHA1 check' '^error: char7: .*SHA1 hash$'
# 4. type line label check
cat >tag.sig <<EOF
-object 779e9b33986b1c2670fff52c5067603117b3e895
+object $head
xxxx tag
tag mytag
tagger . <> 0 +0000
EOF
-check_verify_failure '"type" line label check' '^error: char47: .*"\\ntype "$'
+check_verify_failure '"type" line label check' '^error: char.*: .*"\\ntype "$'
############################################################
# 5. type line eol check
-echo "object 779e9b33986b1c2670fff52c5067603117b3e895" >tag.sig
+echo "object $head" >tag.sig
printf "type tagsssssssssssssssssssssssssssssss" >>tag.sig
-check_verify_failure '"type" line eol check' '^error: char48: .*"\\n"$'
+check_verify_failure '"type" line eol check' '^error: char.*: .*"\\n"$'
############################################################
# 6. tag line label check #1
cat >tag.sig <<EOF
-object 779e9b33986b1c2670fff52c5067603117b3e895
+object $head
type tag
xxx mytag
tagger . <> 0 +0000
@@ -98,37 +99,37 @@ tagger . <> 0 +0000
EOF
check_verify_failure '"tag" line label check #1' \
- '^error: char57: no "tag " found$'
+ '^error: char.*: no "tag " found$'
############################################################
# 7. tag line label check #2
cat >tag.sig <<EOF
-object 779e9b33986b1c2670fff52c5067603117b3e895
+object $head
type taggggggggggggggggggggggggggggggg
tag
EOF
check_verify_failure '"tag" line label check #2' \
- '^error: char87: no "tag " found$'
+ '^error: char.*: no "tag " found$'
############################################################
# 8. type line type-name length check
cat >tag.sig <<EOF
-object 779e9b33986b1c2670fff52c5067603117b3e895
+object $head
type taggggggggggggggggggggggggggggggg
tag mytag
EOF
check_verify_failure '"type" line type-name length check' \
- '^error: char53: type too long$'
+ '^error: char.*: type too long$'
############################################################
# 9. verify object (SHA1/type) check
cat >tag.sig <<EOF
-object 779e9b33986b1c2670fff52c5067603117b3e895
+object $(test_oid deadbeef)
type tagggg
tag mytag
tagger . <> 0 +0000
@@ -150,7 +151,7 @@ tagger . <> 0 +0000
EOF
check_verify_failure 'verify tag-name check' \
- '^error: char67: could not verify tag name$'
+ '^error: char.*: could not verify tag name$'
############################################################
# 11. tagger line label check #1
@@ -164,7 +165,7 @@ This is filler
EOF
check_verify_failure '"tagger" line label check #1' \
- '^error: char70: could not find "tagger "$'
+ '^error: char.*: could not find "tagger "$'
############################################################
# 12. tagger line label check #2
@@ -179,7 +180,7 @@ This is filler
EOF
check_verify_failure '"tagger" line label check #2' \
- '^error: char70: could not find "tagger "$'
+ '^error: char.*: could not find "tagger "$'
############################################################
# 13. disallow missing tag author name
@@ -194,7 +195,7 @@ This is filler
EOF
check_verify_failure 'disallow missing tag author name' \
- '^error: char77: missing tagger name$'
+ '^error: char.*: missing tagger name$'
############################################################
# 14. disallow missing tag author name
@@ -209,7 +210,7 @@ tagger T A Gger <
EOF
check_verify_failure 'disallow malformed tagger' \
- '^error: char77: malformed tagger field$'
+ '^error: char.*: malformed tagger field$'
############################################################
# 15. allow empty tag email
@@ -238,7 +239,7 @@ tagger T A Gger <tag ger@example.com> 0 +0000
EOF
check_verify_failure 'disallow spaces in tag email' \
- '^error: char77: malformed tagger field$'
+ '^error: char.*: malformed tagger field$'
############################################################
# 17. disallow missing tag timestamp
@@ -252,7 +253,7 @@ tagger T A Gger <tagger@example.com>__
EOF
check_verify_failure 'disallow missing tag timestamp' \
- '^error: char107: missing tag timestamp$'
+ '^error: char.*: missing tag timestamp$'
############################################################
# 18. detect invalid tag timestamp1
@@ -266,7 +267,7 @@ tagger T A Gger <tagger@example.com> Tue Mar 25 15:47:44 2008
EOF
check_verify_failure 'detect invalid tag timestamp1' \
- '^error: char107: missing tag timestamp$'
+ '^error: char.*: missing tag timestamp$'
############################################################
# 19. detect invalid tag timestamp2
@@ -280,7 +281,7 @@ tagger T A Gger <tagger@example.com> 2008-03-31T12:20:15-0500
EOF
check_verify_failure 'detect invalid tag timestamp2' \
- '^error: char111: malformed tag timestamp$'
+ '^error: char.*: malformed tag timestamp$'
############################################################
# 20. detect invalid tag timezone1
@@ -294,7 +295,7 @@ tagger T A Gger <tagger@example.com> 1206478233 GMT
EOF
check_verify_failure 'detect invalid tag timezone1' \
- '^error: char118: malformed tag timezone$'
+ '^error: char.*: malformed tag timezone$'
############################################################
# 21. detect invalid tag timezone2
@@ -308,7 +309,7 @@ tagger T A Gger <tagger@example.com> 1206478233 + 30
EOF
check_verify_failure 'detect invalid tag timezone2' \
- '^error: char118: malformed tag timezone$'
+ '^error: char.*: malformed tag timezone$'
############################################################
# 22. detect invalid tag timezone3
@@ -322,7 +323,7 @@ tagger T A Gger <tagger@example.com> 1206478233 -1430
EOF
check_verify_failure 'detect invalid tag timezone3' \
- '^error: char118: malformed tag timezone$'
+ '^error: char.*: malformed tag timezone$'
############################################################
# 23. detect invalid header entry
@@ -337,7 +338,7 @@ this line should not be here
EOF
check_verify_failure 'detect invalid header entry' \
- '^error: char124: trailing garbage in tag header$'
+ '^error: char.*: trailing garbage in tag header$'
############################################################
# 24. create valid tag
diff --git a/t/t3903-stash.sh b/t/t3903-stash.sh
index b8e337893f..580bfbdc23 100755
--- a/t/t3903-stash.sh
+++ b/t/t3903-stash.sh
@@ -7,6 +7,18 @@ test_description='Test git stash'
. ./test-lib.sh
+diff_cmp () {
+ for i in "$1" "$2"
+ do
+ sed -e 's/^index 0000000\.\.[0-9a-f]*/index 0000000..1234567/' \
+ -e 's/^index [0-9a-f]*\.\.[0-9a-f]*/index 1234567..89abcde/' \
+ -e 's/^index [0-9a-f]*,[0-9a-f]*\.\.[0-9a-f]*/index 1234567,7654321..89abcde/' \
+ "$i" >"$i.compare" || return 1
+ done &&
+ test_cmp "$1.compare" "$2.compare" &&
+ rm -f "$1.compare" "$2.compare"
+}
+
test_expect_success 'stash some dirty working directory' '
echo 1 >file &&
git add file &&
@@ -36,7 +48,7 @@ EOF
test_expect_success 'parents of stash' '
test $(git rev-parse stash^) = $(git rev-parse HEAD) &&
git diff stash^2..stash >output &&
- test_cmp expect output
+ diff_cmp expect output
'
test_expect_success 'applying bogus stash does nothing' '
@@ -210,13 +222,13 @@ test_expect_success 'stash branch' '
test refs/heads/stashbranch = $(git symbolic-ref HEAD) &&
test $(git rev-parse HEAD) = $(git rev-parse master^) &&
git diff --cached >output &&
- test_cmp expect output &&
+ diff_cmp expect output &&
git diff >output &&
- test_cmp expect1 output &&
+ diff_cmp expect1 output &&
git add file &&
git commit -m alternate\ second &&
git diff master..stashbranch >output &&
- test_cmp output expect2 &&
+ diff_cmp output expect2 &&
test 0 = $(git stash list | wc -l)
'
@@ -577,7 +589,7 @@ test_expect_success 'stash show -p - stashes on stack, stash-like argument' '
+bar
EOF
git stash show -p ${STASH_ID} >actual &&
- test_cmp expected actual
+ diff_cmp expected actual
'
test_expect_success 'stash show - no stashes on stack, stash-like argument' '
@@ -609,7 +621,7 @@ test_expect_success 'stash show -p - no stashes on stack, stash-like argument' '
+foo
EOF
git stash show -p ${STASH_ID} >actual &&
- test_cmp expected actual
+ diff_cmp expected actual
'
test_expect_success 'stash show --patience shows diff' '
@@ -627,7 +639,7 @@ test_expect_success 'stash show --patience shows diff' '
+foo
EOF
git stash show --patience ${STASH_ID} >actual &&
- test_cmp expected actual
+ diff_cmp expected actual
'
test_expect_success 'drop: fail early if specified stash is not a stash ref' '
@@ -791,7 +803,7 @@ test_expect_success 'stash where working directory contains "HEAD" file' '
git diff-index --cached --quiet HEAD &&
test "$(git rev-parse stash^)" = "$(git rev-parse HEAD)" &&
git diff stash^..stash >output &&
- test_cmp expect output
+ diff_cmp expect output
'
test_expect_success 'store called with invalid commit' '
@@ -847,7 +859,7 @@ test_expect_success 'stash list implies --first-parent -m' '
+working
EOF
git stash list --format=%gd -p >actual &&
- test_cmp expect actual
+ diff_cmp expect actual
'
test_expect_success 'stash list --cc shows combined diff' '
@@ -864,7 +876,7 @@ test_expect_success 'stash list --cc shows combined diff' '
++working
EOF
git stash list --format=%gd -p --cc >actual &&
- test_cmp expect actual
+ diff_cmp expect actual
'
test_expect_success 'stash is not confused by partial renames' '
@@ -1241,4 +1253,20 @@ test_expect_success 'stash --keep-index with file deleted in index does not resu
test_path_is_missing to-remove
'
+test_expect_success 'stash apply should succeed with unmodified file' '
+ echo base >file &&
+ git add file &&
+ git commit -m base &&
+
+ # now stash a modification
+ echo modified >file &&
+ git stash &&
+
+ # make the file stat dirty
+ cp file other &&
+ mv other file &&
+
+ git stash apply
+'
+
test_done
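
diff_cmp, added at the top of this script, makes the stash diffs comparable regardless of hash algorithm by rewriting the blob names in "index ..." lines to fixed placeholders on both sides. The normalization step on its own (not part of the patch):

	sed -e 's/^index [0-9a-f]*\.\.[0-9a-f]*/index 1234567..89abcde/' expect >expect.norm
	sed -e 's/^index [0-9a-f]*\.\.[0-9a-f]*/index 1234567..89abcde/' actual >actual.norm
	diff -u expect.norm actual.norm
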
diff --git a/t/t3908-stash-in-worktree.sh b/t/t3908-stash-in-worktree.sh
new file mode 100755
index 0000000000..2b2b366ef9
--- /dev/null
+++ b/t/t3908-stash-in-worktree.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# Copyright (c) 2019 Johannes E Schindelin
+#
+
+test_description='Test git stash in a worktree'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ test_commit initial &&
+ git worktree add wt &&
+ test_commit -C wt in-worktree
+'
+
+test_expect_success 'apply in subdirectory' '
+ mkdir wt/subdir &&
+ (
+ cd wt/subdir &&
+ echo modified >../initial.t &&
+ git stash &&
+ git stash apply >out
+ ) &&
+ grep "\.\.\/initial\.t" wt/subdir/out
+'
+
+test_done
diff --git a/t/t4000-diff-format.sh b/t/t4000-diff-format.sh
index 8de36b7d12..e5116a76a1 100755
--- a/t/t4000-diff-format.sh
+++ b/t/t4000-diff-format.sh
@@ -78,7 +78,7 @@ test_expect_success 'git diff-files --no-patch --patch shows the patch' '
test_expect_success 'git diff-files --no-patch --patch-with-raw shows the patch and raw data' '
git diff-files --no-patch --patch-with-raw >actual &&
- grep -q "^:100644 100755 .* 0000000000000000000000000000000000000000 M path0\$" actual &&
+ grep -q "^:100644 100755 .* $ZERO_OID M path0\$" actual &&
tail -n +4 actual >actual-patch &&
compare_diff_patch expected actual-patch
'
diff --git a/t/t4002-diff-basic.sh b/t/t4002-diff-basic.sh
index 3a6c21e825..cbcdd10464 100755
--- a/t/t4002-diff-basic.sh
+++ b/t/t4002-diff-basic.sh
@@ -7,123 +7,272 @@ test_description='Test diff raw-output.
'
. ./test-lib.sh
+
. "$TEST_DIRECTORY"/lib-read-tree-m-3way.sh
-cat >.test-plain-OA <<\EOF
-:000000 100644 0000000000000000000000000000000000000000 ccba72ad3888a3520b39efcf780b9ee64167535d A AA
-:000000 100644 0000000000000000000000000000000000000000 7e426fb079479fd67f6d81f984e4ec649a44bc25 A AN
-:100644 000000 bcc68ef997017466d5c9094bcf7692295f588c9a 0000000000000000000000000000000000000000 D DD
-:000000 040000 0000000000000000000000000000000000000000 6d50f65d3bdab91c63444294d38f08aeff328e42 A DF
-:100644 000000 141c1f1642328e4bc46a7d801a71da392e66791e 0000000000000000000000000000000000000000 D DM
-:100644 000000 35abde1506ddf806572ff4d407bd06885d0f8ee9 0000000000000000000000000000000000000000 D DN
-:000000 100644 0000000000000000000000000000000000000000 1d41122ebdd7a640f29d3c9cc4f9d70094374762 A LL
-:100644 100644 03f24c8c4700babccfd28b654e7e8eac402ad6cd 103d9f89b50b9aad03054b579be5e7aa665f2d57 M MD
-:100644 100644 b258508afb7ceb449981bd9d63d2d3e971bf8d34 b431b272d829ff3aa4d1a5085f4394ab4d3305b6 M MM
-:100644 100644 bd084b0c27c7b6cc34f11d6d0509a29be3caf970 a716d58de4a570e0038f5c307bd8db34daea021f M MN
-:100644 100644 40c959f984c8b89a2b02520d17f00d717f024397 2ac547ae9614a00d1b28275de608131f7a0e259f M SS
-:100644 100644 4ac13458899ab908ef3b1128fa378daefc88d356 4c86f9a85fbc5e6804ee2e17a797538fbe785bca M TT
-:040000 040000 7d670fdcdb9929f6c7dac196ff78689cd1c566a1 5e5f22072bb39f6e12cf663a57cb634c76eefb49 M Z
+test_oid_init
+
+test_oid_cache <<\EOF
+aa_1 sha1:ccba72ad3888a3520b39efcf780b9ee64167535d
+aa_1 sha256:9febfbf18197819b2735c45291f138525d2476d59470f98239647544586ba403
+
+aa_2 sha1:6aa2b5335b16431a0ef71e5c0a28be69183cf6a2
+aa_2 sha256:6eaa3437de83f145a4aaa6ba355303075ade547b128ec6a2cd00a81ff7ce7a56
+
+an_1 sha1:7e426fb079479fd67f6d81f984e4ec649a44bc25
+an_1 sha256:8f92a0bec99e399a38e3bd0e1bf19fbf121e0160efb29b857df79d439f1c4536
+
+dd_1 sha1:bcc68ef997017466d5c9094bcf7692295f588c9a
+dd_1 sha256:07e17428b00639b85485d2b01083d219e2f3e3ba8579e9ca44e9cc8dd554d952
+
+df_1 sha1:6d50f65d3bdab91c63444294d38f08aeff328e42
+df_1 sha256:e367cecc27e9bf5451b1c65828cb21938d36a5f8e39c1b03ad6509cc36bb8e9d
+
+df_2 sha1:71420ab81e254145d26d6fc0cddee64c1acd4787
+df_2 sha256:0f0a86d10347ff6921d03a3c954679f3f1d14fa3d5cd82f57b32c09755f3a47d
+
+dfd1 sha1:68a6d8b91da11045cf4aa3a5ab9f2a781c701249
+dfd1 sha256:f3bd3265b02b6978ce86490d8ad026c573639c974b3de1d9faf30d8d5a77d3d5
+
+dm_1 sha1:141c1f1642328e4bc46a7d801a71da392e66791e
+dm_1 sha256:c89f8656e7b94e21ee5fbaf0e2149bbf783c51edbe2ce110349cac13059ee7ed
+
+dm_2 sha1:3c4d8de5fbad08572bab8e10eef8dbb264cf0231
+dm_2 sha256:83a572e37e0c94086294dae2cecc43d9131afd6f6c906e495c78972230b54988
+
+dn_1 sha1:35abde1506ddf806572ff4d407bd06885d0f8ee9
+dn_1 sha256:775d5852582070e620be63327bfa515fab8f71c7ac3e4f0c3cd6267b4377ba28
+
+ll_2 sha1:1d41122ebdd7a640f29d3c9cc4f9d70094374762
+ll_2 sha256:7917b4948a883cfed0a77d3d5a625dc8577d6ddcc3c6c3bbc56c4d4226a2246d
+
+md_1 sha1:03f24c8c4700babccfd28b654e7e8eac402ad6cd
+md_1 sha256:fc9f30369b978595ad685ba11ca9a17de0af16d79cd4b629975f4f1590033902
+
+md_2 sha1:103d9f89b50b9aad03054b579be5e7aa665f2d57
+md_2 sha256:fc78ec75275628762fe520479a6b2398dec295ce7aabcb1d15e5963c7b4e9317
+
+mm_1 sha1:b258508afb7ceb449981bd9d63d2d3e971bf8d34
+mm_1 sha256:a4b7847d228e900e3000285e240c20fd96f9dd41ce1445305f6eada126d4a04a
+
+mm_2 sha1:b431b272d829ff3aa4d1a5085f4394ab4d3305b6
+mm_2 sha256:3f8b83ea36aacf689bcf1a1290a9a8ed341564d32682ea6f76fea9a979186782
+
+mm_3 sha1:19989d4559aae417fedee240ccf2ba315ea4dc2b
+mm_3 sha256:71b3bfc5747ac033fff9ea0ab39ee453a3af2969890e75d6ef547b87544e2681
+
+mn_1 sha1:bd084b0c27c7b6cc34f11d6d0509a29be3caf970
+mn_1 sha256:47a67450583d7a329eb01a7c4ba644945af72c0ed2c7c95eb5a00d6e46d4d483
+
+mn_2 sha1:a716d58de4a570e0038f5c307bd8db34daea021f
+mn_2 sha256:f95104c1ebe27acb84bac25a7be98c71f6b8d3054b21f357a5be0c524ad97e08
+
+nm_1 sha1:c8f25781e8f1792e3e40b74225e20553041b5226
+nm_1 sha256:09baddc7afaa62e62e152c23c9c3ab94bf15a3894031e227e9be7fe68e1f4e49
+
+nm_2 sha1:cdb9a8c3da571502ac30225e9c17beccb8387983
+nm_2 sha256:58b5227956ac2d2a08d0efa513c0ae37430948b16791ea3869a1308dbf05536d
+
+na_1 sha1:15885881ea69115351c09b38371f0348a3fb8c67
+na_1 sha256:18e4fdd1670cd7968ee23d35bfd29e5418d56fb190c840094c1c57ceee0aad8f
+
+nd_1 sha1:a4e179e4291e5536a5e1c82e091052772d2c5a93
+nd_1 sha256:07dac9b01d00956ea0c65bd993d7de4864aeef2ed3cbb1255d9f1d949fcd6df6
+
+ss_1 sha1:40c959f984c8b89a2b02520d17f00d717f024397
+ss_1 sha256:50fc1b5df74d9910db2f9270993484235f15b69b75b01bcfb53e059289d14af9
+
+ss_2 sha1:2ac547ae9614a00d1b28275de608131f7a0e259f
+ss_2 sha256:a90f02e6044f1497d13db587d22ab12f90150a7d1e084afcf96065fab35ae2bc
+
+tt_1 sha1:4ac13458899ab908ef3b1128fa378daefc88d356
+tt_1 sha256:c53113c7dd5060e86b5b251428bd058f6726f66273c6a24bff1c61a04f498dd3
+
+tt_2 sha1:4c86f9a85fbc5e6804ee2e17a797538fbe785bca
+tt_2 sha256:0775f2a296129a7cf2862b46bc0e88c14d593f2773a3e3fb1c5193db6f5a7e77
+
+tt_3 sha1:c4e4a12231b9fa79a0053cb6077fcb21bb5b135a
+tt_3 sha256:47860f93cdd211f96443e0560f21c57ab6c2f4b0ac27ff03651a352e53fe8484
+
+z__1 sha1:7d670fdcdb9929f6c7dac196ff78689cd1c566a1
+z__1 sha256:44d0f37aff5e51cfcfdd1134c93a6419bcca7b9964f792ffcd5f9b4fcba1ee63
+
+z__2 sha1:5e5f22072bb39f6e12cf663a57cb634c76eefb49
+z__2 sha256:d29de162113190fed104eb5f010820cef4e315f89b9326e8497f7219fb737894
+
+z__3 sha1:1ba523955d5160681af65cb776411f574c1e8155
+z__3 sha256:07422d772b07794ab4369a5648e617719f89c2d2212cbeab05d97214b6471636
+
+zaa1 sha1:8acb8e9750e3f644bf323fcf3d338849db106c77
+zaa1 sha256:e79b029282c8abec2d9f3f7faceaf2a1405e02d1f368e66450ae66cf5b68d1f4
+
+zaa2 sha1:6c0b99286d0bce551ac4a7b3dff8b706edff3715
+zaa2 sha256:c82bd78c3e69ea1796e6b1a7a3ba45bb106c50e819296475b862123d3f5cc5a0
+
+zan1 sha1:087494262084cefee7ed484d20c8dc0580791272
+zan1 sha256:4b159eb3804d05599023dd074f771d06d02870f4ab24a7165add8ac3d703b8d3
+
+zdd1 sha1:879007efae624d2b1307214b24a956f0a8d686a8
+zdd1 sha256:eecfdd4d8092dd0363fb6d4548b54c6afc8982c3ed9b34e393f1d6a921d8eaa3
+
+zdm1 sha1:9b541b2275c06e3a7b13f28badf5294e2ae63df4
+zdm1 sha256:ab136e88e19a843c4bf7713d2090d5a2186ba16a6a80dacc12eeddd256a8e556
+
+zdm2 sha1:d77371d15817fcaa57eeec27f770c505ba974ec1
+zdm2 sha256:1c1a5f57363f46a15d95ce8527b3c2c158d88d16853b4acbf81bd20fd2c89a46
+
+zdn1 sha1:beb5d38c55283d280685ea21a0e50cfcc0ca064a
+zdn1 sha256:0f0eca66183617b0aa5ad74b256540329f841470922ca6760263c996d825eb18
+
+zmd1 sha1:d41fda41b7ec4de46b43cb7ea42a45001ae393d5
+zmd1 sha256:1ed32d481852eddf31a0ce12652a0ad14bf5b7a842667b5dbb0b50f35bf1c80a
+
+zmd2 sha1:a79ac3be9377639e1c7d1edf1ae1b3a5f0ccd8a9
+zmd2 sha256:b238da211b404f8917df2d9c6f7030535e904b2186131007a3c292ec6902f933
+
+zmm1 sha1:4ca22bae2527d3d9e1676498a0fba3b355bd1278
+zmm1 sha256:072b1d85b5f34fabc99dfa46008c5418df68302d3e317430006f49b32d244226
+
+zmm2 sha1:61422ba9c2c873416061a88cd40a59a35b576474
+zmm2 sha256:81dd5d2b3c5cda16fef552256aed4e2ea0802a8450a08f308a92142112ff6dda
+
+zmm3 sha1:697aad7715a1e7306ca76290a3dd4208fbaeddfa
+zmm3 sha256:8b10fab49e9be3414aa5e9a93d0e46f9569053440138a7c19a5eb5536d8e95bf
+
+zmn1 sha1:b16d7b25b869f2beb124efa53467d8a1550ad694
+zmn1 sha256:609e4f75d1295e844c826feeba213acb0b6cfc609adfe8ff705b19e3829ae3e9
+
+zmn2 sha1:a5c544c21cfcb07eb80a4d89a5b7d1570002edfd
+zmn2 sha256:d6d03edf2dc1a3b267a8205de5f41a2ff4b03def8c7ae02052b543fb09d589fc
+
+zna1 sha1:d12979c22fff69c59ca9409e7a8fe3ee25eaee80
+zna1 sha256:b37b80e789e8ea32aa323f004628f02013f632124b0282c7fe00a127d3c64c3c
+
+znd1 sha1:a18393c636b98e9bd7296b8b437ea4992b72440c
+znd1 sha256:af92a22eee8c38410a0c9d2b5135a10aeb052cbc7cf675541ed9a67bfcaf7cf9
+
+znm1 sha1:3fdbe17fd013303a2e981e1ca1c6cd6e72789087
+znm1 sha256:f75aeaa0c11e76918e381c105f0752932c6150e941fec565d24fa31098a13dc1
+
+znm2 sha1:7e09d6a3a14bd630913e8c75693cea32157b606d
+znm2 sha256:938d73cfbaa1c902a84fb5b3afd9736aa0590367fb9bd59c6c4d072ce70fcd6d
+EOF
+
+cat >.test-plain-OA <<EOF
+:000000 100644 $(test_oid zero) $(test_oid aa_1) A AA
+:000000 100644 $(test_oid zero) $(test_oid an_1) A AN
+:100644 000000 $(test_oid dd_1) $(test_oid zero) D DD
+:000000 040000 $(test_oid zero) $(test_oid df_1) A DF
+:100644 000000 $(test_oid dm_1) $(test_oid zero) D DM
+:100644 000000 $(test_oid dn_1) $(test_oid zero) D DN
+:000000 100644 $(test_oid zero) $(test_oid ll_2) A LL
+:100644 100644 $(test_oid md_1) $(test_oid md_2) M MD
+:100644 100644 $(test_oid mm_1) $(test_oid mm_2) M MM
+:100644 100644 $(test_oid mn_1) $(test_oid mn_2) M MN
+:100644 100644 $(test_oid ss_1) $(test_oid ss_2) M SS
+:100644 100644 $(test_oid tt_1) $(test_oid tt_2) M TT
+:040000 040000 $(test_oid z__1) $(test_oid z__2) M Z
EOF
-cat >.test-recursive-OA <<\EOF
-:000000 100644 0000000000000000000000000000000000000000 ccba72ad3888a3520b39efcf780b9ee64167535d A AA
-:000000 100644 0000000000000000000000000000000000000000 7e426fb079479fd67f6d81f984e4ec649a44bc25 A AN
-:100644 000000 bcc68ef997017466d5c9094bcf7692295f588c9a 0000000000000000000000000000000000000000 D DD
-:000000 100644 0000000000000000000000000000000000000000 68a6d8b91da11045cf4aa3a5ab9f2a781c701249 A DF/DF
-:100644 000000 141c1f1642328e4bc46a7d801a71da392e66791e 0000000000000000000000000000000000000000 D DM
-:100644 000000 35abde1506ddf806572ff4d407bd06885d0f8ee9 0000000000000000000000000000000000000000 D DN
-:000000 100644 0000000000000000000000000000000000000000 1d41122ebdd7a640f29d3c9cc4f9d70094374762 A LL
-:100644 100644 03f24c8c4700babccfd28b654e7e8eac402ad6cd 103d9f89b50b9aad03054b579be5e7aa665f2d57 M MD
-:100644 100644 b258508afb7ceb449981bd9d63d2d3e971bf8d34 b431b272d829ff3aa4d1a5085f4394ab4d3305b6 M MM
-:100644 100644 bd084b0c27c7b6cc34f11d6d0509a29be3caf970 a716d58de4a570e0038f5c307bd8db34daea021f M MN
-:100644 100644 40c959f984c8b89a2b02520d17f00d717f024397 2ac547ae9614a00d1b28275de608131f7a0e259f M SS
-:100644 100644 4ac13458899ab908ef3b1128fa378daefc88d356 4c86f9a85fbc5e6804ee2e17a797538fbe785bca M TT
-:000000 100644 0000000000000000000000000000000000000000 8acb8e9750e3f644bf323fcf3d338849db106c77 A Z/AA
-:000000 100644 0000000000000000000000000000000000000000 087494262084cefee7ed484d20c8dc0580791272 A Z/AN
-:100644 000000 879007efae624d2b1307214b24a956f0a8d686a8 0000000000000000000000000000000000000000 D Z/DD
-:100644 000000 9b541b2275c06e3a7b13f28badf5294e2ae63df4 0000000000000000000000000000000000000000 D Z/DM
-:100644 000000 beb5d38c55283d280685ea21a0e50cfcc0ca064a 0000000000000000000000000000000000000000 D Z/DN
-:100644 100644 d41fda41b7ec4de46b43cb7ea42a45001ae393d5 a79ac3be9377639e1c7d1edf1ae1b3a5f0ccd8a9 M Z/MD
-:100644 100644 4ca22bae2527d3d9e1676498a0fba3b355bd1278 61422ba9c2c873416061a88cd40a59a35b576474 M Z/MM
-:100644 100644 b16d7b25b869f2beb124efa53467d8a1550ad694 a5c544c21cfcb07eb80a4d89a5b7d1570002edfd M Z/MN
+cat >.test-recursive-OA <<EOF
+:000000 100644 $(test_oid zero) $(test_oid aa_1) A AA
+:000000 100644 $(test_oid zero) $(test_oid an_1) A AN
+:100644 000000 $(test_oid dd_1) $(test_oid zero) D DD
+:000000 100644 $(test_oid zero) $(test_oid dfd1) A DF/DF
+:100644 000000 $(test_oid dm_1) $(test_oid zero) D DM
+:100644 000000 $(test_oid dn_1) $(test_oid zero) D DN
+:000000 100644 $(test_oid zero) $(test_oid ll_2) A LL
+:100644 100644 $(test_oid md_1) $(test_oid md_2) M MD
+:100644 100644 $(test_oid mm_1) $(test_oid mm_2) M MM
+:100644 100644 $(test_oid mn_1) $(test_oid mn_2) M MN
+:100644 100644 $(test_oid ss_1) $(test_oid ss_2) M SS
+:100644 100644 $(test_oid tt_1) $(test_oid tt_2) M TT
+:000000 100644 $(test_oid zero) $(test_oid zaa1) A Z/AA
+:000000 100644 $(test_oid zero) $(test_oid zan1) A Z/AN
+:100644 000000 $(test_oid zdd1) $(test_oid zero) D Z/DD
+:100644 000000 $(test_oid zdm1) $(test_oid zero) D Z/DM
+:100644 000000 $(test_oid zdn1) $(test_oid zero) D Z/DN
+:100644 100644 $(test_oid zmd1) $(test_oid zmd2) M Z/MD
+:100644 100644 $(test_oid zmm1) $(test_oid zmm2) M Z/MM
+:100644 100644 $(test_oid zmn1) $(test_oid zmn2) M Z/MN
EOF
-cat >.test-plain-OB <<\EOF
-:000000 100644 0000000000000000000000000000000000000000 6aa2b5335b16431a0ef71e5c0a28be69183cf6a2 A AA
-:100644 000000 bcc68ef997017466d5c9094bcf7692295f588c9a 0000000000000000000000000000000000000000 D DD
-:000000 100644 0000000000000000000000000000000000000000 71420ab81e254145d26d6fc0cddee64c1acd4787 A DF
-:100644 100644 141c1f1642328e4bc46a7d801a71da392e66791e 3c4d8de5fbad08572bab8e10eef8dbb264cf0231 M DM
-:000000 100644 0000000000000000000000000000000000000000 1d41122ebdd7a640f29d3c9cc4f9d70094374762 A LL
-:100644 000000 03f24c8c4700babccfd28b654e7e8eac402ad6cd 0000000000000000000000000000000000000000 D MD
-:100644 100644 b258508afb7ceb449981bd9d63d2d3e971bf8d34 19989d4559aae417fedee240ccf2ba315ea4dc2b M MM
-:000000 100644 0000000000000000000000000000000000000000 15885881ea69115351c09b38371f0348a3fb8c67 A NA
-:100644 000000 a4e179e4291e5536a5e1c82e091052772d2c5a93 0000000000000000000000000000000000000000 D ND
-:100644 100644 c8f25781e8f1792e3e40b74225e20553041b5226 cdb9a8c3da571502ac30225e9c17beccb8387983 M NM
-:100644 100644 40c959f984c8b89a2b02520d17f00d717f024397 2ac547ae9614a00d1b28275de608131f7a0e259f M SS
-:100644 100644 4ac13458899ab908ef3b1128fa378daefc88d356 c4e4a12231b9fa79a0053cb6077fcb21bb5b135a M TT
-:040000 040000 7d670fdcdb9929f6c7dac196ff78689cd1c566a1 1ba523955d5160681af65cb776411f574c1e8155 M Z
+cat >.test-plain-OB <<EOF
+:000000 100644 $(test_oid zero) $(test_oid aa_2) A AA
+:100644 000000 $(test_oid dd_1) $(test_oid zero) D DD
+:000000 100644 $(test_oid zero) $(test_oid df_2) A DF
+:100644 100644 $(test_oid dm_1) $(test_oid dm_2) M DM
+:000000 100644 $(test_oid zero) $(test_oid ll_2) A LL
+:100644 000000 $(test_oid md_1) $(test_oid zero) D MD
+:100644 100644 $(test_oid mm_1) $(test_oid mm_3) M MM
+:000000 100644 $(test_oid zero) $(test_oid na_1) A NA
+:100644 000000 $(test_oid nd_1) $(test_oid zero) D ND
+:100644 100644 $(test_oid nm_1) $(test_oid nm_2) M NM
+:100644 100644 $(test_oid ss_1) $(test_oid ss_2) M SS
+:100644 100644 $(test_oid tt_1) $(test_oid tt_3) M TT
+:040000 040000 $(test_oid z__1) $(test_oid z__3) M Z
EOF
-cat >.test-recursive-OB <<\EOF
-:000000 100644 0000000000000000000000000000000000000000 6aa2b5335b16431a0ef71e5c0a28be69183cf6a2 A AA
-:100644 000000 bcc68ef997017466d5c9094bcf7692295f588c9a 0000000000000000000000000000000000000000 D DD
-:000000 100644 0000000000000000000000000000000000000000 71420ab81e254145d26d6fc0cddee64c1acd4787 A DF
-:100644 100644 141c1f1642328e4bc46a7d801a71da392e66791e 3c4d8de5fbad08572bab8e10eef8dbb264cf0231 M DM
-:000000 100644 0000000000000000000000000000000000000000 1d41122ebdd7a640f29d3c9cc4f9d70094374762 A LL
-:100644 000000 03f24c8c4700babccfd28b654e7e8eac402ad6cd 0000000000000000000000000000000000000000 D MD
-:100644 100644 b258508afb7ceb449981bd9d63d2d3e971bf8d34 19989d4559aae417fedee240ccf2ba315ea4dc2b M MM
-:000000 100644 0000000000000000000000000000000000000000 15885881ea69115351c09b38371f0348a3fb8c67 A NA
-:100644 000000 a4e179e4291e5536a5e1c82e091052772d2c5a93 0000000000000000000000000000000000000000 D ND
-:100644 100644 c8f25781e8f1792e3e40b74225e20553041b5226 cdb9a8c3da571502ac30225e9c17beccb8387983 M NM
-:100644 100644 40c959f984c8b89a2b02520d17f00d717f024397 2ac547ae9614a00d1b28275de608131f7a0e259f M SS
-:100644 100644 4ac13458899ab908ef3b1128fa378daefc88d356 c4e4a12231b9fa79a0053cb6077fcb21bb5b135a M TT
-:000000 100644 0000000000000000000000000000000000000000 6c0b99286d0bce551ac4a7b3dff8b706edff3715 A Z/AA
-:100644 000000 879007efae624d2b1307214b24a956f0a8d686a8 0000000000000000000000000000000000000000 D Z/DD
-:100644 100644 9b541b2275c06e3a7b13f28badf5294e2ae63df4 d77371d15817fcaa57eeec27f770c505ba974ec1 M Z/DM
-:100644 000000 d41fda41b7ec4de46b43cb7ea42a45001ae393d5 0000000000000000000000000000000000000000 D Z/MD
-:100644 100644 4ca22bae2527d3d9e1676498a0fba3b355bd1278 697aad7715a1e7306ca76290a3dd4208fbaeddfa M Z/MM
-:000000 100644 0000000000000000000000000000000000000000 d12979c22fff69c59ca9409e7a8fe3ee25eaee80 A Z/NA
-:100644 000000 a18393c636b98e9bd7296b8b437ea4992b72440c 0000000000000000000000000000000000000000 D Z/ND
-:100644 100644 3fdbe17fd013303a2e981e1ca1c6cd6e72789087 7e09d6a3a14bd630913e8c75693cea32157b606d M Z/NM
+cat >.test-recursive-OB <<EOF
+:000000 100644 $(test_oid zero) $(test_oid aa_2) A AA
+:100644 000000 $(test_oid dd_1) $(test_oid zero) D DD
+:000000 100644 $(test_oid zero) $(test_oid df_2) A DF
+:100644 100644 $(test_oid dm_1) $(test_oid dm_2) M DM
+:000000 100644 $(test_oid zero) $(test_oid ll_2) A LL
+:100644 000000 $(test_oid md_1) $(test_oid zero) D MD
+:100644 100644 $(test_oid mm_1) $(test_oid mm_3) M MM
+:000000 100644 $(test_oid zero) $(test_oid na_1) A NA
+:100644 000000 $(test_oid nd_1) $(test_oid zero) D ND
+:100644 100644 $(test_oid nm_1) $(test_oid nm_2) M NM
+:100644 100644 $(test_oid ss_1) $(test_oid ss_2) M SS
+:100644 100644 $(test_oid tt_1) $(test_oid tt_3) M TT
+:000000 100644 $(test_oid zero) $(test_oid zaa2) A Z/AA
+:100644 000000 $(test_oid zdd1) $(test_oid zero) D Z/DD
+:100644 100644 $(test_oid zdm1) $(test_oid zdm2) M Z/DM
+:100644 000000 $(test_oid zmd1) $(test_oid zero) D Z/MD
+:100644 100644 $(test_oid zmm1) $(test_oid zmm3) M Z/MM
+:000000 100644 $(test_oid zero) $(test_oid zna1) A Z/NA
+:100644 000000 $(test_oid znd1) $(test_oid zero) D Z/ND
+:100644 100644 $(test_oid znm1) $(test_oid znm2) M Z/NM
EOF
-cat >.test-plain-AB <<\EOF
-:100644 100644 ccba72ad3888a3520b39efcf780b9ee64167535d 6aa2b5335b16431a0ef71e5c0a28be69183cf6a2 M AA
-:100644 000000 7e426fb079479fd67f6d81f984e4ec649a44bc25 0000000000000000000000000000000000000000 D AN
-:000000 100644 0000000000000000000000000000000000000000 71420ab81e254145d26d6fc0cddee64c1acd4787 A DF
-:040000 000000 6d50f65d3bdab91c63444294d38f08aeff328e42 0000000000000000000000000000000000000000 D DF
-:000000 100644 0000000000000000000000000000000000000000 3c4d8de5fbad08572bab8e10eef8dbb264cf0231 A DM
-:000000 100644 0000000000000000000000000000000000000000 35abde1506ddf806572ff4d407bd06885d0f8ee9 A DN
-:100644 000000 103d9f89b50b9aad03054b579be5e7aa665f2d57 0000000000000000000000000000000000000000 D MD
-:100644 100644 b431b272d829ff3aa4d1a5085f4394ab4d3305b6 19989d4559aae417fedee240ccf2ba315ea4dc2b M MM
-:100644 100644 a716d58de4a570e0038f5c307bd8db34daea021f bd084b0c27c7b6cc34f11d6d0509a29be3caf970 M MN
-:000000 100644 0000000000000000000000000000000000000000 15885881ea69115351c09b38371f0348a3fb8c67 A NA
-:100644 000000 a4e179e4291e5536a5e1c82e091052772d2c5a93 0000000000000000000000000000000000000000 D ND
-:100644 100644 c8f25781e8f1792e3e40b74225e20553041b5226 cdb9a8c3da571502ac30225e9c17beccb8387983 M NM
-:100644 100644 4c86f9a85fbc5e6804ee2e17a797538fbe785bca c4e4a12231b9fa79a0053cb6077fcb21bb5b135a M TT
-:040000 040000 5e5f22072bb39f6e12cf663a57cb634c76eefb49 1ba523955d5160681af65cb776411f574c1e8155 M Z
+cat >.test-plain-AB <<EOF
+:100644 100644 $(test_oid aa_1) $(test_oid aa_2) M AA
+:100644 000000 $(test_oid an_1) $(test_oid zero) D AN
+:000000 100644 $(test_oid zero) $(test_oid df_2) A DF
+:040000 000000 $(test_oid df_1) $(test_oid zero) D DF
+:000000 100644 $(test_oid zero) $(test_oid dm_2) A DM
+:000000 100644 $(test_oid zero) $(test_oid dn_1) A DN
+:100644 000000 $(test_oid md_2) $(test_oid zero) D MD
+:100644 100644 $(test_oid mm_2) $(test_oid mm_3) M MM
+:100644 100644 $(test_oid mn_2) $(test_oid mn_1) M MN
+:000000 100644 $(test_oid zero) $(test_oid na_1) A NA
+:100644 000000 $(test_oid nd_1) $(test_oid zero) D ND
+:100644 100644 $(test_oid nm_1) $(test_oid nm_2) M NM
+:100644 100644 $(test_oid tt_2) $(test_oid tt_3) M TT
+:040000 040000 $(test_oid z__2) $(test_oid z__3) M Z
EOF
-cat >.test-recursive-AB <<\EOF
-:100644 100644 ccba72ad3888a3520b39efcf780b9ee64167535d 6aa2b5335b16431a0ef71e5c0a28be69183cf6a2 M AA
-:100644 000000 7e426fb079479fd67f6d81f984e4ec649a44bc25 0000000000000000000000000000000000000000 D AN
-:000000 100644 0000000000000000000000000000000000000000 71420ab81e254145d26d6fc0cddee64c1acd4787 A DF
-:100644 000000 68a6d8b91da11045cf4aa3a5ab9f2a781c701249 0000000000000000000000000000000000000000 D DF/DF
-:000000 100644 0000000000000000000000000000000000000000 3c4d8de5fbad08572bab8e10eef8dbb264cf0231 A DM
-:000000 100644 0000000000000000000000000000000000000000 35abde1506ddf806572ff4d407bd06885d0f8ee9 A DN
-:100644 000000 103d9f89b50b9aad03054b579be5e7aa665f2d57 0000000000000000000000000000000000000000 D MD
-:100644 100644 b431b272d829ff3aa4d1a5085f4394ab4d3305b6 19989d4559aae417fedee240ccf2ba315ea4dc2b M MM
-:100644 100644 a716d58de4a570e0038f5c307bd8db34daea021f bd084b0c27c7b6cc34f11d6d0509a29be3caf970 M MN
-:000000 100644 0000000000000000000000000000000000000000 15885881ea69115351c09b38371f0348a3fb8c67 A NA
-:100644 000000 a4e179e4291e5536a5e1c82e091052772d2c5a93 0000000000000000000000000000000000000000 D ND
-:100644 100644 c8f25781e8f1792e3e40b74225e20553041b5226 cdb9a8c3da571502ac30225e9c17beccb8387983 M NM
-:100644 100644 4c86f9a85fbc5e6804ee2e17a797538fbe785bca c4e4a12231b9fa79a0053cb6077fcb21bb5b135a M TT
-:100644 100644 8acb8e9750e3f644bf323fcf3d338849db106c77 6c0b99286d0bce551ac4a7b3dff8b706edff3715 M Z/AA
-:100644 000000 087494262084cefee7ed484d20c8dc0580791272 0000000000000000000000000000000000000000 D Z/AN
-:000000 100644 0000000000000000000000000000000000000000 d77371d15817fcaa57eeec27f770c505ba974ec1 A Z/DM
-:000000 100644 0000000000000000000000000000000000000000 beb5d38c55283d280685ea21a0e50cfcc0ca064a A Z/DN
-:100644 000000 a79ac3be9377639e1c7d1edf1ae1b3a5f0ccd8a9 0000000000000000000000000000000000000000 D Z/MD
-:100644 100644 61422ba9c2c873416061a88cd40a59a35b576474 697aad7715a1e7306ca76290a3dd4208fbaeddfa M Z/MM
-:100644 100644 a5c544c21cfcb07eb80a4d89a5b7d1570002edfd b16d7b25b869f2beb124efa53467d8a1550ad694 M Z/MN
-:000000 100644 0000000000000000000000000000000000000000 d12979c22fff69c59ca9409e7a8fe3ee25eaee80 A Z/NA
-:100644 000000 a18393c636b98e9bd7296b8b437ea4992b72440c 0000000000000000000000000000000000000000 D Z/ND
-:100644 100644 3fdbe17fd013303a2e981e1ca1c6cd6e72789087 7e09d6a3a14bd630913e8c75693cea32157b606d M Z/NM
+cat >.test-recursive-AB <<EOF
+:100644 100644 $(test_oid aa_1) $(test_oid aa_2) M AA
+:100644 000000 $(test_oid an_1) $(test_oid zero) D AN
+:000000 100644 $(test_oid zero) $(test_oid df_2) A DF
+:100644 000000 $(test_oid dfd1) $(test_oid zero) D DF/DF
+:000000 100644 $(test_oid zero) $(test_oid dm_2) A DM
+:000000 100644 $(test_oid zero) $(test_oid dn_1) A DN
+:100644 000000 $(test_oid md_2) $(test_oid zero) D MD
+:100644 100644 $(test_oid mm_2) $(test_oid mm_3) M MM
+:100644 100644 $(test_oid mn_2) $(test_oid mn_1) M MN
+:000000 100644 $(test_oid zero) $(test_oid na_1) A NA
+:100644 000000 $(test_oid nd_1) $(test_oid zero) D ND
+:100644 100644 $(test_oid nm_1) $(test_oid nm_2) M NM
+:100644 100644 $(test_oid tt_2) $(test_oid tt_3) M TT
+:100644 100644 $(test_oid zaa1) $(test_oid zaa2) M Z/AA
+:100644 000000 $(test_oid zan1) $(test_oid zero) D Z/AN
+:000000 100644 $(test_oid zero) $(test_oid zdm2) A Z/DM
+:000000 100644 $(test_oid zero) $(test_oid zdn1) A Z/DN
+:100644 000000 $(test_oid zmd2) $(test_oid zero) D Z/MD
+:100644 100644 $(test_oid zmm2) $(test_oid zmm3) M Z/MM
+:100644 100644 $(test_oid zmn2) $(test_oid zmn1) M Z/MN
+:000000 100644 $(test_oid zero) $(test_oid zna1) A Z/NA
+:100644 000000 $(test_oid znd1) $(test_oid zero) D Z/ND
+:100644 100644 $(test_oid znm1) $(test_oid znm2) M Z/NM
EOF
cmp_diff_files_output () {
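
The bulk of the t4002 change above is mechanical: each hard-coded SHA-1 in the expected raw output is registered once under a short key via test_oid_cache (with both its sha1 and sha256 value) and then looked up with test_oid, so the same heredocs stay valid for either hash function; the heredoc delimiters also change from <<\EOF to <<EOF so the $(test_oid ...) substitutions are actually expanded. A minimal sketch of the pattern, assuming test-lib.sh has been sourced (it provides test_oid_init, test_oid_cache and test_oid; "blob_a" is just an illustrative key, with values taken from the aa_1 entry above):

	test_oid_init
	test_oid_cache <<\EOF
	blob_a sha1:ccba72ad3888a3520b39efcf780b9ee64167535d
	blob_a sha256:9febfbf18197819b2735c45291f138525d2476d59470f98239647544586ba403
	EOF
	# "zero" comes from the test library's stock oid data and expands to
	# the all-zero object name of whichever algorithm is under test.
	echo ":000000 100644 $(test_oid zero) $(test_oid blob_a) A AA"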
diff --git a/t/t4009-diff-rename-4.sh b/t/t4009-diff-rename-4.sh
index 3641fd84d6..b63bdf031f 100755
--- a/t/t4009-diff-rename-4.sh
+++ b/t/t4009-diff-rename-4.sh
@@ -14,6 +14,7 @@ test_expect_success \
'cat "$TEST_DIRECTORY"/diff-lib/COPYING >COPYING &&
echo frotz >rezrov &&
git update-index --add COPYING rezrov &&
+ orig=$(git hash-object COPYING) &&
tree=$(git write-tree) &&
echo $tree'
@@ -22,6 +23,8 @@ test_expect_success \
'sed -e 's/HOWEVER/However/' <COPYING >COPYING.1 &&
sed -e 's/GPL/G.P.L/g' <COPYING >COPYING.2 &&
rm -f COPYING &&
+ c1=$(git hash-object COPYING.1) &&
+ c2=$(git hash-object COPYING.2) &&
git update-index --add --remove COPYING COPYING.?'
# tree has COPYING and rezrov. work tree has COPYING.1 and COPYING.2,
@@ -31,11 +34,11 @@ test_expect_success \
git diff-index -z -C $tree >current
-cat >expected <<\EOF
-:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 0603b3238a076dc6c8022aedc6648fa523a17178 C1234
+cat >expected <<EOF
+:100644 100644 $orig $c1 C1234
COPYING
COPYING.1
-:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 06c67961bbaed34a127f76d261f4c0bf73eda471 R1234
+:100644 100644 $orig $c2 R1234
COPYING
COPYING.2
EOF
@@ -57,10 +60,10 @@ test_expect_success \
# about rezrov.
git diff-index -z -C $tree >current
-cat >expected <<\EOF
-:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 06c67961bbaed34a127f76d261f4c0bf73eda471 M
+cat >expected <<EOF
+:100644 100644 $orig $c2 M
COPYING
-:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 0603b3238a076dc6c8022aedc6648fa523a17178 C1234
+:100644 100644 $orig $c1 C1234
COPYING
COPYING.1
EOF
@@ -82,8 +85,8 @@ test_expect_success \
git update-index --add --remove COPYING COPYING.1'
git diff-index -z -C --find-copies-harder $tree >current
-cat >expected <<\EOF
-:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 0603b3238a076dc6c8022aedc6648fa523a17178 C1234
+cat >expected <<EOF
+:100644 100644 $orig $c1 C1234
COPYING
COPYING.1
EOF
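
A detail worth noticing in the t4009 hunks (as in t4002 above): the heredoc delimiter loses its backslash, <<\EOF becoming <<EOF, precisely so that $orig, $c1 and $c2 — captured earlier with git hash-object — are expanded inside the expected output rather than appearing literally. A quick standalone illustration of that shell behavior (hypothetical variable name):

	oid=0123abc
	# Quoted delimiter: no expansion, prints the literal string "$oid".
	cat <<\EOF
	$oid
	EOF
	# Unquoted delimiter: parameter expansion happens, prints "0123abc".
	cat <<EOF
	$oid
	EOF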
diff --git a/t/t4013-diff-various.sh b/t/t4013-diff-various.sh
index a9054d2db1..5ac94b390d 100755
--- a/t/t4013-diff-various.sh
+++ b/t/t4013-diff-various.sh
@@ -7,9 +7,6 @@ test_description='Various diff formatting options'
. ./test-lib.sh
-LF='
-'
-
test_expect_success setup '
GIT_AUTHOR_DATE="2006-06-26 00:00:00 +0000" &&
diff --git a/t/t4014-format-patch.sh b/t/t4014-format-patch.sh
index ca7debf1d4..72b09896cf 100755
--- a/t/t4014-format-patch.sh
+++ b/t/t4014-format-patch.sh
@@ -9,7 +9,6 @@ test_description='various format-patch tests'
. "$TEST_DIRECTORY"/lib-terminal.sh
test_expect_success setup '
-
for i in 1 2 3 4 5 6 7 8 9 10; do echo "$i"; done >file &&
cat file >elif &&
git add file elif &&
@@ -34,7 +33,8 @@ test_expect_success setup '
git commit -m "Side changes #3 with \\n backslash-n in it." &&
git checkout master &&
- git diff-tree -p C2 | git apply --index &&
+ git diff-tree -p C2 >patch &&
+ git apply --index <patch &&
test_tick &&
git commit -m "Master accepts moral equivalent of #2" &&
@@ -59,33 +59,28 @@ test_expect_success setup '
git checkout master
'
-test_expect_success "format-patch --ignore-if-in-upstream" '
-
+test_expect_success 'format-patch --ignore-if-in-upstream' '
git format-patch --stdout master..side >patch0 &&
- cnt=$(grep "^From " patch0 | wc -l) &&
- test $cnt = 3
-
+ grep "^From " patch0 >from0 &&
+ test_line_count = 3 from0
'
-test_expect_success "format-patch --ignore-if-in-upstream" '
-
+test_expect_success 'format-patch --ignore-if-in-upstream' '
git format-patch --stdout \
--ignore-if-in-upstream master..side >patch1 &&
- cnt=$(grep "^From " patch1 | wc -l) &&
- test $cnt = 2
-
+ grep "^From " patch1 >from1 &&
+ test_line_count = 2 from1
'
-test_expect_success "format-patch --ignore-if-in-upstream handles tags" '
+test_expect_success 'format-patch --ignore-if-in-upstream handles tags' '
git tag -a v1 -m tag side &&
git tag -a v2 -m tag master &&
git format-patch --stdout --ignore-if-in-upstream v2..v1 >patch1 &&
- cnt=$(grep "^From " patch1 | wc -l) &&
- test $cnt = 2
+ grep "^From " patch1 >from1 &&
+ test_line_count = 2 from1
'
test_expect_success "format-patch doesn't consider merge commits" '
-
git checkout -b slave master &&
echo "Another line" >>file &&
test_tick &&
@@ -96,148 +91,138 @@ test_expect_success "format-patch doesn't consider merge commits" '
git checkout -b merger master &&
test_tick &&
git merge --no-ff slave &&
- cnt=$(git format-patch -3 --stdout | grep "^From " | wc -l) &&
- test $cnt = 3
+ git format-patch -3 --stdout >patch &&
+ grep "^From " patch >from &&
+ test_line_count = 3 from
'
-test_expect_success "format-patch result applies" '
-
+test_expect_success 'format-patch result applies' '
git checkout -b rebuild-0 master &&
git am -3 patch0 &&
- cnt=$(git rev-list master.. | wc -l) &&
- test $cnt = 2
+ git rev-list master.. >list &&
+ test_line_count = 2 list
'
-test_expect_success "format-patch --ignore-if-in-upstream result applies" '
-
+test_expect_success 'format-patch --ignore-if-in-upstream result applies' '
git checkout -b rebuild-1 master &&
git am -3 patch1 &&
- cnt=$(git rev-list master.. | wc -l) &&
- test $cnt = 2
+ git rev-list master.. >list &&
+ test_line_count = 2 list
'
test_expect_success 'commit did not screw up the log message' '
-
- git cat-file commit side | grep "^Side .* with .* backslash-n"
-
+ git cat-file commit side >actual &&
+ grep "^Side .* with .* backslash-n" actual
'
test_expect_success 'format-patch did not screw up the log message' '
-
grep "^Subject: .*Side changes #3 with .* backslash-n" patch0 &&
grep "^Subject: .*Side changes #3 with .* backslash-n" patch1
-
'
test_expect_success 'replay did not screw up the log message' '
-
- git cat-file commit rebuild-1 | grep "^Side .* with .* backslash-n"
-
+ git cat-file commit rebuild-1 >actual &&
+ grep "^Side .* with .* backslash-n" actual
'
test_expect_success 'extra headers' '
-
git config format.headers "To: R E Cipient <rcipient@example.com>
" &&
git config --add format.headers "Cc: S E Cipient <scipient@example.com>
" &&
- git format-patch --stdout master..side > patch2 &&
- sed -e "/^\$/q" patch2 > hdrs2 &&
+ git format-patch --stdout master..side >patch2 &&
+ sed -e "/^\$/q" patch2 >hdrs2 &&
grep "^To: R E Cipient <rcipient@example.com>\$" hdrs2 &&
grep "^Cc: S E Cipient <scipient@example.com>\$" hdrs2
-
'
test_expect_success 'extra headers without newlines' '
-
git config --replace-all format.headers "To: R E Cipient <rcipient@example.com>" &&
git config --add format.headers "Cc: S E Cipient <scipient@example.com>" &&
git format-patch --stdout master..side >patch3 &&
- sed -e "/^\$/q" patch3 > hdrs3 &&
+ sed -e "/^\$/q" patch3 >hdrs3 &&
grep "^To: R E Cipient <rcipient@example.com>\$" hdrs3 &&
grep "^Cc: S E Cipient <scipient@example.com>\$" hdrs3
-
'
test_expect_success 'extra headers with multiple To:s' '
-
git config --replace-all format.headers "To: R E Cipient <rcipient@example.com>" &&
git config --add format.headers "To: S E Cipient <scipient@example.com>" &&
- git format-patch --stdout master..side > patch4 &&
- sed -e "/^\$/q" patch4 > hdrs4 &&
+ git format-patch --stdout master..side >patch4 &&
+ sed -e "/^\$/q" patch4 >hdrs4 &&
grep "^To: R E Cipient <rcipient@example.com>,\$" hdrs4 &&
grep "^ *S E Cipient <scipient@example.com>\$" hdrs4
'
test_expect_success 'additional command line cc (ascii)' '
-
git config --replace-all format.headers "Cc: R E Cipient <rcipient@example.com>" &&
- git format-patch --cc="S E Cipient <scipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch5 &&
- grep "^Cc: R E Cipient <rcipient@example.com>,\$" patch5 &&
- grep "^ *S E Cipient <scipient@example.com>\$" patch5
+ git format-patch --cc="S E Cipient <scipient@example.com>" --stdout master..side >patch5 &&
+ sed -e "/^\$/q" patch5 >hdrs5 &&
+ grep "^Cc: R E Cipient <rcipient@example.com>,\$" hdrs5 &&
+ grep "^ *S E Cipient <scipient@example.com>\$" hdrs5
'
test_expect_failure 'additional command line cc (rfc822)' '
-
git config --replace-all format.headers "Cc: R E Cipient <rcipient@example.com>" &&
- git format-patch --cc="S. E. Cipient <scipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch5 &&
- grep "^Cc: R E Cipient <rcipient@example.com>,\$" patch5 &&
- grep "^ *\"S. E. Cipient\" <scipient@example.com>\$" patch5
+ git format-patch --cc="S. E. Cipient <scipient@example.com>" --stdout master..side >patch5 &&
+ sed -e "/^\$/q" patch5 >hdrs5 &&
+ grep "^Cc: R E Cipient <rcipient@example.com>,\$" hdrs5 &&
+ grep "^ *\"S. E. Cipient\" <scipient@example.com>\$" hdrs5
'
test_expect_success 'command line headers' '
-
git config --unset-all format.headers &&
- git format-patch --add-header="Cc: R E Cipient <rcipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch6 &&
- grep "^Cc: R E Cipient <rcipient@example.com>\$" patch6
+ git format-patch --add-header="Cc: R E Cipient <rcipient@example.com>" --stdout master..side >patch6 &&
+ sed -e "/^\$/q" patch6 >hdrs6 &&
+ grep "^Cc: R E Cipient <rcipient@example.com>\$" hdrs6
'
test_expect_success 'configuration headers and command line headers' '
-
git config --replace-all format.headers "Cc: R E Cipient <rcipient@example.com>" &&
- git format-patch --add-header="Cc: S E Cipient <scipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch7 &&
- grep "^Cc: R E Cipient <rcipient@example.com>,\$" patch7 &&
- grep "^ *S E Cipient <scipient@example.com>\$" patch7
+ git format-patch --add-header="Cc: S E Cipient <scipient@example.com>" --stdout master..side >patch7 &&
+ sed -e "/^\$/q" patch7 >hdrs7 &&
+ grep "^Cc: R E Cipient <rcipient@example.com>,\$" hdrs7 &&
+ grep "^ *S E Cipient <scipient@example.com>\$" hdrs7
'
test_expect_success 'command line To: header (ascii)' '
-
git config --unset-all format.headers &&
- git format-patch --to="R E Cipient <rcipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch8 &&
- grep "^To: R E Cipient <rcipient@example.com>\$" patch8
+ git format-patch --to="R E Cipient <rcipient@example.com>" --stdout master..side >patch8 &&
+ sed -e "/^\$/q" patch8 >hdrs8 &&
+ grep "^To: R E Cipient <rcipient@example.com>\$" hdrs8
'
test_expect_failure 'command line To: header (rfc822)' '
-
- git format-patch --to="R. E. Cipient <rcipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch8 &&
- grep "^To: \"R. E. Cipient\" <rcipient@example.com>\$" patch8
+ git format-patch --to="R. E. Cipient <rcipient@example.com>" --stdout master..side >patch8 &&
+ sed -e "/^\$/q" patch8 >hdrs8 &&
+ grep "^To: \"R. E. Cipient\" <rcipient@example.com>\$" hdrs8
'
test_expect_failure 'command line To: header (rfc2047)' '
-
- git format-patch --to="R Ä Cipient <rcipient@example.com>" --stdout master..side | sed -e "/^\$/q" >patch8 &&
- grep "^To: =?UTF-8?q?R=20=C3=84=20Cipient?= <rcipient@example.com>\$" patch8
+ git format-patch --to="R Ä Cipient <rcipient@example.com>" --stdout master..side >patch8 &&
+ sed -e "/^\$/q" patch8 >hdrs8 &&
+ grep "^To: =?UTF-8?q?R=20=C3=84=20Cipient?= <rcipient@example.com>\$" hdrs8
'
test_expect_success 'configuration To: header (ascii)' '
-
git config format.to "R E Cipient <rcipient@example.com>" &&
- git format-patch --stdout master..side | sed -e "/^\$/q" >patch9 &&
- grep "^To: R E Cipient <rcipient@example.com>\$" patch9
+ git format-patch --stdout master..side >patch9 &&
+ sed -e "/^\$/q" patch9 >hdrs9 &&
+ grep "^To: R E Cipient <rcipient@example.com>\$" hdrs9
'
test_expect_failure 'configuration To: header (rfc822)' '
-
git config format.to "R. E. Cipient <rcipient@example.com>" &&
- git format-patch --stdout master..side | sed -e "/^\$/q" >patch9 &&
- grep "^To: \"R. E. Cipient\" <rcipient@example.com>\$" patch9
+ git format-patch --stdout master..side >patch9 &&
+ sed -e "/^\$/q" patch9 >hdrs9 &&
+ grep "^To: \"R. E. Cipient\" <rcipient@example.com>\$" hdrs9
'
test_expect_failure 'configuration To: header (rfc2047)' '
-
git config format.to "R Ä Cipient <rcipient@example.com>" &&
- git format-patch --stdout master..side | sed -e "/^\$/q" >patch9 &&
- grep "^To: =?UTF-8?q?R=20=C3=84=20Cipient?= <rcipient@example.com>\$" patch9
+ git format-patch --stdout master..side >patch9 &&
+ sed -e "/^\$/q" patch9 >hdrs9 &&
+ grep "^To: =?UTF-8?q?R=20=C3=84=20Cipient?= <rcipient@example.com>\$" hdrs9
'
# check_patch <patch>: Verify that <patch> looks like a half-sane
@@ -249,89 +234,79 @@ check_patch () {
}
test_expect_success 'format.from=false' '
-
- git -c format.from=false format-patch --stdout master..side |
- sed -e "/^\$/q" >patch &&
+ git -c format.from=false format-patch --stdout master..side >patch &&
+ sed -e "/^\$/q" patch >hdrs &&
check_patch patch &&
- ! grep "^From: C O Mitter <committer@example.com>\$" patch
+ ! grep "^From: C O Mitter <committer@example.com>\$" hdrs
'
test_expect_success 'format.from=true' '
-
- git -c format.from=true format-patch --stdout master..side |
- sed -e "/^\$/q" >patch &&
- check_patch patch &&
- grep "^From: C O Mitter <committer@example.com>\$" patch
+ git -c format.from=true format-patch --stdout master..side >patch &&
+ sed -e "/^\$/q" patch >hdrs &&
+ check_patch hdrs &&
+ grep "^From: C O Mitter <committer@example.com>\$" hdrs
'
test_expect_success 'format.from with address' '
-
- git -c format.from="F R Om <from@example.com>" format-patch --stdout master..side |
- sed -e "/^\$/q" >patch &&
- check_patch patch &&
- grep "^From: F R Om <from@example.com>\$" patch
+ git -c format.from="F R Om <from@example.com>" format-patch --stdout master..side >patch &&
+ sed -e "/^\$/q" patch >hdrs &&
+ check_patch hdrs &&
+ grep "^From: F R Om <from@example.com>\$" hdrs
'
test_expect_success '--no-from overrides format.from' '
-
- git -c format.from="F R Om <from@example.com>" format-patch --no-from --stdout master..side |
- sed -e "/^\$/q" >patch &&
- check_patch patch &&
- ! grep "^From: F R Om <from@example.com>\$" patch
+ git -c format.from="F R Om <from@example.com>" format-patch --no-from --stdout master..side >patch &&
+ sed -e "/^\$/q" patch >hdrs &&
+ check_patch hdrs &&
+ ! grep "^From: F R Om <from@example.com>\$" hdrs
'
test_expect_success '--from overrides format.from' '
-
- git -c format.from="F R Om <from@example.com>" format-patch --from --stdout master..side |
- sed -e "/^\$/q" >patch &&
- check_patch patch &&
- ! grep "^From: F R Om <from@example.com>\$" patch
+ git -c format.from="F R Om <from@example.com>" format-patch --from --stdout master..side >patch &&
+ sed -e "/^\$/q" patch >hdrs &&
+ check_patch hdrs &&
+ ! grep "^From: F R Om <from@example.com>\$" hdrs
'
test_expect_success '--no-to overrides config.to' '
-
git config --replace-all format.to \
"R E Cipient <rcipient@example.com>" &&
- git format-patch --no-to --stdout master..side |
- sed -e "/^\$/q" >patch10 &&
- check_patch patch10 &&
- ! grep "^To: R E Cipient <rcipient@example.com>\$" patch10
+ git format-patch --no-to --stdout master..side >patch10 &&
+ sed -e "/^\$/q" patch10 >hdrs10 &&
+ check_patch hdrs10 &&
+ ! grep "^To: R E Cipient <rcipient@example.com>\$" hdrs10
'
test_expect_success '--no-to and --to replaces config.to' '
-
git config --replace-all format.to \
"Someone <someone@out.there>" &&
git format-patch --no-to --to="Someone Else <else@out.there>" \
- --stdout master..side |
- sed -e "/^\$/q" >patch11 &&
- check_patch patch11 &&
- ! grep "^To: Someone <someone@out.there>\$" patch11 &&
- grep "^To: Someone Else <else@out.there>\$" patch11
+ --stdout master..side >patch11 &&
+ sed -e "/^\$/q" patch11 >hdrs11 &&
+ check_patch hdrs11 &&
+ ! grep "^To: Someone <someone@out.there>\$" hdrs11 &&
+ grep "^To: Someone Else <else@out.there>\$" hdrs11
'
test_expect_success '--no-cc overrides config.cc' '
-
git config --replace-all format.cc \
"C E Cipient <rcipient@example.com>" &&
- git format-patch --no-cc --stdout master..side |
- sed -e "/^\$/q" >patch12 &&
- check_patch patch12 &&
- ! grep "^Cc: C E Cipient <rcipient@example.com>\$" patch12
+ git format-patch --no-cc --stdout master..side >patch12 &&
+ sed -e "/^\$/q" patch12 >hdrs12 &&
+ check_patch hdrs12 &&
+ ! grep "^Cc: C E Cipient <rcipient@example.com>\$" hdrs12
'
test_expect_success '--no-add-header overrides config.headers' '
-
git config --replace-all format.headers \
"Header1: B E Cipient <rcipient@example.com>" &&
- git format-patch --no-add-header --stdout master..side |
- sed -e "/^\$/q" >patch13 &&
- check_patch patch13 &&
- ! grep "^Header1: B E Cipient <rcipient@example.com>\$" patch13
+ git format-patch --no-add-header --stdout master..side >patch13 &&
+ sed -e "/^\$/q" patch13 >hdrs13 &&
+ check_patch hdrs13 &&
+ ! grep "^Header1: B E Cipient <rcipient@example.com>\$" hdrs13
'
test_expect_success 'multiple files' '
-
rm -rf patches/ &&
git checkout side &&
git format-patch -o patches/ master &&
@@ -357,7 +332,7 @@ test_expect_success 'reroll count (-v)' '
check_threading () {
expect="$1" &&
shift &&
- (git format-patch --stdout "$@"; echo $? > status.out) |
+ git format-patch --stdout "$@" >patch &&
# Prints everything between the Message-ID and In-Reply-To,
# and replaces all Message-ID-lookalikes by a sequence number
perl -ne '
@@ -372,12 +347,11 @@ check_threading () {
print;
}
print "---\n" if /^From /i;
- ' > actual &&
- test 0 = "$(cat status.out)" &&
+ ' <patch >actual &&
test_cmp "$expect" actual
}
-cat >> expect.no-threading <<EOF
+cat >>expect.no-threading <<EOF
---
---
---
@@ -388,7 +362,7 @@ test_expect_success 'no threading' '
check_threading expect.no-threading master
'
-cat > expect.thread <<EOF
+cat >expect.thread <<EOF
---
Message-Id: <0>
---
@@ -405,7 +379,7 @@ test_expect_success 'thread' '
check_threading expect.thread --thread master
'
-cat > expect.in-reply-to <<EOF
+cat >expect.in-reply-to <<EOF
---
Message-Id: <0>
In-Reply-To: <1>
@@ -425,7 +399,7 @@ test_expect_success 'thread in-reply-to' '
--thread master
'
-cat > expect.cover-letter <<EOF
+cat >expect.cover-letter <<EOF
---
Message-Id: <0>
---
@@ -446,7 +420,7 @@ test_expect_success 'thread cover-letter' '
check_threading expect.cover-letter --cover-letter --thread master
'
-cat > expect.cl-irt <<EOF
+cat >expect.cl-irt <<EOF
---
Message-Id: <0>
In-Reply-To: <1>
@@ -478,7 +452,7 @@ test_expect_success 'thread explicit shallow' '
--in-reply-to="<test.message>" --thread=shallow master
'
-cat > expect.deep <<EOF
+cat >expect.deep <<EOF
---
Message-Id: <0>
---
@@ -496,7 +470,7 @@ test_expect_success 'thread deep' '
check_threading expect.deep --thread=deep master
'
-cat > expect.deep-irt <<EOF
+cat >expect.deep-irt <<EOF
---
Message-Id: <0>
In-Reply-To: <1>
@@ -519,7 +493,7 @@ test_expect_success 'thread deep in-reply-to' '
--in-reply-to="<test.message>" master
'
-cat > expect.deep-cl <<EOF
+cat >expect.deep-cl <<EOF
---
Message-Id: <0>
---
@@ -543,7 +517,7 @@ test_expect_success 'thread deep cover-letter' '
check_threading expect.deep-cl --cover-letter --thread=deep master
'
-cat > expect.deep-cl-irt <<EOF
+cat >expect.deep-cl-irt <<EOF
---
Message-Id: <0>
In-Reply-To: <1>
@@ -594,7 +568,6 @@ test_expect_success 'thread config + --no-thread' '
'
test_expect_success 'excessive subject' '
-
rm -rf patches/ &&
git checkout side &&
before=$(git hash-object file) &&
@@ -622,10 +595,9 @@ test_expect_success 'cover-letter inherits diff options' '
! grep "file => foo .* 0 *\$" 0000-cover-letter.patch &&
git format-patch --cover-letter -1 -M &&
grep "file => foo .* 0 *\$" 0000-cover-letter.patch
-
'
-cat > expect << EOF
+cat >expect <<EOF
This is an excessively long subject line for a message due to the
habit some projects have of not having a short, one-line subject at
the start of the commit message, but rather sticking a whole
@@ -636,14 +608,12 @@ cat > expect << EOF
EOF
test_expect_success 'shortlog of cover-letter wraps overly-long onelines' '
-
git format-patch --cover-letter -2 &&
- sed -e "1,/A U Thor/d" -e "/^\$/q" < 0000-cover-letter.patch > output &&
+ sed -e "1,/A U Thor/d" -e "/^\$/q" 0000-cover-letter.patch >output &&
test_cmp expect output
-
'
-cat > expect << EOF
+cat >expect <<EOF
index $before..$after 100644
--- a/file
+++ b/file
@@ -656,16 +626,14 @@ index $before..$after 100644
EOF
test_expect_success 'format-patch respects -U' '
-
git format-patch -U4 -2 &&
sed -e "1,/^diff/d" -e "/^+5/q" \
<0001-This-is-an-excessively-long-subject-line-for-a-messa.patch \
>output &&
test_cmp expect output
-
'
-cat > expect << EOF
+cat >expect <<EOF
diff --git a/file b/file
index $before..$after 100644
@@ -679,11 +647,9 @@ index $before..$after 100644
EOF
test_expect_success 'format-patch -p suppresses stat' '
-
git format-patch -p -2 &&
- sed -e "1,/^\$/d" -e "/^+5/q" < 0001-This-is-an-excessively-long-subject-line-for-a-messa.patch > output &&
+ sed -e "1,/^\$/d" -e "/^+5/q" 0001-This-is-an-excessively-long-subject-line-for-a-messa.patch >output &&
test_cmp expect output
-
'
test_expect_success 'format-patch from a subdirectory (1)' '
@@ -736,7 +702,7 @@ test_expect_success 'format-patch from a subdirectory (3)' '
'
test_expect_success 'format-patch --in-reply-to' '
- git format-patch -1 --stdout --in-reply-to "baz@foo.bar" > patch8 &&
+ git format-patch -1 --stdout --in-reply-to "baz@foo.bar" >patch8 &&
grep "^In-Reply-To: <baz@foo.bar>" patch8 &&
grep "^References: <baz@foo.bar>" patch8
'
@@ -827,21 +793,24 @@ test_expect_success 'format-patch with multiple notes refs' '
! grep "this is note 2" out
'
-echo "fatal: --name-only does not make sense" > expect.name-only
-echo "fatal: --name-status does not make sense" > expect.name-status
-echo "fatal: --check does not make sense" > expect.check
+echo "fatal: --name-only does not make sense" >expect.name-only
+echo "fatal: --name-status does not make sense" >expect.name-status
+echo "fatal: --check does not make sense" >expect.check
test_expect_success 'options no longer allowed for format-patch' '
- test_must_fail git format-patch --name-only 2> output &&
+ test_must_fail git format-patch --name-only 2>output &&
test_i18ncmp expect.name-only output &&
- test_must_fail git format-patch --name-status 2> output &&
+ test_must_fail git format-patch --name-status 2>output &&
test_i18ncmp expect.name-status output &&
- test_must_fail git format-patch --check 2> output &&
- test_i18ncmp expect.check output'
+ test_must_fail git format-patch --check 2>output &&
+ test_i18ncmp expect.check output
+'
test_expect_success 'format-patch --numstat should produce a patch' '
- git format-patch --numstat --stdout master..side > output &&
- test 5 = $(grep "^diff --git a/" output | wc -l)'
+ git format-patch --numstat --stdout master..side >output &&
+ grep "^diff --git a/" output >diff &&
+ test_line_count = 5 diff
+'
test_expect_success 'format-patch -- <path>' '
git format-patch master..side -- file 2>error &&
@@ -852,20 +821,25 @@ test_expect_success 'format-patch --ignore-if-in-upstream HEAD' '
git format-patch --ignore-if-in-upstream HEAD
'
-git_version="$(git --version | sed "s/.* //")"
+test_expect_success 'get git version' '
+ git_version=$(git --version) &&
+ git_version=${git_version##* }
+'
signature() {
printf "%s\n%s\n\n" "-- " "${1:-$git_version}"
}
test_expect_success 'format-patch default signature' '
- git format-patch --stdout -1 | tail -n 3 >output &&
+ git format-patch --stdout -1 >patch &&
+ tail -n 3 patch >output &&
signature >expect &&
test_cmp expect output
'
test_expect_success 'format-patch --signature' '
- git format-patch --stdout --signature="my sig" -1 | tail -n 3 >output &&
+ git format-patch --stdout --signature="my sig" -1 >patch &&
+ tail -n 3 patch >output &&
signature "my sig" >expect &&
test_cmp expect output
'
@@ -897,8 +871,8 @@ test_expect_success 'format-patch --signature --cover-letter' '
git config --unset-all format.signature &&
git format-patch --stdout --signature="my sig" --cover-letter \
-1 >output &&
- grep "my sig" output &&
- test 2 = $(grep "my sig" output | wc -l)
+ grep "my sig" output >sig &&
+ test_line_count = 2 sig
'
test_expect_success 'format.signature="" suppresses signatures' '
@@ -935,7 +909,7 @@ test_expect_success 'prepare mail-signature input' '
test_expect_success '--signature-file=file works' '
git format-patch --stdout --signature-file=mail-signature -1 >output &&
check_patch output &&
- sed -e "1,/^-- \$/d" <output >actual &&
+ sed -e "1,/^-- \$/d" output >actual &&
{
cat mail-signature && echo
} >expect &&
@@ -946,7 +920,7 @@ test_expect_success 'format.signaturefile works' '
test_config format.signaturefile mail-signature &&
git format-patch --stdout -1 >output &&
check_patch output &&
- sed -e "1,/^-- \$/d" <output >actual &&
+ sed -e "1,/^-- \$/d" output >actual &&
{
cat mail-signature && echo
} >expect &&
@@ -968,7 +942,7 @@ test_expect_success '--signature-file overrides format.signaturefile' '
git format-patch --stdout \
--signature-file=other-mail-signature -1 >output &&
check_patch output &&
- sed -e "1,/^-- \$/d" <output >actual &&
+ sed -e "1,/^-- \$/d" output >actual &&
{
cat other-mail-signature && echo
} >expect &&
@@ -1037,7 +1011,7 @@ test_expect_success 'format-patch wraps extremely long subject (ascii)' '
git add file &&
git commit -m "$M512" &&
git format-patch --stdout -1 >patch &&
- sed -n "/^Subject/p; /^ /p; /^$/q" <patch >subject &&
+ sed -n "/^Subject/p; /^ /p; /^$/q" patch >subject &&
test_cmp expect subject
'
@@ -1076,7 +1050,7 @@ test_expect_success 'format-patch wraps extremely long subject (rfc2047)' '
git add file &&
git commit -m "$M512" &&
git format-patch --stdout -1 >patch &&
- sed -n "/^Subject/p; /^ /p; /^$/q" <patch >subject &&
+ sed -n "/^Subject/p; /^ /p; /^$/q" patch >subject &&
test_cmp expect subject
'
@@ -1085,7 +1059,7 @@ check_author() {
git add file &&
GIT_AUTHOR_NAME=$1 git commit -m author-check &&
git format-patch --stdout -1 >patch &&
- sed -n "/^From: /p; /^ /p; /^$/q" <patch >actual &&
+ sed -n "/^From: /p; /^ /p; /^$/q" patch >actual &&
test_cmp expect actual
}
@@ -1205,7 +1179,7 @@ test_expect_success '--from=ident replaces author' '
From: A U Thor <author@example.com>
EOF
- sed -ne "/^From:/p; /^$/p; /^---$/q" <patch >patch.head &&
+ sed -ne "/^From:/p; /^$/p; /^---$/q" patch >patch.head &&
test_cmp expect patch.head
'
@@ -1217,7 +1191,7 @@ test_expect_success '--from uses committer ident' '
From: A U Thor <author@example.com>
EOF
- sed -ne "/^From:/p; /^$/p; /^---$/q" <patch >patch.head &&
+ sed -ne "/^From:/p; /^$/p; /^---$/q" patch >patch.head &&
test_cmp expect patch.head
'
@@ -1227,7 +1201,7 @@ test_expect_success '--from omits redundant in-body header' '
From: A U Thor <author@example.com>
EOF
- sed -ne "/^From:/p; /^$/p; /^---$/q" <patch >patch.head &&
+ sed -ne "/^From:/p; /^$/p; /^---$/q" patch >patch.head &&
test_cmp expect patch.head
'
@@ -1242,7 +1216,7 @@ test_expect_success 'in-body headers trigger content encoding' '
From: éxötìc <author@example.com>
EOF
- sed -ne "/^From:/p; /^$/p; /^Content-Type/p; /^---$/q" <patch >patch.head &&
+ sed -ne "/^From:/p; /^$/p; /^Content-Type/p; /^---$/q" patch >patch.head &&
test_cmp expect patch.head
'
@@ -1256,283 +1230,283 @@ append_signoff()
test_expect_success 'signoff: commit with no body' '
append_signoff </dev/null >actual &&
- cat <<\EOF | sed "s/EOL$//" >expected &&
-4:Subject: [PATCH] EOL
-8:
-9:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ cat <<-\EOF | sed "s/EOL$//" >expect &&
+ 4:Subject: [PATCH] EOL
+ 8:
+ 9:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: commit with only subject' '
echo subject | append_signoff >actual &&
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-9:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 9:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: commit with only subject that does not end with NL' '
printf subject | append_signoff >actual &&
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-9:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 9:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: no existing signoffs' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ body
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: no existing signoffs and no trailing NL' '
printf "subject\n\nbody" | append_signoff >actual &&
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: some random signoff' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Signed-off-by: my@house
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: my@house
-12:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Signed-off-by: my@house
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: my@house
+ 12:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: misc conforming footer elements' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Signed-off-by: my@house
-(cherry picked from commit da39a3ee5e6b4b0d3255bfef95601890afd80709)
-Tested-by: Some One <someone@example.com>
-Bug: 1234
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: my@house
-15:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Signed-off-by: my@house
+ (cherry picked from commit da39a3ee5e6b4b0d3255bfef95601890afd80709)
+ Tested-by: Some One <someone@example.com>
+ Bug: 1234
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: my@house
+ 15:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: some random signoff-alike' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
-Fooled-by-me: my@house
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-11:
-12:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ body
+ Fooled-by-me: my@house
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 11:
+ 12:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: not really a signoff' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-I want to mention about Signed-off-by: here.
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-9:I want to mention about Signed-off-by: here.
-10:
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ I want to mention about Signed-off-by: here.
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 9:I want to mention about Signed-off-by: here.
+ 10:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: not really a signoff (2)' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-My unfortunate
-Signed-off-by: example happens to be wrapped here.
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:Signed-off-by: example happens to be wrapped here.
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ My unfortunate
+ Signed-off-by: example happens to be wrapped here.
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:Signed-off-by: example happens to be wrapped here.
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: valid S-o-b paragraph in the middle' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-Signed-off-by: my@house
-Signed-off-by: your@house
+ Signed-off-by: my@house
+ Signed-off-by: your@house
-A lot of houses.
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-9:Signed-off-by: my@house
-10:Signed-off-by: your@house
-11:
-13:
-14:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ A lot of houses.
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 9:Signed-off-by: my@house
+ 10:Signed-off-by: your@house
+ 11:
+ 13:
+ 14:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: the same signoff at the end' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Signed-off-by: C O Mitter <committer@example.com>
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: the same signoff at the end, no trailing NL' '
printf "subject\n\nSigned-off-by: C O Mitter <committer@example.com>" |
append_signoff >actual &&
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-9:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 9:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: the same signoff NOT at the end' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Signed-off-by: C O Mitter <committer@example.com>
-Signed-off-by: my@house
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-11:Signed-off-by: C O Mitter <committer@example.com>
-12:Signed-off-by: my@house
-EOF
- test_cmp expected actual
+ Signed-off-by: C O Mitter <committer@example.com>
+ Signed-off-by: my@house
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ 12:Signed-off-by: my@house
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: tolerate garbage in conforming footer' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Tested-by: my@house
-Some Trash
-Signed-off-by: C O Mitter <committer@example.com>
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-13:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Tested-by: my@house
+ Some Trash
+ Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 13:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: respect trailer config' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-Myfooter: x
-Some Trash
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-11:
-12:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual &&
+ Myfooter: x
+ Some Trash
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 11:
+ 12:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual &&
test_config trailer.Myfooter.ifexists add &&
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-Myfooter: x
-Some Trash
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-11:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Myfooter: x
+ Some Trash
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 11:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'signoff: footer begins with non-signoff without @ sign' '
- append_signoff <<\EOF >actual &&
-subject
+ append_signoff <<-\EOF >actual &&
+ subject
-body
+ body
-Reviewed-id: Noone
-Tested-by: my@house
-Change-id: Ideadbeef
-Signed-off-by: C O Mitter <committer@example.com>
-Bug: 1234
-EOF
- cat >expected <<\EOF &&
-4:Subject: [PATCH] subject
-8:
-10:
-14:Signed-off-by: C O Mitter <committer@example.com>
-EOF
- test_cmp expected actual
+ Reviewed-id: Noone
+ Tested-by: my@house
+ Change-id: Ideadbeef
+ Signed-off-by: C O Mitter <committer@example.com>
+ Bug: 1234
+ EOF
+ cat >expect <<-\EOF &&
+ 4:Subject: [PATCH] subject
+ 8:
+ 10:
+ 14:Signed-off-by: C O Mitter <committer@example.com>
+ EOF
+ test_cmp expect actual
'
test_expect_success 'format patch ignores color.ui' '
@@ -1547,42 +1521,42 @@ test_expect_success 'cover letter using branch description (1)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter master >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter using branch description (2)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter rebuild-1~2..rebuild-1 >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter using branch description (3)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter ^master rebuild-1 >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter using branch description (4)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter master.. >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter using branch description (5)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter -2 HEAD >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter using branch description (6)' '
git checkout rebuild-1 &&
test_config branch.rebuild-1.description hello &&
git format-patch --stdout --cover-letter -2 >actual &&
- grep hello actual >/dev/null
+ grep hello actual
'
test_expect_success 'cover letter with nothing' '
@@ -1636,7 +1610,9 @@ test_expect_success 'format-patch format.outputDirectory option' '
test_config format.outputDirectory patches &&
rm -fr patches &&
git format-patch master..side &&
- test $(git rev-list master..side | wc -l) -eq $(ls patches | wc -l)
+ count=$(git rev-list --count master..side) &&
+ ls patches >list &&
+ test_line_count = $count list
'
test_expect_success 'format-patch -o overrides format.outputDirectory' '
@@ -1649,20 +1625,41 @@ test_expect_success 'format-patch -o overrides format.outputDirectory' '
test_expect_success 'format-patch --base' '
git checkout patchid &&
- git format-patch --stdout --base=HEAD~3 -1 | tail -n 7 >actual1 &&
- git format-patch --stdout --base=HEAD~3 HEAD~.. | tail -n 7 >actual2 &&
- echo >expected &&
- echo "base-commit: $(git rev-parse HEAD~3)" >>expected &&
- echo "prerequisite-patch-id: $(git show --patch HEAD~2 | git patch-id --stable | awk "{print \$1}")" >>expected &&
- echo "prerequisite-patch-id: $(git show --patch HEAD~1 | git patch-id --stable | awk "{print \$1}")" >>expected &&
- signature >> expected &&
- test_cmp expected actual1 &&
- test_cmp expected actual2 &&
+
+ git format-patch --stdout --base=HEAD~3 -1 >patch &&
+ tail -n 7 patch >actual1 &&
+
+ git format-patch --stdout --base=HEAD~3 HEAD~.. >patch &&
+ tail -n 7 patch >actual2 &&
+
+ echo >expect &&
+ git rev-parse HEAD~3 >commit-id-base &&
+ echo "base-commit: $(cat commit-id-base)" >>expect &&
+
+ git show --patch HEAD~2 >patch &&
+ git patch-id --stable <patch >patch.id.raw &&
+ awk "{print \"prerequisite-patch-id:\", \$1}" <patch.id.raw >>expect &&
+
+ git show --patch HEAD~1 >patch &&
+ git patch-id --stable <patch >patch.id.raw &&
+ awk "{print \"prerequisite-patch-id:\", \$1}" <patch.id.raw >>expect &&
+
+ signature >>expect &&
+ test_cmp expect actual1 &&
+ test_cmp expect actual2 &&
+
echo >fail &&
- echo "base-commit: $(git rev-parse HEAD~3)" >>fail &&
- echo "prerequisite-patch-id: $(git show --patch HEAD~2 | git patch-id --unstable | awk "{print \$1}")" >>fail &&
- echo "prerequisite-patch-id: $(git show --patch HEAD~1 | git patch-id --unstable | awk "{print \$1}")" >>fail &&
- signature >> fail &&
+ echo "base-commit: $(cat commit-id-base)" >>fail &&
+
+ git show --patch HEAD~2 >patch &&
+ git patch-id --unstable <patch >patch.id.raw &&
+ awk "{print \"prerequisite-patch-id:\", \$1}" <patch.id.raw >>fail &&
+
+ git show --patch HEAD~1 >patch &&
+ git patch-id --unstable <patch >patch.id.raw &&
+ awk "{print \"prerequisite-patch-id:\", \$1}" <patch.id.raw >>fail &&
+
+ signature >>fail &&
! test_cmp fail actual1 &&
! test_cmp fail actual2
'
@@ -1672,8 +1669,9 @@ test_expect_success 'format-patch --base errors out when base commit is in revis
test_must_fail git format-patch --base=HEAD~1 -2 &&
git format-patch --stdout --base=HEAD~2 -2 >patch &&
grep "^base-commit:" patch >actual &&
- echo "base-commit: $(git rev-parse HEAD~2)" >expected &&
- test_cmp expected actual
+ git rev-parse HEAD~2 >commit-id-base &&
+ echo "base-commit: $(cat commit-id-base)" >expect &&
+ test_cmp expect actual
'
test_expect_success 'format-patch --base errors out when base commit is not ancestor of revision list' '
@@ -1699,8 +1697,8 @@ test_expect_success 'format-patch --base errors out when base commit is not ance
test_must_fail git format-patch --base=$(cat commit-id-Z) -3 &&
git format-patch --stdout --base=$(cat commit-id-base) -3 >patch &&
grep "^base-commit:" patch >actual &&
- echo "base-commit: $(cat commit-id-base)" >expected &&
- test_cmp expected actual
+ echo "base-commit: $(cat commit-id-base)" >expect &&
+ test_cmp expect actual
'
test_expect_success 'format-patch --base=auto' '
@@ -1711,8 +1709,9 @@ test_expect_success 'format-patch --base=auto' '
test_commit N2 &&
git format-patch --stdout --base=auto -2 >patch &&
grep "^base-commit:" patch >actual &&
- echo "base-commit: $(git rev-parse upstream)" >expected &&
- test_cmp expected actual
+ git rev-parse upstream >commit-id-base &&
+ echo "base-commit: $(cat commit-id-base)" >expect &&
+ test_cmp expect actual
'
test_expect_success 'format-patch errors out when history involves criss-cross' '
@@ -1748,8 +1747,9 @@ test_expect_success 'format-patch format.useAutoBase option' '
git config format.useAutoBase true &&
git format-patch --stdout -1 >patch &&
grep "^base-commit:" patch >actual &&
- echo "base-commit: $(git rev-parse upstream)" >expected &&
- test_cmp expected actual
+ git rev-parse upstream >commit-id-base &&
+ echo "base-commit: $(cat commit-id-base)" >expect &&
+ test_cmp expect actual
'
test_expect_success 'format-patch --base overrides format.useAutoBase' '
@@ -1757,8 +1757,9 @@ test_expect_success 'format-patch --base overrides format.useAutoBase' '
git config format.useAutoBase true &&
git format-patch --stdout --base=HEAD~1 -1 >patch &&
grep "^base-commit:" patch >actual &&
- echo "base-commit: $(git rev-parse HEAD~1)" >expected &&
- test_cmp expected actual
+ git rev-parse HEAD~1 >commit-id-base &&
+ echo "base-commit: $(cat commit-id-base)" >expect &&
+ test_cmp expect actual
'
test_expect_success 'format-patch --base with --attach' '
@@ -1833,7 +1834,7 @@ test_expect_success 'interdiff: cover-letter' '
git format-patch --cover-letter --interdiff=boop~2 -1 boop &&
test_i18ngrep "^Interdiff:$" 0000-cover-letter.patch &&
test_i18ngrep ! "^Interdiff:$" 0001-fleep.patch &&
- sed "1,/^@@ /d; /^-- $/q" <0000-cover-letter.patch >actual &&
+ sed "1,/^@@ /d; /^-- $/q" 0000-cover-letter.patch >actual &&
test_cmp expect actual
'
@@ -1849,7 +1850,7 @@ test_expect_success 'interdiff: solo-patch' '
EOF
git format-patch --interdiff=boop~2 -1 boop &&
test_i18ngrep "^Interdiff:$" 0001-fleep.patch &&
- sed "1,/^ @@ /d; /^$/q" <0001-fleep.patch >actual &&
+ sed "1,/^ @@ /d; /^$/q" 0001-fleep.patch >actual &&
test_cmp expect actual
'
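
For reference, the base-commit and prerequisite-patch-id trailers exercised in the rewritten t4014 hunks above can be reproduced by hand. A minimal sketch (illustrative only, not part of the patch; assumes a branch with at least three commits on HEAD):

	git rev-parse HEAD~3 >commit-id-base
	echo "base-commit: $(cat commit-id-base)"
	for rev in HEAD~2 HEAD~1
	do
		git show --patch $rev >patch &&
		git patch-id --stable <patch |
		awk '{print "prerequisite-patch-id:", $1}'
	done

git patch-id --stable prints "<patch-id> <commit-id>", so taking the first field yields the same identifier that format-patch --base records; the tests above also confirm that --unstable ids must not match.
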
diff --git a/t/t4018-diff-funcname.sh b/t/t4018-diff-funcname.sh
index 9261d6d3a0..6f5ef0035e 100755
--- a/t/t4018-diff-funcname.sh
+++ b/t/t4018-diff-funcname.sh
@@ -31,6 +31,7 @@ diffpatterns="
cpp
csharp
css
+ dts
fortran
fountain
golang
diff --git a/t/t4018/dts-labels b/t/t4018/dts-labels
new file mode 100644
index 0000000000..b21ef8737b
--- /dev/null
+++ b/t/t4018/dts-labels
@@ -0,0 +1,9 @@
+/ {
+ label_1: node1@ff00 {
+ label2: RIGHT {
+ vendor,some-property;
+
+ ChangeMe = <0x45-30>;
+ };
+ };
+};
diff --git a/t/t4018/dts-node-unitless b/t/t4018/dts-node-unitless
new file mode 100644
index 0000000000..c5287d9141
--- /dev/null
+++ b/t/t4018/dts-node-unitless
@@ -0,0 +1,8 @@
+/ {
+ label_1: node1 {
+ RIGHT {
+ prop-array = <1>, <4>;
+ ChangeMe = <0xffeedd00>;
+ };
+ };
+};
diff --git a/t/t4018/dts-nodes b/t/t4018/dts-nodes
new file mode 100644
index 0000000000..5a4334bb16
--- /dev/null
+++ b/t/t4018/dts-nodes
@@ -0,0 +1,8 @@
+/ {
+ label_1: node1@ff00 {
+ RIGHT@deadf00,4000 {
+ #size-cells = <1>;
+ ChangeMe = <0xffeedd00>;
+ };
+ };
+};
diff --git a/t/t4018/dts-nodes-comment1 b/t/t4018/dts-nodes-comment1
new file mode 100644
index 0000000000..559dfce9b3
--- /dev/null
+++ b/t/t4018/dts-nodes-comment1
@@ -0,0 +1,8 @@
+/ {
+ label_1: node1@ff00 {
+ RIGHT@deadf00,4000 /* &a comment */ {
+ #size-cells = <1>;
+ ChangeMe = <0xffeedd00>;
+ };
+ };
+};
diff --git a/t/t4018/dts-nodes-comment2 b/t/t4018/dts-nodes-comment2
new file mode 100644
index 0000000000..27e9718b31
--- /dev/null
+++ b/t/t4018/dts-nodes-comment2
@@ -0,0 +1,8 @@
+/ {
+ label_1: node1@ff00 {
+ RIGHT@deadf00,4000 { /* a trailing comment */
+ #size-cells = <1>;
+ ChangeMe = <0xffeedd00>;
+ };
+ };
+};
diff --git a/t/t4018/dts-reference b/t/t4018/dts-reference
new file mode 100644
index 0000000000..8f0c87d863
--- /dev/null
+++ b/t/t4018/dts-reference
@@ -0,0 +1,9 @@
+&label_1 {
+ TEST = <455>;
+};
+
+&RIGHT {
+ vendor,some-property;
+
+ ChangeMe = <0x45-30>;
+};
diff --git a/t/t4018/dts-root b/t/t4018/dts-root
new file mode 100644
index 0000000000..2ef9e6ffaa
--- /dev/null
+++ b/t/t4018/dts-root
@@ -0,0 +1,5 @@
+/RIGHT { /* Technically just supposed to be a slash */
+ #size-cells = <1>;
+
+ ChangeMe = <0xffeedd00>;
+};
diff --git a/t/t4034-diff-words.sh b/t/t4034-diff-words.sh
index 912df91226..9a93c2a3e0 100755
--- a/t/t4034-diff-words.sh
+++ b/t/t4034-diff-words.sh
@@ -303,6 +303,7 @@ test_language_driver bibtex
test_language_driver cpp
test_language_driver csharp
test_language_driver css
+test_language_driver dts
test_language_driver fortran
test_language_driver html
test_language_driver java
diff --git a/t/t4034/dts/expect b/t/t4034/dts/expect
new file mode 100644
index 0000000000..560fc99184
--- /dev/null
+++ b/t/t4034/dts/expect
@@ -0,0 +1,37 @@
+<BOLD>diff --git a/pre b/post<RESET>
+<BOLD>index b6a9051..7803aee 100644<RESET>
+<BOLD>--- a/pre<RESET>
+<BOLD>+++ b/post<RESET>
+<CYAN>@@ -1,32 +1,32 @@<RESET>
+/ {<RESET>
+ <RED>this_handle<RESET><GREEN>HANDLE_2<RESET>: <RED>node<RESET><GREEN>new-node<RESET>@<RED>f00<RESET><GREEN>eeda<RESET> {
+ compatible = "<RED>mydev<RESET><GREEN>vendor,compat<RESET>";
+ string-prop = <RED>start<RESET><GREEN>end<RESET>: "hello <RED>world!<RESET><GREEN>world?<RESET>" <RED>end<RESET><GREEN>start<RESET>: ;
+ <RED>#size-cells<RESET><GREEN>#address-cells<RESET> = <<RED>0+0<RESET><GREEN>0+40<RESET>>;
+ reg = <<RED>0xf00<RESET><GREEN>0xeeda<RESET>>;
+ prop = <<GREEN>(<RESET>1<GREEN>)<RESET>>;
+ prop = <<GREEN>(<RESET>-1e10<GREEN>)<RESET>>;
+ prop = <(!<RED>3<RESET><GREEN>1<RESET>)>;
+ prop = <(~<RED>3<RESET><GREEN>1<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>*<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>&<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>*<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>/<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>%<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3+4<RESET><GREEN>1+2<RESET>)>;
+ prop = <(<RED>3-4<RESET><GREEN>1-2<RESET>)>;
+ prop = /bits/ <RED>64<RESET><GREEN>32<RESET> <(<RED>3<RESET><GREEN>1<RESET><<<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>>><RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>&<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>^<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>|<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>&&<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>3<RESET><GREEN>1<RESET>||<RED>4<RESET><GREEN>2<RESET>)>;
+ prop = <(<RED>4?5<RESET><GREEN>1?2<RESET>:3)>;
+ list = <&<RED>this_handle<RESET><GREEN>HANDLE_2<RESET>>, <0 0 0 <RED>0<RESET><GREEN>1<RESET>>;
+ };<RESET>
+
+ &<RED>phandle<RESET><GREEN>phandle2<RESET> {
+ <RED>pre-phandle<RESET><GREEN>prop_handle<RESET> = <&<RED>this_handle<RESET><GREEN>HANDLE_2<RESET>>;
+ };<RESET>
+};<RESET>
diff --git a/t/t4034/dts/post b/t/t4034/dts/post
new file mode 100644
index 0000000000..7803aee280
--- /dev/null
+++ b/t/t4034/dts/post
@@ -0,0 +1,32 @@
+/ {
+ HANDLE_2: new-node@eeda {
+ compatible = "vendor,compat";
+ string-prop = end: "hello world?" start: ;
+ #address-cells = <0+40>;
+ reg = <0xeeda>;
+ prop = <(1)>;
+ prop = <(-1e10)>;
+ prop = <(!1)>;
+ prop = <(~1)>;
+ prop = <(1*2)>;
+ prop = <(1&2)>;
+ prop = <(1*2)>;
+ prop = <(1/2)>;
+ prop = <(1%2)>;
+ prop = <(1+2)>;
+ prop = <(1-2)>;
+ prop = /bits/ 32 <(1<<2)>;
+ prop = <(1>>2)>;
+ prop = <(1&2)>;
+ prop = <(1^2)>;
+ prop = <(1|2)>;
+ prop = <(1&&2)>;
+ prop = <(1||2)>;
+ prop = <(1?2:3)>;
+ list = <&HANDLE_2>, <0 0 0 1>;
+ };
+
+ &phandle2 {
+ prop_handle = <&HANDLE_2>;
+ };
+};
diff --git a/t/t4034/dts/pre b/t/t4034/dts/pre
new file mode 100644
index 0000000000..b6a905113c
--- /dev/null
+++ b/t/t4034/dts/pre
@@ -0,0 +1,32 @@
+/ {
+ this_handle: node@f00 {
+ compatible = "mydev";
+ string-prop = start: "hello world!" end: ;
+ #size-cells = <0+0>;
+ reg = <0xf00>;
+ prop = <1>;
+ prop = <-1e10>;
+ prop = <(!3)>;
+ prop = <(~3)>;
+ prop = <(3*4)>;
+ prop = <(3&4)>;
+ prop = <(3*4)>;
+ prop = <(3/4)>;
+ prop = <(3%4)>;
+ prop = <(3+4)>;
+ prop = <(3-4)>;
+ prop = /bits/ 64 <(3<<4)>;
+ prop = <(3>>4)>;
+ prop = <(3&4)>;
+ prop = <(3^4)>;
+ prop = <(3|4)>;
+ prop = <(3&&4)>;
+ prop = <(3||4)>;
+ prop = <(4?5:3)>;
+ list = <&this_handle>, <0 0 0 0>;
+ };
+
+ &phandle {
+ pre-phandle = <&this_handle>;
+ };
+};
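
The new t4018/dts-* fixtures and the t4034/dts word-diff corpus above exercise the built-in "dts" userdiff driver. A minimal usage sketch (illustrative, not from the patch; assumes devicetree sources in the work tree):

	# enable the driver per path via .gitattributes
	echo "*.dts  diff=dts" >>.gitattributes
	echo "*.dtsi diff=dts" >>.gitattributes
	git diff              # hunk headers now name the enclosing node/label
	git diff --word-diff  # word splitting follows the dts rules tested above
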
diff --git a/t/t4038-diff-combined.sh b/t/t4038-diff-combined.sh
index d4afe12554..b9d876efa2 100755
--- a/t/t4038-diff-combined.sh
+++ b/t/t4038-diff-combined.sh
@@ -509,7 +509,7 @@ test_expect_success FUNNYNAMES '--combined-all-paths and --raw and funny names'
 test_expect_success FUNNYNAMES '--combined-all-paths and --raw and -z and funny names' '
printf "aaf8087c3cbd4db8e185a2d074cf27c53cfb75d7\0::100644 100644 100644 f00c965d8307308469e537302baa73048488f162 088bd5d92c2a8e0203ca8e7e4c2a5c692f6ae3f7 333b9c62519f285e1854830ade0fe1ef1d40ee1b RR\0file\twith\ttabs\0i\tam\ttabbed\0fickle\tnaming\0" >expect &&
git diff-tree -c -M --raw --combined-all-paths -z HEAD >actual &&
- test_cmp -a expect actual
+ test_cmp expect actual
'
test_expect_success FUNNYNAMES '--combined-all-paths and --cc and funny names' '
diff --git a/t/t4067-diff-partial-clone.sh b/t/t4067-diff-partial-clone.sh
index 90c8fb2901..4831ad35e6 100755
--- a/t/t4067-diff-partial-clone.sh
+++ b/t/t4067-diff-partial-clone.sh
@@ -75,6 +75,37 @@ test_expect_success 'diff skips same-OID blobs' '
! grep "want $(cat hash-b)" trace
'
+test_expect_success 'when fetching missing objects, diff skips GITLINKs' '
+ test_when_finished "rm -rf sub server client trace" &&
+
+ test_create_repo sub &&
+ test_commit -C sub first &&
+
+ test_create_repo server &&
+ echo a >server/a &&
+ git -C server add a &&
+ git -C server submodule add "file://$(pwd)/sub" &&
+ git -C server commit -m x &&
+
+ test_commit -C server/sub second &&
+ echo another-a >server/a &&
+ git -C server add a sub &&
+ git -C server commit -m x &&
+
+ test_config -C server uploadpack.allowfilter 1 &&
+ test_config -C server uploadpack.allowanysha1inwant 1 &&
+ git clone --bare --filter=blob:limit=0 "file://$(pwd)/server" client &&
+
+ echo a | git hash-object --stdin >hash-old-a &&
+ echo another-a | git hash-object --stdin >hash-new-a &&
+
+ # Ensure that a and another-a are fetched, and check (by successful
+ # execution of the diff) that no invalid OIDs are sent.
+ GIT_TRACE_PACKET="$(pwd)/trace" git -C client diff HEAD^ HEAD &&
+ grep "want $(cat hash-old-a)" trace &&
+ grep "want $(cat hash-new-a)" trace
+'
+
test_expect_success 'diff with rename detection batches blobs' '
test_when_finished "rm -rf server client trace" &&
diff --git a/t/t4150-am.sh b/t/t4150-am.sh
index 3f7f750cc8..4f1e24ecbe 100755
--- a/t/t4150-am.sh
+++ b/t/t4150-am.sh
@@ -1061,4 +1061,56 @@ test_expect_success 'am --quit keeps HEAD where it is' '
test_cmp expected actual
'
+test_expect_success 'am and .gitattributes' '
+ test_create_repo attributes &&
+ (
+ cd attributes &&
+ test_commit init &&
+ git config filter.test.clean "sed -e '\''s/smudged/clean/g'\''" &&
+ git config filter.test.smudge "sed -e '\''s/clean/smudged/g'\''" &&
+
+ test_commit second &&
+ git checkout -b test HEAD^ &&
+
+ echo "*.txt filter=test conflict-marker-size=10" >.gitattributes &&
+ git add .gitattributes &&
+ test_commit third &&
+
+ echo "This text is smudged." >a.txt &&
+ git add a.txt &&
+ test_commit fourth &&
+
+ git checkout -b removal HEAD^ &&
+ git rm .gitattributes &&
+ git add -u &&
+ test_commit fifth &&
+ git cherry-pick test &&
+
+ git checkout -b conflict third &&
+ echo "This text is different." >a.txt &&
+ git add a.txt &&
+ test_commit sixth &&
+
+ git checkout test &&
+ git format-patch --stdout master..HEAD >patches &&
+ git reset --hard master &&
+ git am patches &&
+ grep "smudged" a.txt &&
+
+ git checkout removal &&
+ git reset --hard &&
+ git format-patch --stdout master..HEAD >patches &&
+ git reset --hard master &&
+ git am patches &&
+ grep "clean" a.txt &&
+
+ git checkout conflict &&
+ git reset --hard &&
+ git format-patch --stdout master..HEAD >patches &&
+ git reset --hard fourth &&
+ test_must_fail git am -3 patches &&
+ grep "<<<<<<<<<<" a.txt
+ )
+'
+
test_done
diff --git a/t/t4202-log.sh b/t/t4202-log.sh
index c20209324c..e803ba402e 100755
--- a/t/t4202-log.sh
+++ b/t/t4202-log.sh
@@ -837,6 +837,21 @@ test_expect_success 'decorate-refs and decorate-refs-exclude' '
test_cmp expect.decorate actual
'
+test_expect_success 'decorate-refs-exclude and simplify-by-decoration' '
+ cat >expect.decorate <<-\EOF &&
+ Merge-tag-reach (HEAD -> master)
+ reach (tag: reach, reach)
+ seventh (tag: seventh)
+ Merge-branch-tangle
+ Merge-branch-side-early-part-into-tangle (tangle)
+ tangle-a (tag: tangle-a)
+ EOF
+ git log -n6 --decorate=short --pretty="tformat:%f%d" \
+ --decorate-refs-exclude="*octopus*" \
+ --simplify-by-decoration >actual &&
+ test_cmp expect.decorate actual
+'
+
test_expect_success 'log.decorate config parsing' '
git log --oneline --decorate=full >expect.full &&
git log --oneline --decorate=short >expect.short &&
@@ -1707,4 +1722,11 @@ test_expect_success '--exclude-promisor-objects does not BUG-crash' '
test_must_fail git log --exclude-promisor-objects source-a
'
+test_expect_success 'log --end-of-options' '
+ git update-ref refs/heads/--source HEAD &&
+ git log --end-of-options --source >actual &&
+ git log >expect &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t4210-log-i18n.sh b/t/t4210-log-i18n.sh
index 7c519436ef..6e61f57f09 100755
--- a/t/t4210-log-i18n.sh
+++ b/t/t4210-log-i18n.sh
@@ -1,12 +1,15 @@
#!/bin/sh
test_description='test log with i18n features'
-. ./test-lib.sh
+. ./lib-gettext.sh
# two forms of é
utf8_e=$(printf '\303\251')
latin1_e=$(printf '\351')
+# invalid UTF-8
+invalid_e=$(printf '\303\50)') # ")" at end to close opening "("
+
test_expect_success 'create commits in different encodings' '
test_tick &&
cat >msg <<-EOF &&
@@ -48,9 +51,43 @@ test_expect_success !MINGW 'log --grep does not find non-reencoded values (utf8)
test_must_be_empty actual
'
-test_expect_success 'log --grep does not find non-reencoded values (latin1)' '
+test_expect_success !MINGW 'log --grep does not find non-reencoded values (latin1)' '
git log --encoding=ISO-8859-1 --format=%s --grep=$utf8_e >actual &&
test_must_be_empty actual
'
+for engine in fixed basic extended perl
+do
+ prereq=
+ if test $engine = "perl"
+ then
+ prereq="PCRE"
+ else
+ prereq=""
+ fi
+ force_regex=
+ if test $engine != "fixed"
+ then
+ force_regex=.*
+ fi
+	test_expect_success !MINGW,GETTEXT_LOCALE,$prereq "-c grep.patternType=$engine log --grep searches in log output encoding (latin1 + locale)" "
+ cat >expect <<-\EOF &&
+ latin1
+ utf8
+ EOF
+ LC_ALL=\"$is_IS_locale\" git -c grep.patternType=$engine log --encoding=ISO-8859-1 --format=%s --grep=\"$force_regex$latin1_e\" >actual &&
+ test_cmp expect actual
+ "
+
+ test_expect_success !MINGW,GETTEXT_LOCALE,$prereq "-c grep.patternType=$engine log --grep does not find non-reencoded values (latin1 + locale)" "
+ LC_ALL=\"$is_IS_locale\" git -c grep.patternType=$engine log --encoding=ISO-8859-1 --format=%s --grep=\"$force_regex$utf8_e\" >actual &&
+ test_must_be_empty actual
+ "
+
+ test_expect_success !MINGW,GETTEXT_LOCALE,$prereq "-c grep.patternType=$engine log --grep does not die on invalid UTF-8 value (latin1 + locale + invalid needle)" "
+ LC_ALL=\"$is_IS_locale\" git -c grep.patternType=$engine log --encoding=ISO-8859-1 --format=%s --grep=\"$force_regex$invalid_e\" >actual &&
+ test_must_be_empty actual
+ "
+done
+
test_done
diff --git a/t/t4211-line-log.sh b/t/t4211-line-log.sh
index 1db7bd0f59..8319163744 100755
--- a/t/t4211-line-log.sh
+++ b/t/t4211-line-log.sh
@@ -132,4 +132,86 @@ test_expect_success '--raw is forbidden' '
test_must_fail git log -L1,24:b.c --raw
'
+test_expect_success 'setup for checking fancy rename following' '
+ git checkout --orphan moves-start &&
+ git reset --hard &&
+
+ printf "%s\n" 12 13 14 15 b c d e >file-1 &&
+ printf "%s\n" 22 23 24 25 B C D E >file-2 &&
+ git add file-1 file-2 &&
+ test_tick &&
+ git commit -m "Add file-1 and file-2" &&
+ oid_add_f1_f2=$(git rev-parse --short HEAD) &&
+
+ git checkout -b moves-main &&
+ printf "%s\n" 11 12 13 14 15 b c d e >file-1 &&
+ git commit -a -m "Modify file-1 on main" &&
+ oid_mod_f1_main=$(git rev-parse --short HEAD) &&
+
+ printf "%s\n" 21 22 23 24 25 B C D E >file-2 &&
+ git commit -a -m "Modify file-2 on main #1" &&
+ oid_mod_f2_main_1=$(git rev-parse --short HEAD) &&
+
+ git mv file-1 renamed-1 &&
+ git commit -m "Rename file-1 to renamed-1 on main" &&
+
+ printf "%s\n" 11 12 13 14 15 b c d e f >renamed-1 &&
+ git commit -a -m "Modify renamed-1 on main" &&
+ oid_mod_r1_main=$(git rev-parse --short HEAD) &&
+
+ printf "%s\n" 21 22 23 24 25 B C D E F >file-2 &&
+ git commit -a -m "Modify file-2 on main #2" &&
+ oid_mod_f2_main_2=$(git rev-parse --short HEAD) &&
+
+ git checkout -b moves-side moves-start &&
+ printf "%s\n" 12 13 14 15 16 b c d e >file-1 &&
+ git commit -a -m "Modify file-1 on side #1" &&
+ oid_mod_f1_side_1=$(git rev-parse --short HEAD) &&
+
+ printf "%s\n" 22 23 24 25 26 B C D E >file-2 &&
+ git commit -a -m "Modify file-2 on side" &&
+ oid_mod_f2_side=$(git rev-parse --short HEAD) &&
+
+ git mv file-2 renamed-2 &&
+ git commit -m "Rename file-2 to renamed-2 on side" &&
+
+ printf "%s\n" 12 13 14 15 16 a b c d e >file-1 &&
+ git commit -a -m "Modify file-1 on side #2" &&
+ oid_mod_f1_side_2=$(git rev-parse --short HEAD) &&
+
+ printf "%s\n" 22 23 24 25 26 A B C D E >renamed-2 &&
+ git commit -a -m "Modify renamed-2 on side" &&
+ oid_mod_r2_side=$(git rev-parse --short HEAD) &&
+
+ git checkout moves-main &&
+ git merge moves-side &&
+ oid_merge=$(git rev-parse --short HEAD)
+'
+
+test_expect_success 'fancy rename following #1' '
+ cat >expect <<-EOF &&
+ $oid_merge Merge branch '\''moves-side'\'' into moves-main
+ $oid_mod_f1_side_2 Modify file-1 on side #2
+ $oid_mod_f1_side_1 Modify file-1 on side #1
+ $oid_mod_r1_main Modify renamed-1 on main
+ $oid_mod_f1_main Modify file-1 on main
+ $oid_add_f1_f2 Add file-1 and file-2
+ EOF
+ git log -L1:renamed-1 --oneline --no-patch >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'fancy rename following #2' '
+ cat >expect <<-EOF &&
+ $oid_merge Merge branch '\''moves-side'\'' into moves-main
+ $oid_mod_r2_side Modify renamed-2 on side
+ $oid_mod_f2_side Modify file-2 on side
+ $oid_mod_f2_main_2 Modify file-2 on main #2
+ $oid_mod_f2_main_1 Modify file-2 on main #1
+ $oid_add_f1_f2 Add file-1 and file-2
+ EOF
+ git log -L1:renamed-2 --oneline --no-patch >actual &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t4214-log-graph-octopus.sh b/t/t4214-log-graph-octopus.sh
index dab96c89aa..3ae8e51e50 100755
--- a/t/t4214-log-graph-octopus.sh
+++ b/t/t4214-log-graph-octopus.sh
@@ -5,6 +5,25 @@ test_description='git log --graph of skewed left octopus merge.'
. ./test-lib.sh
test_expect_success 'set up merge history' '
+ test_commit initial &&
+ for i in 1 2 3 4 ; do
+ git checkout master -b $i || return $?
+ # Make tag name different from branch name, to avoid
+ # ambiguity error when calling checkout.
+ test_commit $i $i $i tag$i || return $?
+ done &&
+ git checkout 1 -b merge &&
+ test_merge octopus-merge 1 2 3 4 &&
+ test_commit after-merge &&
+ git checkout 1 -b L &&
+ test_commit left &&
+ git checkout 4 -b crossover &&
+ test_commit after-4 &&
+ git checkout initial -b more-L &&
+ test_commit after-initial
+'
+
+test_expect_success 'log --graph with tricky octopus merge, no color' '
cat >expect.uncolored <<-\EOF &&
* left
| *---. octopus-merge
@@ -19,6 +38,13 @@ test_expect_success 'set up merge history' '
|/
* initial
EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s left octopus-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_success 'log --graph with tricky octopus merge with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
cat >expect.colors <<-\EOF &&
* left
<RED>|<RESET> *<BLUE>-<RESET><BLUE>-<RESET><MAGENTA>-<RESET><MAGENTA>.<RESET> octopus-merge
@@ -33,33 +59,11 @@ test_expect_success 'set up merge history' '
<MAGENTA>|<RESET><MAGENTA>/<RESET>
* initial
EOF
- test_commit initial &&
- for i in 1 2 3 4 ; do
- git checkout master -b $i || return $?
- # Make tag name different from branch name, to avoid
- # ambiguity error when calling checkout.
- test_commit $i $i $i tag$i || return $?
- done &&
- git checkout 1 -b merge &&
- test_tick &&
- git merge -m octopus-merge 1 2 3 4 &&
- git checkout 1 -b L &&
- test_commit left
-'
-
-test_expect_success 'log --graph with tricky octopus merge with colors' '
- test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
- git log --color=always --graph --date-order --pretty=tformat:%s --all >actual.colors.raw &&
+ git log --color=always --graph --date-order --pretty=tformat:%s left octopus-merge >actual.colors.raw &&
test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
test_cmp expect.colors actual.colors
'
-test_expect_success 'log --graph with tricky octopus merge, no color' '
- git log --color=never --graph --date-order --pretty=tformat:%s --all >actual.raw &&
- sed "s/ *\$//" actual.raw >actual &&
- test_cmp expect.uncolored actual
-'
-
# Repeat the previous two tests with "normal" octopus merge (i.e.,
# without the first parent skewing to the "left" branch column).
@@ -76,7 +80,7 @@ test_expect_success 'log --graph with normal octopus merge, no color' '
|/
* initial
EOF
- git log --color=never --graph --date-order --pretty=tformat:%s merge >actual.raw &&
+ git log --color=never --graph --date-order --pretty=tformat:%s octopus-merge >actual.raw &&
sed "s/ *\$//" actual.raw >actual &&
test_cmp expect.uncolored actual
'
@@ -95,8 +99,283 @@ test_expect_success 'log --graph with normal octopus merge with colors' '
* initial
EOF
test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
- git log --color=always --graph --date-order --pretty=tformat:%s merge >actual.colors.raw &&
+ git log --color=always --graph --date-order --pretty=tformat:%s octopus-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with normal octopus merge and child, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * after-merge
+ *---. octopus-merge
+ |\ \ \
+ | | | * 4
+ | | * | 3
+ | | |/
+ | * | 2
+ | |/
+ * | 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s after-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_failure 'log --graph with normal octopus and child merge with colors' '
+ cat >expect.colors <<-\EOF &&
+ * after-merge
+ *<BLUE>-<RESET><BLUE>-<RESET><MAGENTA>-<RESET><MAGENTA>.<RESET> octopus-merge
+ <GREEN>|<RESET><YELLOW>\<RESET> <BLUE>\<RESET> <MAGENTA>\<RESET>
+ <GREEN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> * 4
+ <GREEN>|<RESET> <YELLOW>|<RESET> * <MAGENTA>|<RESET> 3
+ <GREEN>|<RESET> <YELLOW>|<RESET> <MAGENTA>|<RESET><MAGENTA>/<RESET>
+ <GREEN>|<RESET> * <MAGENTA>|<RESET> 2
+ <GREEN>|<RESET> <MAGENTA>|<RESET><MAGENTA>/<RESET>
+ * <MAGENTA>|<RESET> 1
+ <MAGENTA>|<RESET><MAGENTA>/<RESET>
+ * initial
+ EOF
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ git log --color=always --graph --date-order --pretty=tformat:%s after-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with tricky octopus merge and its child, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * left
+ | * after-merge
+ | *---. octopus-merge
+ | |\ \ \
+ |/ / / /
+ | | | * 4
+ | | * | 3
+ | | |/
+ | * | 2
+ | |/
+ * | 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s left after-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_failure 'log --graph with tricky octopus merge and its child with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ cat >expect.colors <<-\EOF &&
+ * left
+ <RED>|<RESET> * after-merge
+ <RED>|<RESET> *<MAGENTA>-<RESET><MAGENTA>-<RESET><CYAN>-<RESET><CYAN>.<RESET> octopus-merge
+ <RED>|<RESET> <RED>|<RESET><BLUE>\<RESET> <MAGENTA>\<RESET> <CYAN>\<RESET>
+ <RED>|<RESET><RED>/<RESET> <BLUE>/<RESET> <MAGENTA>/<RESET> <CYAN>/<RESET>
+ <RED>|<RESET> <BLUE>|<RESET> <MAGENTA>|<RESET> * 4
+ <RED>|<RESET> <BLUE>|<RESET> * <CYAN>|<RESET> 3
+ <RED>|<RESET> <BLUE>|<RESET> <CYAN>|<RESET><CYAN>/<RESET>
+ <RED>|<RESET> * <CYAN>|<RESET> 2
+ <RED>|<RESET> <CYAN>|<RESET><CYAN>/<RESET>
+ * <CYAN>|<RESET> 1
+ <CYAN>|<RESET><CYAN>/<RESET>
+ * initial
+ EOF
+ git log --color=always --graph --date-order --pretty=tformat:%s left after-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with crossover in octopus merge, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * after-4
+ | *---. octopus-merge
+ | |\ \ \
+ | |_|_|/
+ |/| | |
+ * | | | 4
+ | | | * 3
+ | |_|/
+ |/| |
+ | | * 2
+ | |/
+ |/|
+ | * 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s after-4 octopus-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_failure 'log --graph with crossover in octopus merge with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ cat >expect.colors <<-\EOF &&
+ * after-4
+ <RED>|<RESET> *<BLUE>-<RESET><BLUE>-<RESET><RED>-<RESET><RED>.<RESET> octopus-merge
+ <RED>|<RESET> <GREEN>|<RESET><YELLOW>\<RESET> <BLUE>\<RESET> <RED>\<RESET>
+ <RED>|<RESET> <GREEN>|<RESET><RED>_<RESET><YELLOW>|<RESET><RED>_<RESET><BLUE>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><GREEN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET>
+ * <GREEN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> 4
+ <MAGENTA>|<RESET> <GREEN>|<RESET> <YELLOW>|<RESET> * 3
+ <MAGENTA>|<RESET> <GREEN>|<RESET><MAGENTA>_<RESET><YELLOW>|<RESET><MAGENTA>/<RESET>
+ <MAGENTA>|<RESET><MAGENTA>/<RESET><GREEN>|<RESET> <YELLOW>|<RESET>
+ <MAGENTA>|<RESET> <GREEN>|<RESET> * 2
+ <MAGENTA>|<RESET> <GREEN>|<RESET><MAGENTA>/<RESET>
+ <MAGENTA>|<RESET><MAGENTA>/<RESET><GREEN>|<RESET>
+ <MAGENTA>|<RESET> * 1
+ <MAGENTA>|<RESET><MAGENTA>/<RESET>
+ * initial
+ EOF
+ git log --color=always --graph --date-order --pretty=tformat:%s after-4 octopus-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with crossover in octopus merge and its child, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * after-4
+ | * after-merge
+ | *---. octopus-merge
+ | |\ \ \
+ | |_|_|/
+ |/| | |
+ * | | | 4
+ | | | * 3
+ | |_|/
+ |/| |
+ | | * 2
+ | |/
+ |/|
+ | * 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s after-4 after-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_failure 'log --graph with crossover in octopus merge and its child with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ cat >expect.colors <<-\EOF &&
+ * after-4
+ <RED>|<RESET> * after-merge
+ <RED>|<RESET> *<MAGENTA>-<RESET><MAGENTA>-<RESET><RED>-<RESET><RED>.<RESET> octopus-merge
+ <RED>|<RESET> <YELLOW>|<RESET><BLUE>\<RESET> <MAGENTA>\<RESET> <RED>\<RESET>
+ <RED>|<RESET> <YELLOW>|<RESET><RED>_<RESET><BLUE>|<RESET><RED>_<RESET><MAGENTA>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><YELLOW>|<RESET> <BLUE>|<RESET> <MAGENTA>|<RESET>
+ * <YELLOW>|<RESET> <BLUE>|<RESET> <MAGENTA>|<RESET> 4
+ <CYAN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> * 3
+ <CYAN>|<RESET> <YELLOW>|<RESET><CYAN>_<RESET><BLUE>|<RESET><CYAN>/<RESET>
+ <CYAN>|<RESET><CYAN>/<RESET><YELLOW>|<RESET> <BLUE>|<RESET>
+ <CYAN>|<RESET> <YELLOW>|<RESET> * 2
+ <CYAN>|<RESET> <YELLOW>|<RESET><CYAN>/<RESET>
+ <CYAN>|<RESET><CYAN>/<RESET><YELLOW>|<RESET>
+ <CYAN>|<RESET> * 1
+ <CYAN>|<RESET><CYAN>/<RESET>
+ * initial
+ EOF
+ git log --color=always --graph --date-order --pretty=tformat:%s after-4 after-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with unrelated commit and octopus tip, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * after-initial
+ | *---. octopus-merge
+ | |\ \ \
+ | | | | * 4
+ | |_|_|/
+ |/| | |
+ | | | * 3
+ | |_|/
+ |/| |
+ | | * 2
+ | |/
+ |/|
+ | * 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s after-initial octopus-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_success 'log --graph with unrelated commit and octopus tip with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ cat >expect.colors <<-\EOF &&
+ * after-initial
+ <RED>|<RESET> *<BLUE>-<RESET><BLUE>-<RESET><MAGENTA>-<RESET><MAGENTA>.<RESET> octopus-merge
+ <RED>|<RESET> <GREEN>|<RESET><YELLOW>\<RESET> <BLUE>\<RESET> <MAGENTA>\<RESET>
+ <RED>|<RESET> <GREEN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> * 4
+ <RED>|<RESET> <GREEN>|<RESET><RED>_<RESET><YELLOW>|<RESET><RED>_<RESET><BLUE>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><GREEN>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET>
+ <RED>|<RESET> <GREEN>|<RESET> <YELLOW>|<RESET> * 3
+ <RED>|<RESET> <GREEN>|<RESET><RED>_<RESET><YELLOW>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><GREEN>|<RESET> <YELLOW>|<RESET>
+ <RED>|<RESET> <GREEN>|<RESET> * 2
+ <RED>|<RESET> <GREEN>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><GREEN>|<RESET>
+ <RED>|<RESET> * 1
+ <RED>|<RESET><RED>/<RESET>
+ * initial
+ EOF
+ git log --color=always --graph --date-order --pretty=tformat:%s after-initial octopus-merge >actual.colors.raw &&
+ test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
+ test_cmp expect.colors actual.colors
+'
+
+test_expect_success 'log --graph with unrelated commit and octopus child, no color' '
+ cat >expect.uncolored <<-\EOF &&
+ * after-initial
+ | * after-merge
+ | *---. octopus-merge
+ | |\ \ \
+ | | | | * 4
+ | |_|_|/
+ |/| | |
+ | | | * 3
+ | |_|/
+ |/| |
+ | | * 2
+ | |/
+ |/|
+ | * 1
+ |/
+ * initial
+ EOF
+ git log --color=never --graph --date-order --pretty=tformat:%s after-initial after-merge >actual.raw &&
+ sed "s/ *\$//" actual.raw >actual &&
+ test_cmp expect.uncolored actual
+'
+
+test_expect_failure 'log --graph with unrelated commit and octopus child with colors' '
+ test_config log.graphColors red,green,yellow,blue,magenta,cyan &&
+ cat >expect.colors <<-\EOF &&
+ * after-initial
+ <RED>|<RESET> * after-merge
+ <RED>|<RESET> *<MAGENTA>-<RESET><MAGENTA>-<RESET><CYAN>-<RESET><CYAN>.<RESET> octopus-merge
+ <RED>|<RESET> <YELLOW>|<RESET><BLUE>\<RESET> <MAGENTA>\<RESET> <CYAN>\<RESET>
+ <RED>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> <MAGENTA>|<RESET> * 4
+ <RED>|<RESET> <YELLOW>|<RESET><RED>_<RESET><BLUE>|<RESET><RED>_<RESET><MAGENTA>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><YELLOW>|<RESET> <BLUE>|<RESET> <MAGENTA>|<RESET>
+ <RED>|<RESET> <YELLOW>|<RESET> <BLUE>|<RESET> * 3
+ <RED>|<RESET> <YELLOW>|<RESET><RED>_<RESET><BLUE>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><YELLOW>|<RESET> <BLUE>|<RESET>
+ <RED>|<RESET> <YELLOW>|<RESET> * 2
+ <RED>|<RESET> <YELLOW>|<RESET><RED>/<RESET>
+ <RED>|<RESET><RED>/<RESET><YELLOW>|<RESET>
+ <RED>|<RESET> * 1
+ <RED>|<RESET><RED>/<RESET>
+ * initial
+ EOF
+ git log --color=always --graph --date-order --pretty=tformat:%s after-initial after-merge >actual.colors.raw &&
test_decode_color <actual.colors.raw | sed "s/ *\$//" >actual.colors &&
test_cmp expect.colors actual.colors
'
+
test_done
diff --git a/t/t5004-archive-corner-cases.sh b/t/t5004-archive-corner-cases.sh
index 271eb5a1fd..3e7b23cb32 100755
--- a/t/t5004-archive-corner-cases.sh
+++ b/t/t5004-archive-corner-cases.sh
@@ -204,4 +204,23 @@ test_expect_success EXPENSIVE,LONG_IS_64BIT,UNZIP,UNZIP_ZIP64_SUPPORT,ZIPINFO \
grep $size big.lst
'
+build_tree() {
+ perl -e '
+ my $hash = $ARGV[0];
+ foreach my $order (2..6) {
+ $first = 10 ** $order;
+ foreach my $i (-13..-9) {
+ my $name = "a" x ($first + $i);
+ print "100644 blob $hash\t$name\n"
+ }
+ }
+ ' "$1"
+}
+
+test_expect_success 'tar archive with long paths' '
+ blob=$(echo foo | git hash-object -w --stdin) &&
+ tree=$(build_tree $blob | git mktree) &&
+ git archive -o long_paths.tar $tree
+'
+
test_done
diff --git a/t/t5307-pack-missing-commit.sh b/t/t5307-pack-missing-commit.sh
index dacb440b27..f4338abb78 100755
--- a/t/t5307-pack-missing-commit.sh
+++ b/t/t5307-pack-missing-commit.sh
@@ -24,11 +24,11 @@ test_expect_success 'check corruption' '
'
test_expect_success 'rev-list notices corruption (1)' '
- test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git rev-list HEAD
+ test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git -c core.commitGraph=false rev-list HEAD
'
test_expect_success 'rev-list notices corruption (2)' '
- test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git rev-list --objects HEAD
+ test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git -c core.commitGraph=false rev-list --objects HEAD
'
test_expect_success 'pack-objects notices corruption' '
diff --git a/t/t5318-commit-graph.sh b/t/t5318-commit-graph.sh
index 22cb9d6643..d42b3efe39 100755
--- a/t/t5318-commit-graph.sh
+++ b/t/t5318-commit-graph.sh
@@ -23,11 +23,10 @@ test_expect_success 'write graph with no packs' '
test_path_is_missing info/commit-graph
'
-test_expect_success 'close with correct error on bad input' '
+test_expect_success 'exit with correct error on bad input to --stdin-packs' '
cd "$TRASH_DIRECTORY/full" &&
echo doesnotexist >in &&
- { git commit-graph write --stdin-packs <in 2>stderr; ret=$?; } &&
- test "$ret" = 1 &&
+ test_expect_code 1 git commit-graph write --stdin-packs <in 2>stderr &&
test_i18ngrep "error adding pack" stderr
'
@@ -41,6 +40,15 @@ test_expect_success 'create commits and repack' '
git repack
'
+test_expect_success 'exit with correct error on bad input to --stdin-commits' '
+ cd "$TRASH_DIRECTORY/full" &&
+ echo HEAD | test_expect_code 1 git commit-graph write --stdin-commits 2>stderr &&
+ test_i18ngrep "invalid commit object id" stderr &&
+ # valid tree OID, but not a commit OID
+ git rev-parse HEAD^{tree} | test_expect_code 1 git commit-graph write --stdin-commits 2>stderr &&
+ test_i18ngrep "invalid commit object id" stderr
+'
+
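For contrast with the error cases pinned down above, the normal feed for --stdin-commits is one commit object id per line. A rough sketch (illustrative, not part of the patch):

	git rev-parse HEAD HEAD~1 | git commit-graph write --stdin-commits

Anything on stdin that is not a commit id, such as the tree id or the literal string HEAD used in the tests, makes the command exit 1 with "invalid commit object id".
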
graph_git_two_modes() {
git -c core.commitGraph=true $1 >output
git -c core.commitGraph=false $1 >expect
@@ -116,6 +124,42 @@ test_expect_success 'Add more commits' '
git repack
'
+test_expect_success 'commit-graph write progress off for redirected stderr' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph write 2>err &&
+ test_line_count = 0 err
+'
+
+test_expect_success 'commit-graph write force progress on for stderr' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph write --progress 2>err &&
+ test_file_not_empty err
+'
+
+test_expect_success 'commit-graph write with the --no-progress option' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph write --no-progress 2>err &&
+ test_line_count = 0 err
+'
+
+test_expect_success 'commit-graph verify progress off for redirected stderr' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph verify 2>err &&
+ test_line_count = 0 err
+'
+
+test_expect_success 'commit-graph verify force progress on for stderr' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph verify --progress 2>err &&
+ test_file_not_empty err
+'
+
+test_expect_success 'commit-graph verify with the --no-progress option' '
+ cd "$TRASH_DIRECTORY/full" &&
+ git commit-graph verify --no-progress 2>err &&
+ test_line_count = 0 err
+'
+
# Current graph structure:
#
# __M3___
@@ -577,4 +621,47 @@ test_expect_success 'get_commit_tree_in_graph works for non-the_repository' '
test_cmp expect actual
'
+test_expect_success 'corrupt commit-graph write (broken parent)' '
+ rm -rf repo &&
+ git init repo &&
+ (
+ cd repo &&
+ empty="$(git mktree </dev/null)" &&
+ cat >broken <<-EOF &&
+ tree $empty
+ parent 0000000000000000000000000000000000000000
+ author whatever <whatever@example.com> 1234 -0000
+ committer whatever <whatever@example.com> 1234 -0000
+
+ broken commit
+ EOF
+ broken="$(git hash-object -w -t commit --literally broken)" &&
+ git commit-tree -p "$broken" -m "good commit" "$empty" >good &&
+ test_must_fail git commit-graph write --stdin-commits \
+ <good 2>test_err &&
+ test_i18ngrep "unable to parse commit" test_err
+ )
+'
+
+test_expect_success 'corrupt commit-graph write (missing tree)' '
+ rm -rf repo &&
+ git init repo &&
+ (
+ cd repo &&
+ tree="$(git mktree </dev/null)" &&
+ cat >broken <<-EOF &&
+ parent 0000000000000000000000000000000000000000
+ author whatever <whatever@example.com> 1234 -0000
+ committer whatever <whatever@example.com> 1234 -0000
+
+ broken commit
+ EOF
+ broken="$(git hash-object -w -t commit --literally broken)" &&
+ git commit-tree -p "$broken" -m "good" "$tree" >good &&
+ test_must_fail git commit-graph write --stdin-commits \
+ <good 2>test_err &&
+ test_i18ngrep "unable to get tree for" test_err
+ )
+'
+
test_done
diff --git a/t/t5324-split-commit-graph.sh b/t/t5324-split-commit-graph.sh
index 99f4ef4c19..115aabd141 100755
--- a/t/t5324-split-commit-graph.sh
+++ b/t/t5324-split-commit-graph.sh
@@ -8,6 +8,7 @@ GIT_TEST_COMMIT_GRAPH=0
test_expect_success 'setup repo' '
git init &&
git config core.commitGraph true &&
+ git config gc.writeCommitGraph false &&
infodir=".git/objects/info" &&
graphdir="$infodir/commit-graphs" &&
test_oid_init
@@ -319,7 +320,7 @@ test_expect_success 'add octopus merge' '
git merge commits/3 commits/4 &&
git branch merge/octopus &&
git commit-graph write --reachable --split &&
- git commit-graph verify 2>err &&
+ git commit-graph verify --progress 2>err &&
test_line_count = 3 err &&
test_i18ngrep ! warning err &&
test_line_count = 3 $graphdir/commit-graph-chain
@@ -334,6 +335,7 @@ test_expect_success 'split across alternate where alternate is not split' '
git clone --no-hardlinks . alt-split &&
(
cd alt-split &&
+ rm -f .git/objects/info/commit-graph &&
echo "$(pwd)"/../.git/objects >.git/objects/info/alternates &&
test_commit 18 &&
git commit-graph write --reachable --split &&
diff --git a/t/t5500-fetch-pack.sh b/t/t5500-fetch-pack.sh
index 1c71c0ec77..6b97923964 100755
--- a/t/t5500-fetch-pack.sh
+++ b/t/t5500-fetch-pack.sh
@@ -708,13 +708,22 @@ do
# file with scheme
for p in file
do
- test_expect_success "fetch-pack --diag-url $p://$h/$r" '
+ test_expect_success !MINGW "fetch-pack --diag-url $p://$h/$r" '
check_prot_path $p://$h/$r $p "/$r"
'
+ test_expect_success MINGW "fetch-pack --diag-url $p://$h/$r" '
+ check_prot_path $p://$h/$r $p "//$h/$r"
+ '
+ test_expect_success MINGW "fetch-pack --diag-url $p:///$r" '
+ check_prot_path $p:///$r $p "/$r"
+ '
# No "/~" -> "~" conversion for file
- test_expect_success "fetch-pack --diag-url $p://$h/~$r" '
+ test_expect_success !MINGW "fetch-pack --diag-url $p://$h/~$r" '
check_prot_path $p://$h/~$r $p "/~$r"
'
+ test_expect_success MINGW "fetch-pack --diag-url $p://$h/~$r" '
+ check_prot_path $p://$h/~$r $p "//$h/~$r"
+ '
done
# file without scheme
for h in nohost nohost:12 [::1] [::1]:23 [ [:aa
@@ -783,6 +792,44 @@ test_expect_success 'clone shallow since selects no commits' '
)
'
+# A few subtle things about the request in this test:
+#
+# - the server must have commit-graphs present and enabled
+#
+# - the history is such that our want/have share a common ancestor ("base"
+# here)
+#
+# - we send only a single have, which is fewer than a normal client would
+# send. This ensures that we don't parse "base" up front with
+# parse_object(), but rather traverse to it as a parent while deciding if we
+# can stop the "have" negotiation, and call parse_commit(). The former
+#   sees the actual object data and so always loads the tree oid, whereas the
+# latter will try to load it lazily.
+#
+# - we must use protocol v2, because it handles the "have" negotiation before
+# processing the shallow directives
+#
+test_expect_success 'shallow since with commit graph and already-seen commit' '
+ test_create_repo shallow-since-graph &&
+ (
+ cd shallow-since-graph &&
+ test_commit base &&
+ test_commit master &&
+ git checkout -b other HEAD^ &&
+ test_commit other &&
+ git commit-graph write --reachable &&
+ git config core.commitGraph true &&
+
+ GIT_PROTOCOL=version=2 git upload-pack . <<-EOF >/dev/null
+ 0012command=fetch
+ 00010013deepen-since 1
+ 0032want $(git rev-parse other)
+ 0032have $(git rev-parse master)
+ 0000
+ EOF
+ )
+'
+
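The here-doc in the test above hand-rolls a protocol v2 request out of pkt-lines: the first four hex digits of each line give the total length including the 4-byte prefix itself, "0001" is a delimiter packet and "0000" a flush packet. Roughly (illustrative arithmetic, not part of the patch):

	# "0012command=fetch"    -> 4 + length of "command=fetch\n" (14)  = 18 = 0x12
	# "0013deepen-since 1"   -> 4 + length of "deepen-since 1\n" (15) = 19 = 0x13
	# "0032want <40-hex id>" -> 4 + 5 ("want ") + 40 + 1 (LF)         = 50 = 0x32
	payload="command=fetch"                          # hypothetical payload
	printf "%04x%s\n" $((4 + ${#payload} + 1)) "$payload"
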
test_expect_success 'shallow clone exclude tag two' '
test_create_repo shallow-exclude &&
(
@@ -920,4 +967,7 @@ test_expect_success 'fetch with --filter=blob:limit=0 and HTTP' '
fetch_filter_blob_limit_zero "$HTTPD_DOCUMENT_ROOT_PATH/server" "$HTTPD_URL/smart/server"
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5510-fetch.sh b/t/t5510-fetch.sh
index 139f7106f7..ecabbe1616 100755
--- a/t/t5510-fetch.sh
+++ b/t/t5510-fetch.sh
@@ -570,6 +570,19 @@ test_expect_success 'LHS of refspec follows ref disambiguation rules' '
)
'
+test_expect_success 'fetch.writeCommitGraph' '
+ git clone three write &&
+ (
+ cd three &&
+ test_commit new
+ ) &&
+ (
+ cd write &&
+ git -c fetch.writeCommitGraph fetch origin &&
+ test_path_is_file .git/objects/info/commit-graphs/commit-graph-chain
+ )
+'
+
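The test above relies on fetch.writeCommitGraph writing a split commit-graph after a successful fetch. A minimal sketch, assuming an existing clone (illustrative, not part of the patch):

	git config fetch.writeCommitGraph true
	git fetch origin
	ls .git/objects/info/commit-graphs/   # commit-graph-chain plus graph files
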
# configured prune tests
set_config_tristate () {
@@ -902,6 +915,29 @@ test_expect_success C_LOCALE_OUTPUT 'fetch compact output' '
test_cmp expect actual
'
+test_expect_success '--no-show-forced-updates' '
+ mkdir forced-updates &&
+ (
+ cd forced-updates &&
+ git init &&
+ test_commit 1 &&
+ test_commit 2
+ ) &&
+ git clone forced-updates forced-update-clone &&
+ git clone forced-updates no-forced-update-clone &&
+ git -C forced-updates reset --hard HEAD~1 &&
+ (
+ cd forced-update-clone &&
+ git fetch --show-forced-updates origin 2>output &&
+ test_i18ngrep "(forced update)" output
+ ) &&
+ (
+ cd no-forced-update-clone &&
+ git fetch --no-show-forced-updates origin 2>output &&
+ test_i18ngrep ! "(forced update)" output
+ )
+'
+
setup_negotiation_tip () {
SERVER="$1"
URL="$2"
@@ -978,27 +1014,7 @@ test_expect_success '--negotiation-tip limits "have" lines sent with HTTP protoc
check_negotiation_tip
'
-test_expect_success '--no-show-forced-updates' '
- mkdir forced-updates &&
- (
- cd forced-updates &&
- git init &&
- test_commit 1 &&
- test_commit 2
- ) &&
- git clone forced-updates forced-update-clone &&
- git clone forced-updates no-forced-update-clone &&
- git -C forced-updates reset --hard HEAD~1 &&
- (
- cd forced-update-clone &&
- git fetch --show-forced-updates origin 2>output &&
- test_i18ngrep "(forced update)" output
- ) &&
- (
- cd no-forced-update-clone &&
- git fetch --no-show-forced-updates origin 2>output &&
- ! test_i18ngrep "(forced update)" output
- )
-'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
test_done
diff --git a/t/t5514-fetch-multiple.sh b/t/t5514-fetch-multiple.sh
index 5426d4b5ab..de8e2f1531 100755
--- a/t/t5514-fetch-multiple.sh
+++ b/t/t5514-fetch-multiple.sh
@@ -183,4 +183,15 @@ test_expect_success 'git fetch --all --tags' '
test_cmp expect test8/output
'
+test_expect_success 'parallel' '
+ git remote add one ./bogus1 &&
+ git remote add two ./bogus2 &&
+
+ test_must_fail env GIT_TRACE="$PWD/trace" \
+ git fetch --jobs=2 --multiple one two 2>err &&
+ grep "preparing to run up to 2 tasks" trace &&
+ test_i18ngrep "could not fetch .one.*128" err &&
+ test_i18ngrep "could not fetch .two.*128" err
+'
+
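The 'parallel' test drives multi-remote fetching with two intentionally bogus remotes, only to show that both fetch tasks are spawned and both fail. Outside the test, the same behaviour looks roughly like this (illustrative; assumes real remotes named one and two, and fetch.parallel is assumed to be the matching configuration name):

	git fetch --jobs=2 --multiple one two   # run up to 2 fetch tasks at once
	git config fetch.parallel 2             # assumed config counterpart of --jobs
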
test_done
diff --git a/t/t5515-fetch-merge-logic.sh b/t/t5515-fetch-merge-logic.sh
index e55d8474ef..961eb35c99 100755
--- a/t/t5515-fetch-merge-logic.sh
+++ b/t/t5515-fetch-merge-logic.sh
@@ -12,9 +12,6 @@ GIT_TEST_PROTOCOL_VERSION=
. ./test-lib.sh
-LF='
-'
-
test_expect_success setup '
GIT_AUTHOR_DATE="2006-06-26 00:00:00 +0000" &&
GIT_COMMITTER_DATE="2006-06-26 00:00:00 +0000" &&
diff --git a/t/t5517-push-mirror.sh b/t/t5517-push-mirror.sh
index c05a661400..e4edd56404 100755
--- a/t/t5517-push-mirror.sh
+++ b/t/t5517-push-mirror.sh
@@ -265,4 +265,14 @@ test_expect_success 'remote.foo.mirror=no has no effect' '
'
+test_expect_success 'push to mirrored repository with refspec fails' '
+ mk_repo_pair &&
+ (
+ cd master &&
+ echo one >foo && git add foo && git commit -m one &&
+ git config --add remote.up.mirror true &&
+ test_must_fail git push up master
+ )
+'
+
test_done
diff --git a/t/t5537-fetch-shallow.sh b/t/t5537-fetch-shallow.sh
index 66f0b64d39..97a67728ca 100755
--- a/t/t5537-fetch-shallow.sh
+++ b/t/t5537-fetch-shallow.sh
@@ -255,4 +255,7 @@ test_expect_success 'shallow fetches check connectivity before writing shallow f
git -C client fsck
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5541-http-push-smart.sh b/t/t5541-http-push-smart.sh
index b86ddb60f2..92bac43257 100755
--- a/t/t5541-http-push-smart.sh
+++ b/t/t5541-http-push-smart.sh
@@ -262,7 +262,7 @@ test_expect_success TTY 'push shows progress when stderr is a tty' '
cd "$ROOT_PATH"/test_repo_clone &&
test_commit noisy &&
test_terminal git push >output 2>&1 &&
- test_i18ngrep "Writing objects" output
+ test_i18ngrep "^Writing objects" output
'
test_expect_success TTY 'push --quiet silences status and progress' '
@@ -277,7 +277,7 @@ test_expect_success TTY 'push --no-progress silences progress but not status' '
test_commit no-progress &&
test_terminal git push --no-progress >output 2>&1 &&
test_i18ngrep "^To http" output &&
- test_i18ngrep ! "Writing objects" output
+ test_i18ngrep ! "^Writing objects" output
'
test_expect_success 'push --progress shows progress to non-tty' '
@@ -285,7 +285,7 @@ test_expect_success 'push --progress shows progress to non-tty' '
test_commit progress &&
git push --progress >output 2>&1 &&
test_i18ngrep "^To http" output &&
- test_i18ngrep "Writing objects" output
+ test_i18ngrep "^Writing objects" output
'
test_expect_success 'http push gives sane defaults to reflog' '
diff --git a/t/t5545-push-options.sh b/t/t5545-push-options.sh
index 6d1d59c9b1..04b34c4de1 100755
--- a/t/t5545-push-options.sh
+++ b/t/t5545-push-options.sh
@@ -278,4 +278,7 @@ test_expect_success 'push options keep quoted characters intact (http)' '
test_cmp expect "$HTTPD_DOCUMENT_ROOT_PATH"/upstream.git/hooks/pre-receive.push_options
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5552-skipping-fetch-negotiator.sh b/t/t5552-skipping-fetch-negotiator.sh
index 8a14be51a1..f70cbcc9ca 100755
--- a/t/t5552-skipping-fetch-negotiator.sh
+++ b/t/t5552-skipping-fetch-negotiator.sh
@@ -60,29 +60,6 @@ test_expect_success 'commits with no parents are sent regardless of skip distanc
have_not_sent c6 c4 c3
'
-test_expect_success 'unknown fetch.negotiationAlgorithm values error out' '
- rm -rf server client trace &&
- git init server &&
- test_commit -C server to_fetch &&
-
- git init client &&
- test_commit -C client on_client &&
- git -C client checkout on_client &&
-
- test_config -C client fetch.negotiationAlgorithm invalid &&
- test_must_fail git -C client fetch "$(pwd)/server" 2>err &&
- test_i18ngrep "unknown fetch negotiation algorithm" err &&
-
- # Explicit "default" value
- test_config -C client fetch.negotiationAlgorithm default &&
- git -C client -c fetch.negotiationAlgorithm=default fetch "$(pwd)/server" &&
-
- # Implementation detail: If there is nothing to fetch, we will not error out
- test_config -C client fetch.negotiationAlgorithm invalid &&
- git -C client fetch "$(pwd)/server" 2>err &&
- test_i18ngrep ! "unknown fetch negotiation algorithm" err
-'
-
test_expect_success 'when two skips collide, favor the larger one' '
rm -rf server client trace &&
git init server &&
diff --git a/t/t5553-set-upstream.sh b/t/t5553-set-upstream.sh
new file mode 100755
index 0000000000..81975ad8f9
--- /dev/null
+++ b/t/t5553-set-upstream.sh
@@ -0,0 +1,178 @@
+#!/bin/sh
+
+test_description='"git fetch/pull --set-upstream" basic tests.'
+. ./test-lib.sh
+
+check_config () {
+ printf "%s\n" "$2" "$3" >"expect.$1" &&
+ {
+ git config "branch.$1.remote" && git config "branch.$1.merge"
+ } >"actual.$1" &&
+ test_cmp "expect.$1" "actual.$1"
+}
+
+check_config_missing () {
+ test_expect_code 1 git config "branch.$1.remote" &&
+ test_expect_code 1 git config "branch.$1.merge"
+}
+
+clear_config () {
+ for branch in "$@"; do
+ test_might_fail git config --unset-all "branch.$branch.remote"
+ test_might_fail git config --unset-all "branch.$branch.merge"
+ done
+}
+
+ensure_fresh_upstream () {
+ rm -rf parent && git init --bare parent
+}
+
+test_expect_success 'setup bare parent fetch' '
+ ensure_fresh_upstream &&
+ git remote add upstream parent
+'
+
+test_expect_success 'setup commit on master and other fetch' '
+ test_commit one &&
+ git push upstream master &&
+ git checkout -b other &&
+ test_commit two &&
+ git push upstream other
+'
+
+# tests for fetch --set-upstream
+
+test_expect_success 'fetch --set-upstream does not set upstream w/o branch' '
+ clear_config master other &&
+ git checkout master &&
+ git fetch --set-upstream upstream &&
+ check_config_missing master &&
+ check_config_missing other
+'
+
+test_expect_success 'fetch --set-upstream upstream master sets branch master but not other' '
+ clear_config master other &&
+ git fetch --set-upstream upstream master &&
+ check_config master upstream refs/heads/master &&
+ check_config_missing other
+'
+
+test_expect_success 'fetch --set-upstream upstream other sets branch master' '
+ clear_config master other &&
+ git fetch --set-upstream upstream other &&
+ check_config master upstream refs/heads/other &&
+ check_config_missing other
+'
+
+test_expect_success 'fetch --set-upstream master:other2 does not set the branch other2' '
+ clear_config other2 &&
+ git fetch --set-upstream upstream master:other2 &&
+ check_config_missing other2
+'
+
+test_expect_success 'fetch --set-upstream http://nosuchdomain.example.com fails with invalid url' '
+	# master is deliberately not cleared; we check that the value set by the previous test is left untouched
+ clear_config other other2 &&
+ test_must_fail git fetch --set-upstream http://nosuchdomain.example.com &&
+ check_config master upstream refs/heads/other &&
+ check_config_missing other &&
+ check_config_missing other2
+'
+
+test_expect_success 'fetch --set-upstream with valid URL sets upstream to URL' '
+ clear_config other other2 &&
+ url="file://'"$PWD"'" &&
+ git fetch --set-upstream "$url" &&
+ check_config master "$url" HEAD &&
+ check_config_missing other &&
+ check_config_missing other2
+'
+
+# tests for pull --set-upstream
+
+test_expect_success 'setup bare parent pull' '
+ git remote rm upstream &&
+ ensure_fresh_upstream &&
+ git remote add upstream parent
+'
+
+test_expect_success 'setup commit on master and other pull' '
+ test_commit three &&
+ git push --tags upstream master &&
+ test_commit four &&
+ git push upstream other
+'
+
+test_expect_success 'pull --set-upstream upstream master sets branch master but not other' '
+ clear_config master other &&
+ git pull --set-upstream upstream master &&
+ check_config master upstream refs/heads/master &&
+ check_config_missing other
+'
+
+test_expect_success 'pull --set-upstream master:other2 does not set the branch other2' '
+ clear_config other2 &&
+ git pull --set-upstream upstream master:other2 &&
+ check_config_missing other2
+'
+
+test_expect_success 'pull --set-upstream upstream other sets branch master' '
+ clear_config master other &&
+ git pull --set-upstream upstream other &&
+ check_config master upstream refs/heads/other &&
+ check_config_missing other
+'
+
+test_expect_success 'pull --set-upstream upstream tag does not set the tag' '
+ clear_config three &&
+ git pull --tags --set-upstream upstream three &&
+ check_config_missing three
+'
+
+test_expect_success 'pull --set-upstream http://nosuchdomain.example.com fails with invalid url' '
+	# master is deliberately not cleared; we check that the value set by the previous test is left untouched
+ clear_config other other2 three &&
+ test_must_fail git pull --set-upstream http://nosuchdomain.example.com &&
+ check_config master upstream refs/heads/other &&
+ check_config_missing other &&
+ check_config_missing other2 &&
+ check_config_missing three
+'
+
+test_expect_success 'pull --set-upstream upstream HEAD sets branch HEAD' '
+ clear_config master other &&
+ git pull --set-upstream upstream HEAD &&
+ check_config master upstream HEAD &&
+ git checkout other &&
+ git pull --set-upstream upstream HEAD &&
+ check_config other upstream HEAD
+'
+
+test_expect_success 'pull --set-upstream upstream with more than one branch does nothing' '
+ clear_config master three &&
+ git pull --set-upstream upstream master three &&
+ check_config_missing master &&
+ check_config_missing three
+'
+
+test_expect_success 'pull --set-upstream with valid URL sets upstream to URL' '
+ clear_config master other other2 &&
+ git checkout master &&
+ url="file://'"$PWD"'" &&
+ git pull --set-upstream "$url" &&
+ check_config master "$url" HEAD &&
+ check_config_missing other &&
+ check_config_missing other2
+'
+
+test_expect_success 'pull --set-upstream with valid URL and branch sets branch' '
+ clear_config master other other2 &&
+ git checkout master &&
+ url="file://'"$PWD"'" &&
+ git pull --set-upstream "$url" master &&
+ check_config master "$url" refs/heads/master &&
+ check_config_missing other &&
+ check_config_missing other2
+'
+
+test_done
diff --git a/t/t5601-clone.sh b/t/t5601-clone.sh
index 37d76808d4..ad8c41176e 100755
--- a/t/t5601-clone.sh
+++ b/t/t5601-clone.sh
@@ -434,7 +434,6 @@ test_expect_success 'double quoted plink.exe in GIT_SSH_COMMAND' '
expect_ssh "-v -P 123" myhost src
'
-SQ="'"
test_expect_success 'single quoted plink.exe in GIT_SSH_COMMAND' '
copy_ssh_wrapper_as "$TRASH_DIRECTORY/plink.exe" &&
GIT_SSH_COMMAND="$SQ$TRASH_DIRECTORY/plink.exe$SQ -v" \
@@ -654,7 +653,8 @@ partial_clone () {
git -C client fsck &&
# Ensure that unneeded blobs are not inadvertently fetched.
- test_config -C client extensions.partialclone "not a remote" &&
+ test_config -C client remote.origin.promisor "false" &&
+ git -C client config --unset remote.origin.partialclonefilter &&
test_must_fail git -C client cat-file -e "$HASH1" &&
# But this blob was fetched, because clone performs an initial checkout
@@ -739,4 +739,7 @@ test_expect_success 'partial clone using HTTP' '
partial_clone "$HTTPD_DOCUMENT_ROOT_PATH/server" "$HTTPD_URL/smart/server"
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5607-clone-bundle.sh b/t/t5607-clone-bundle.sh
index 2a0fb15cf1..b7a3fdf02d 100755
--- a/t/t5607-clone-bundle.sh
+++ b/t/t5607-clone-bundle.sh
@@ -83,4 +83,15 @@ test_expect_success 'failed bundle creation does not leave cruft' '
test_path_is_missing fail.bundle.lock
'
+test_expect_success 'fetch SHA-1 from bundle' '
+ test_create_repo foo &&
+ test_commit -C foo x &&
+ git -C foo bundle create tip.bundle -1 master &&
+ git -C foo rev-parse HEAD >hash &&
+
+ # Exercise to ensure that fetching a SHA-1 from a bundle works with no
+ # errors
+ git fetch --no-tags foo/tip.bundle "$(cat hash)"
+'
+
test_done
diff --git a/t/t5616-partial-clone.sh b/t/t5616-partial-clone.sh
index b91ef548f8..79f7b65f8c 100755
--- a/t/t5616-partial-clone.sh
+++ b/t/t5616-partial-clone.sh
@@ -42,8 +42,8 @@ test_expect_success 'do partial clone 1' '
test_cmp expect_1.oids observed.oids &&
test "$(git -C pc1 config --local core.repositoryformatversion)" = "1" &&
- test "$(git -C pc1 config --local extensions.partialclone)" = "origin" &&
- test "$(git -C pc1 config --local core.partialclonefilter)" = "blob:none"
+ test "$(git -C pc1 config --local remote.origin.promisor)" = "true" &&
+ test "$(git -C pc1 config --local remote.origin.partialclonefilter)" = "blob:none"
'
# checkout master to force dynamic object fetch of blobs at HEAD.
@@ -208,6 +208,25 @@ test_expect_success 'use fsck before and after manually fetching a missing subtr
test_cmp unique_types.expected unique_types.observed
'
+test_expect_success 'implicitly construct combine: filter with repeated flags' '
+ GIT_TRACE=$(pwd)/trace git clone --bare \
+ --filter=blob:none --filter=tree:1 \
+ "file://$(pwd)/srv.bare" pc2 &&
+ grep "trace:.* git pack-objects .*--filter=combine:blob:none+tree:1" \
+ trace &&
+ git -C pc2 rev-list --objects --missing=allow-any HEAD >objects &&
+
+ # We should have gotten some root trees.
+ grep " $" objects &&
+ # Should not have gotten any non-root trees or blobs.
+ ! grep " ." objects &&
+
+ xargs -n 1 git -C pc2 cat-file -t <objects >types &&
+ sort -u types >unique_types.actual &&
+ test_write_lines commit tree >unique_types.expected &&
+ test_cmp unique_types.expected unique_types.actual
+'
+
test_expect_success 'partial clone fetches blobs pointed to by refs even if normally filtered out' '
rm -rf src dst &&
git init src &&
@@ -241,6 +260,42 @@ test_expect_success 'fetch what is specified on CLI even if already promised' '
! grep "?$(cat blob)" missing_after
'
+test_expect_success 'setup src repo for sparse filter' '
+ git init sparse-src &&
+ git -C sparse-src config --local uploadpack.allowfilter 1 &&
+ git -C sparse-src config --local uploadpack.allowanysha1inwant 1 &&
+ test_commit -C sparse-src one &&
+ test_commit -C sparse-src two &&
+ echo /one.t >sparse-src/only-one &&
+ git -C sparse-src add . &&
+ git -C sparse-src commit -m "add sparse checkout files"
+'
+
+test_expect_success 'partial clone with sparse filter succeeds' '
+ rm -rf dst.git &&
+ git clone --no-local --bare \
+ --filter=sparse:oid=master:only-one \
+ sparse-src dst.git &&
+ (
+ cd dst.git &&
+ git rev-list --objects --missing=print HEAD >out &&
+ grep "^$(git rev-parse HEAD:one.t)" out &&
+ grep "^?$(git rev-parse HEAD:two.t)" out
+ )
+'
+
+test_expect_success 'partial clone with unresolvable sparse filter fails cleanly' '
+ rm -rf dst.git &&
+ test_must_fail git clone --no-local --bare \
+ --filter=sparse:oid=master:no-such-name \
+ sparse-src dst.git 2>err &&
+ test_i18ngrep "unable to access sparse blob in .master:no-such-name" err &&
+ test_must_fail git clone --no-local --bare \
+ --filter=sparse:oid=master \
+ sparse-src dst.git 2>err &&
+ test_i18ngrep "unable to parse sparse filter data in" err
+'
+
. "$TEST_DIRECTORY"/lib-httpd.sh
start_httpd
@@ -417,4 +472,7 @@ test_expect_success 'tolerate server sending REF_DELTA against missing promisor
! test -e "$HTTPD_ROOT_PATH/one-time-sed"
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5700-protocol-v1.sh b/t/t5700-protocol-v1.sh
index 7c9511c593..2571eb90b7 100755
--- a/t/t5700-protocol-v1.sh
+++ b/t/t5700-protocol-v1.sh
@@ -292,4 +292,7 @@ test_expect_success 'push with http:// using protocol v1' '
grep "git< version 1" log
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5702-protocol-v2.sh b/t/t5702-protocol-v2.sh
index 011b81d4fc..ae9175cedf 100755
--- a/t/t5702-protocol-v2.sh
+++ b/t/t5702-protocol-v2.sh
@@ -631,6 +631,19 @@ test_expect_success 'fetch with http:// using protocol v2' '
grep "git< version 2" log
'
+test_expect_success 'fetch with http:// by hash without tag following with protocol v2 does not list refs' '
+ test_when_finished "rm -f log" &&
+
+ test_commit -C "$HTTPD_DOCUMENT_ROOT_PATH/http_parent" two_a &&
+ git -C "$HTTPD_DOCUMENT_ROOT_PATH/http_parent" rev-parse two_a >two_a_hash &&
+
+ GIT_TRACE_PACKET="$(pwd)/log" git -C http_child -c protocol.version=2 \
+ fetch --no-tags origin $(cat two_a_hash) &&
+
+ grep "fetch< version 2" log &&
+ ! grep "fetch> command=ls-refs" log
+'
+
test_expect_success 'fetch from namespaced repo respects namespaces' '
test_when_finished "rm -f log" &&
@@ -723,4 +736,7 @@ test_expect_success 'when server does not send "ready", expect FLUSH' '
test_i18ngrep "expected no other sections to be sent after no .ready." err
'
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5703-upload-pack-ref-in-want.sh b/t/t5703-upload-pack-ref-in-want.sh
index de4b6106ef..3a2c143c6d 100755
--- a/t/t5703-upload-pack-ref-in-want.sh
+++ b/t/t5703-upload-pack-ref-in-want.sh
@@ -157,106 +157,6 @@ test_expect_success 'want-ref with ref we already have commit for' '
check_output
'
-. "$TEST_DIRECTORY"/lib-httpd.sh
-start_httpd
-
-REPO="$HTTPD_DOCUMENT_ROOT_PATH/repo"
-LOCAL_PRISTINE="$(pwd)/local_pristine"
-
-test_expect_success 'setup repos for change-while-negotiating test' '
- (
- git init "$REPO" &&
- cd "$REPO" &&
- >.git/git-daemon-export-ok &&
- test_commit m1 &&
- git tag -d m1 &&
-
- # Local repo with many commits (so that negotiation will take
- # more than 1 request/response pair)
- git clone "http://127.0.0.1:$LIB_HTTPD_PORT/smart/repo" "$LOCAL_PRISTINE" &&
- cd "$LOCAL_PRISTINE" &&
- git checkout -b side &&
- test_commit_bulk --id=s 33 &&
-
- # Add novel commits to upstream
- git checkout master &&
- cd "$REPO" &&
- test_commit m2 &&
- test_commit m3 &&
- git tag -d m2 m3
- ) &&
- git -C "$LOCAL_PRISTINE" remote set-url origin "http://127.0.0.1:$LIB_HTTPD_PORT/one_time_sed/repo" &&
- git -C "$LOCAL_PRISTINE" config protocol.version 2
-'
-
-inconsistency () {
- # Simulate that the server initially reports $2 as the ref
- # corresponding to $1, and after that, $1 as the ref corresponding to
- # $1. This corresponds to the real-life situation where the server's
- # repository appears to change during negotiation, for example, when
- # different servers in a load-balancing arrangement serve (stateless)
- # RPCs during a single negotiation.
- printf "s/%s/%s/" \
- $(git -C "$REPO" rev-parse $1 | tr -d "\n") \
- $(git -C "$REPO" rev-parse $2 | tr -d "\n") \
- >"$HTTPD_ROOT_PATH/one-time-sed"
-}
-
-test_expect_success 'server is initially ahead - no ref in want' '
- git -C "$REPO" config uploadpack.allowRefInWant false &&
- rm -rf local &&
- cp -r "$LOCAL_PRISTINE" local &&
- inconsistency master 1234567890123456789012345678901234567890 &&
- test_must_fail git -C local fetch 2>err &&
- test_i18ngrep "fatal: remote error: upload-pack: not our ref" err
-'
-
-test_expect_success 'server is initially ahead - ref in want' '
- git -C "$REPO" config uploadpack.allowRefInWant true &&
- rm -rf local &&
- cp -r "$LOCAL_PRISTINE" local &&
- inconsistency master 1234567890123456789012345678901234567890 &&
- git -C local fetch &&
-
- git -C "$REPO" rev-parse --verify master >expected &&
- git -C local rev-parse --verify refs/remotes/origin/master >actual &&
- test_cmp expected actual
-'
-
-test_expect_success 'server is initially behind - no ref in want' '
- git -C "$REPO" config uploadpack.allowRefInWant false &&
- rm -rf local &&
- cp -r "$LOCAL_PRISTINE" local &&
- inconsistency master "master^" &&
- git -C local fetch &&
-
- git -C "$REPO" rev-parse --verify "master^" >expected &&
- git -C local rev-parse --verify refs/remotes/origin/master >actual &&
- test_cmp expected actual
-'
-
-test_expect_success 'server is initially behind - ref in want' '
- git -C "$REPO" config uploadpack.allowRefInWant true &&
- rm -rf local &&
- cp -r "$LOCAL_PRISTINE" local &&
- inconsistency master "master^" &&
- git -C local fetch &&
-
- git -C "$REPO" rev-parse --verify "master" >expected &&
- git -C local rev-parse --verify refs/remotes/origin/master >actual &&
- test_cmp expected actual
-'
-
-test_expect_success 'server loses a ref - ref in want' '
- git -C "$REPO" config uploadpack.allowRefInWant true &&
- rm -rf local &&
- cp -r "$LOCAL_PRISTINE" local &&
- echo "s/master/raster/" >"$HTTPD_ROOT_PATH/one-time-sed" &&
- test_must_fail git -C local fetch 2>err &&
-
- test_i18ngrep "fatal: remote error: unknown ref refs/heads/raster" err
-'
-
REPO="$(pwd)/repo"
LOCAL_PRISTINE="$(pwd)/local_pristine"
@@ -372,4 +272,108 @@ test_expect_success 'fetching with wildcard that matches multiple refs' '
grep "want-ref refs/heads/o/bar" log
'
+. "$TEST_DIRECTORY"/lib-httpd.sh
+start_httpd
+
+REPO="$HTTPD_DOCUMENT_ROOT_PATH/repo"
+LOCAL_PRISTINE="$(pwd)/local_pristine"
+
+test_expect_success 'setup repos for change-while-negotiating test' '
+ (
+ git init "$REPO" &&
+ cd "$REPO" &&
+ >.git/git-daemon-export-ok &&
+ test_commit m1 &&
+ git tag -d m1 &&
+
+ # Local repo with many commits (so that negotiation will take
+ # more than 1 request/response pair)
+ rm -rf "$LOCAL_PRISTINE" &&
+ git clone "http://127.0.0.1:$LIB_HTTPD_PORT/smart/repo" "$LOCAL_PRISTINE" &&
+ cd "$LOCAL_PRISTINE" &&
+ git checkout -b side &&
+ test_commit_bulk --id=s 33 &&
+
+ # Add novel commits to upstream
+ git checkout master &&
+ cd "$REPO" &&
+ test_commit m2 &&
+ test_commit m3 &&
+ git tag -d m2 m3
+ ) &&
+ git -C "$LOCAL_PRISTINE" remote set-url origin "http://127.0.0.1:$LIB_HTTPD_PORT/one_time_sed/repo" &&
+ git -C "$LOCAL_PRISTINE" config protocol.version 2
+'
+
+inconsistency () {
+	# Simulate a server that initially reports $2 as the ref
+	# corresponding to $1, and from then on reports the true value of
+	# $1. This corresponds to the real-life situation where the server's
+ # repository appears to change during negotiation, for example, when
+ # different servers in a load-balancing arrangement serve (stateless)
+ # RPCs during a single negotiation.
+ printf "s/%s/%s/" \
+ $(git -C "$REPO" rev-parse $1 | tr -d "\n") \
+ $(git -C "$REPO" rev-parse $2 | tr -d "\n") \
+ >"$HTTPD_ROOT_PATH/one-time-sed"
+}
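+
+# For example, "inconsistency master master^" writes a one-shot expression of
+# the form "s/<oid of master>/<oid of master^>/" to
+# "$HTTPD_ROOT_PATH/one-time-sed"; the one_time_sed endpoint applies it to a
+# single response and then discards it, hence the name.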
+
+test_expect_success 'server is initially ahead - no ref in want' '
+ git -C "$REPO" config uploadpack.allowRefInWant false &&
+ rm -rf local &&
+ cp -r "$LOCAL_PRISTINE" local &&
+ inconsistency master 1234567890123456789012345678901234567890 &&
+ test_must_fail git -C local fetch 2>err &&
+ test_i18ngrep "fatal: remote error: upload-pack: not our ref" err
+'
+
+test_expect_success 'server is initially ahead - ref in want' '
+ git -C "$REPO" config uploadpack.allowRefInWant true &&
+ rm -rf local &&
+ cp -r "$LOCAL_PRISTINE" local &&
+ inconsistency master 1234567890123456789012345678901234567890 &&
+ git -C local fetch &&
+
+ git -C "$REPO" rev-parse --verify master >expected &&
+ git -C local rev-parse --verify refs/remotes/origin/master >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'server is initially behind - no ref in want' '
+ git -C "$REPO" config uploadpack.allowRefInWant false &&
+ rm -rf local &&
+ cp -r "$LOCAL_PRISTINE" local &&
+ inconsistency master "master^" &&
+ git -C local fetch &&
+
+ git -C "$REPO" rev-parse --verify "master^" >expected &&
+ git -C local rev-parse --verify refs/remotes/origin/master >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'server is initially behind - ref in want' '
+ git -C "$REPO" config uploadpack.allowRefInWant true &&
+ rm -rf local &&
+ cp -r "$LOCAL_PRISTINE" local &&
+ inconsistency master "master^" &&
+ git -C local fetch &&
+
+ git -C "$REPO" rev-parse --verify "master" >expected &&
+ git -C local rev-parse --verify refs/remotes/origin/master >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'server loses a ref - ref in want' '
+ git -C "$REPO" config uploadpack.allowRefInWant true &&
+ rm -rf local &&
+ cp -r "$LOCAL_PRISTINE" local &&
+ echo "s/master/raster/" >"$HTTPD_ROOT_PATH/one-time-sed" &&
+ test_must_fail git -C local fetch 2>err &&
+
+ test_i18ngrep "fatal: remote error: unknown ref refs/heads/raster" err
+'
+
+# DO NOT add non-httpd-specific tests here, because the last part of this
+# test script is only executed when httpd is available and enabled.
+
test_done
diff --git a/t/t5801-remote-helpers.sh b/t/t5801-remote-helpers.sh
index 2d6c4a281e..121e5c6edb 100755
--- a/t/t5801-remote-helpers.sh
+++ b/t/t5801-remote-helpers.sh
@@ -247,7 +247,6 @@ clean_mark () {
test_expect_success 'proper failure checks for fetching' '
(cd local &&
test_must_fail env GIT_REMOTE_TESTGIT_FAILURE=1 git fetch 2>error &&
- cat error &&
test_i18ngrep -q "error while running fast-import" error
)
'
diff --git a/t/t6000-rev-list-misc.sh b/t/t6000-rev-list-misc.sh
index 52a9e38d66..b8cf82349b 100755
--- a/t/t6000-rev-list-misc.sh
+++ b/t/t6000-rev-list-misc.sh
@@ -140,4 +140,12 @@ test_expect_success '--header shows a NUL after each commit' '
test_cmp expect actual
'
+test_expect_success 'rev-list --end-of-options' '
+ git update-ref refs/heads/--output=yikes HEAD &&
+ git rev-list --end-of-options --output=yikes >actual &&
+ test_path_is_missing yikes &&
+ git rev-list HEAD >expect &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t6006-rev-list-format.sh b/t/t6006-rev-list-format.sh
index da113d975b..cfb74d0e03 100755
--- a/t/t6006-rev-list-format.sh
+++ b/t/t6006-rev-list-format.sh
@@ -501,9 +501,8 @@ test_expect_success 'reflog identity' '
'
test_expect_success 'oneline with empty message' '
- git commit -m "dummy" --allow-empty &&
- git commit -m "dummy" --allow-empty &&
- git filter-branch --msg-filter "sed -e s/dummy//" HEAD^^.. &&
+ git commit --allow-empty --cleanup=verbatim -m "$LF" &&
+ git commit --allow-empty --allow-empty-message &&
git rev-list --oneline HEAD >test.txt &&
test_line_count = 5 test.txt &&
git rev-list --oneline --graph HEAD >testg.txt &&
diff --git a/t/t6011-rev-list-with-bad-commit.sh b/t/t6011-rev-list-with-bad-commit.sh
index 545b461e51..bad02cf5b8 100755
--- a/t/t6011-rev-list-with-bad-commit.sh
+++ b/t/t6011-rev-list-with-bad-commit.sh
@@ -42,7 +42,7 @@ test_expect_success 'corrupt second commit object' \
'
test_expect_success 'rev-list should fail' '
- test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git rev-list --all > /dev/null
+ test_must_fail env GIT_TEST_COMMIT_GRAPH=0 git -c core.commitGraph=false rev-list --all > /dev/null
'
test_expect_success 'git repack _MUST_ fail' \
diff --git a/t/t6036-recursive-corner-cases.sh b/t/t6036-recursive-corner-cases.sh
index d23b948f27..7fddcc8c73 100755
--- a/t/t6036-recursive-corner-cases.sh
+++ b/t/t6036-recursive-corner-cases.sh
@@ -1562,6 +1562,7 @@ test_expect_success 'check nested conflicts' '
cd nested_conflicts &&
git clean -f &&
+ MASTER=$(git rev-parse --short master) &&
git checkout L2^0 &&
# Merge must fail; there is a conflict
@@ -1582,7 +1583,7 @@ test_expect_success 'check nested conflicts' '
git cat-file -p R1:a >theirs &&
test_must_fail git merge-file --diff3 \
-L "Temporary merge branch 1" \
- -L "merged common ancestors" \
+ -L "$MASTER" \
-L "Temporary merge branch 2" \
ours \
base \
@@ -1594,7 +1595,7 @@ test_expect_success 'check nested conflicts' '
git cat-file -p R1:b >theirs &&
test_must_fail git merge-file --diff3 \
-L "Temporary merge branch 1" \
- -L "merged common ancestors" \
+ -L "$MASTER" \
-L "Temporary merge branch 2" \
ours \
base \
@@ -1732,6 +1733,7 @@ test_expect_success 'check virtual merge base with nested conflicts' '
(
cd virtual_merge_base_has_nested_conflicts &&
+ MASTER=$(git rev-parse --short master) &&
git checkout L3^0 &&
# Merge must fail; there is a conflict
@@ -1760,7 +1762,7 @@ test_expect_success 'check virtual merge base with nested conflicts' '
cp left merged-once &&
test_must_fail git merge-file --diff3 \
-L "Temporary merge branch 1" \
- -L "merged common ancestors" \
+ -L "$MASTER" \
-L "Temporary merge branch 2" \
merged-once \
base \
diff --git a/t/t6047-diff3-conflict-markers.sh b/t/t6047-diff3-conflict-markers.sh
new file mode 100755
index 0000000000..860542aad0
--- /dev/null
+++ b/t/t6047-diff3-conflict-markers.sh
@@ -0,0 +1,202 @@
+#!/bin/sh
+
+test_description='recursive merge diff3 style conflict markers'
+
+. ./test-lib.sh
+
+# Setup:
+# L1
+# \
+# ?
+# /
+# R1
+#
+# Where:
+# L1 and R1 both have a file named 'content' but have no common history
+#
+
+test_expect_success 'setup no merge base' '
+ test_create_repo no_merge_base &&
+ (
+ cd no_merge_base &&
+
+ git checkout -b L &&
+ test_commit A content A &&
+
+ git checkout --orphan R &&
+ test_commit B content B
+ )
+'
+
+test_expect_success 'check no merge base' '
+ (
+ cd no_merge_base &&
+
+ git checkout L^0 &&
+
+ test_must_fail git -c merge.conflictstyle=diff3 merge --allow-unrelated-histories -s recursive R^0 &&
+
+ grep "|||||| empty tree" content
+ )
+'
+
+# Setup:
+# L1
+# / \
+# master ?
+# \ /
+# R1
+#
+# Where:
+# L1 and R1 have modified the same file ('content') in conflicting ways
+#
+
+test_expect_success 'setup unique merge base' '
+ test_create_repo unique_merge_base &&
+ (
+ cd unique_merge_base &&
+
+ test_commit base content "1
+2
+3
+4
+5
+" &&
+
+ git branch L &&
+ git branch R &&
+
+ git checkout L &&
+ test_commit L content "1
+2
+3
+4
+5
+7" &&
+
+ git checkout R &&
+ git rm content &&
+ test_commit R renamed "1
+2
+3
+4
+5
+six"
+ )
+'
+
+test_expect_success 'check unique merge base' '
+ (
+ cd unique_merge_base &&
+
+ git checkout L^0 &&
+ MASTER=$(git rev-parse --short master) &&
+
+ test_must_fail git -c merge.conflictstyle=diff3 merge -s recursive R^0 &&
+
+ grep "|||||| $MASTER:content" renamed
+ )
+'
+
+# Setup:
+# L1---L2--L3
+# / \ / \
+# master X1 ?
+# \ / \ /
+# R1---R2--R3
+#
+# Where:
+# commits L1 and R1 have modified the same file in non-conflicting ways
+# X1 is an auto-generated merge-base used when merging L1 and R1
+# commits L2 and R2 are merges of R1 and L1 into L1 and R1, respectively
+# commits L3 and R3 both modify 'content' in conflicting ways
+#
+
+test_expect_success 'setup multiple merge bases' '
+ test_create_repo multiple_merge_bases &&
+ (
+ cd multiple_merge_bases &&
+
+ test_commit initial content "1
+2
+3
+4
+5" &&
+
+ git branch L &&
+ git branch R &&
+
+ # Create L1
+ git checkout L &&
+ test_commit L1 content "0
+1
+2
+3
+4
+5" &&
+
+ # Create R1
+ git checkout R &&
+ test_commit R1 content "1
+2
+3
+4
+5
+6" &&
+
+ # Create L2
+ git checkout L &&
+ git merge R1 &&
+
+ # Create R2
+ git checkout R &&
+ git merge L1 &&
+
+ # Create L3
+ git checkout L &&
+ test_commit L3 content "0
+1
+2
+3
+4
+5
+A" &&
+
+ # Create R3
+ git checkout R &&
+ git rm content &&
+ test_commit R3 renamed "0
+2
+3
+4
+5
+six"
+ )
+'
+
+test_expect_success 'check multiple merge bases' '
+ (
+ cd multiple_merge_bases &&
+
+ git checkout L^0 &&
+
+ test_must_fail git -c merge.conflictstyle=diff3 merge -s recursive R^0 &&
+
+ grep "|||||| merged common ancestors:content" renamed
+ )
+'
+
+test_expect_success 'rebase describes fake ancestor base' '
+ test_create_repo rebase &&
+ (
+ cd rebase &&
+ test_commit base file &&
+ test_commit master file &&
+ git checkout -b side HEAD^ &&
+ test_commit side file &&
+ test_must_fail git -c merge.conflictstyle=diff3 rebase master &&
+ grep "||||||| constructed merge base" file
+ )
+'
+
+test_done
diff --git a/t/t6112-rev-list-filters-objects.sh b/t/t6112-rev-list-filters-objects.sh
index acd7f5ab80..de0e5a5d36 100755
--- a/t/t6112-rev-list-filters-objects.sh
+++ b/t/t6112-rev-list-filters-objects.sh
@@ -278,7 +278,19 @@ test_expect_success 'verify skipping tree iteration when not collecting omits' '
test_line_count = 2 actual &&
# Make sure no other trees were considered besides the root.
- ! grep "Skipping contents of tree [^.]" filter_trace
+ ! grep "Skipping contents of tree [^.]" filter_trace &&
+
+ # Try this again with "combine:". If both sub-filters are skipping
+ # trees, the composite filter should also skip trees. This is not
+	# important unless the user does combine:tree:X+tree:Y, or unless another
+	# filter besides "tree:" that can skip trees is implemented in the future.
+ GIT_TRACE=1 git -C r3 rev-list \
+ --objects --filter=combine:tree:1+tree:3 HEAD 2>filter_trace &&
+
+ # Only skip the dir1/ tree, which is shared between the two commits.
+ grep "Skipping contents of tree " filter_trace >actual &&
+ test_write_lines "Skipping contents of tree dir1/..." >expected &&
+ test_cmp expected actual
'
# Test tree:# filters.
@@ -330,6 +342,148 @@ test_expect_success 'verify tree:3 includes everything expected' '
test_line_count = 10 actual
'
+test_expect_success 'combine:... for a simple combination' '
+ git -C r3 rev-list --objects --filter=combine:tree:2+blob:none HEAD \
+ >actual &&
+
+ expect_has HEAD "" &&
+ expect_has HEAD~1 "" &&
+ expect_has HEAD dir1 &&
+
+ # There are also 2 commit objects
+ test_line_count = 5 actual &&
+
+ cp actual expected &&
+
+ # Try again using repeated --filter - this is equivalent to a manual
+ # combine with "combine:...+..."
+ git -C r3 rev-list --objects --filter=combine:tree:2 \
+ --filter=blob:none HEAD >actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'combine:... with URL encoding' '
+ git -C r3 rev-list --objects \
+ --filter=combine:tree%3a2+blob:%6Eon%65 HEAD >actual &&
+
+ expect_has HEAD "" &&
+ expect_has HEAD~1 "" &&
+ expect_has HEAD dir1 &&
+
+ # There are also 2 commit objects
+ test_line_count = 5 actual
+'
+
+expect_invalid_filter_spec () {
+ spec="$1" &&
+ err="$2" &&
+
+ test_must_fail git -C r3 rev-list --objects --filter="$spec" HEAD \
+ >actual 2>actual_stderr &&
+ test_must_be_empty actual &&
+ test_i18ngrep "$err" actual_stderr
+}
+
+test_expect_success 'combine:... while URL-encoding things that should not be' '
+ expect_invalid_filter_spec combine%3Atree:2+blob:none \
+ "invalid filter-spec"
+'
+
+test_expect_success 'combine: with nothing after the :' '
+ expect_invalid_filter_spec combine: "expected something after combine:"
+'
+
+test_expect_success 'parse error in first sub-filter in combine:' '
+ expect_invalid_filter_spec combine:tree:asdf+blob:none \
+ "expected .tree:<depth>."
+'
+
+test_expect_success 'combine:... with non-encoded reserved chars' '
+ expect_invalid_filter_spec combine:tree:2+sparse:@xyz \
+ "must escape char in sub-filter-spec: .@." &&
+ expect_invalid_filter_spec combine:tree:2+sparse:\` \
+ "must escape char in sub-filter-spec: .\`." &&
+ expect_invalid_filter_spec combine:tree:2+sparse:~abc \
+ "must escape char in sub-filter-spec: .\~."
+'
+
+test_expect_success 'validate err msg for "combine:<valid-filter>+"' '
+ expect_invalid_filter_spec combine:tree:2+ "expected .tree:<depth>."
+'
+
+test_expect_success 'combine:... with edge-case hex digits: Ff Aa 0 9' '
+ git -C r3 rev-list --objects --filter="combine:tree:2+bl%6Fb:n%6fne" \
+ HEAD >actual &&
+ test_line_count = 5 actual &&
+ git -C r3 rev-list --objects --filter="combine:tree%3A2+blob%3anone" \
+ HEAD >actual &&
+ test_line_count = 5 actual &&
+ git -C r3 rev-list --objects --filter="combine:tree:%30" HEAD >actual &&
+ test_line_count = 2 actual &&
+ git -C r3 rev-list --objects --filter="combine:tree:%39+blob:none" \
+ HEAD >actual &&
+ test_line_count = 5 actual
+'
+
+test_expect_success 'add sparse pattern blobs whose paths have reserved chars' '
+ cp r3/pattern r3/pattern1+renamed% &&
+ cp r3/pattern "r3/p;at%ter+n" &&
+ cp r3/pattern r3/^~pattern &&
+ git -C r3 add pattern1+renamed% "p;at%ter+n" ^~pattern &&
+ git -C r3 commit -m "add sparse pattern files with reserved chars"
+'
+
+test_expect_success 'combine:... with more than two sub-filters' '
+ git -C r3 rev-list --objects \
+ --filter=combine:tree:3+blob:limit=40+sparse:oid=master:pattern \
+ HEAD >actual &&
+
+ expect_has HEAD "" &&
+ expect_has HEAD~1 "" &&
+ expect_has HEAD~2 "" &&
+ expect_has HEAD dir1 &&
+ expect_has HEAD dir1/sparse1 &&
+ expect_has HEAD dir1/sparse2 &&
+
+ # Should also have 3 commits
+ test_line_count = 9 actual &&
+
+ # Try again, this time making sure the last sub-filter is only
+ # URL-decoded once.
+ cp actual expect &&
+
+ git -C r3 rev-list --objects \
+ --filter=combine:tree:3+blob:limit=40+sparse:oid=master:pattern1%2brenamed%25 \
+ HEAD >actual &&
+ test_cmp expect actual &&
+
+ # Use the same composite filter again, but with a pattern file name that
+ # requires encoding multiple characters, and use implicit filter
+ # combining.
+ test_when_finished "rm -f trace1" &&
+ GIT_TRACE=$(pwd)/trace1 git -C r3 rev-list --objects \
+ --filter=tree:3 --filter=blob:limit=40 \
+ --filter=sparse:oid="master:p;at%ter+n" \
+ HEAD >actual &&
+
+ test_cmp expect actual &&
+ grep "Add to combine filter-spec: sparse:oid=master:p%3bat%25ter%2bn" \
+ trace1 &&
+
+ # Repeat the above test, but this time, the characters to encode are in
+ # the LHS of the combined filter.
+ test_when_finished "rm -f trace2" &&
+ GIT_TRACE=$(pwd)/trace2 git -C r3 rev-list --objects \
+ --filter=sparse:oid=master:^~pattern \
+ --filter=tree:3 --filter=blob:limit=40 \
+ HEAD >actual &&
+
+ test_cmp expect actual &&
+ grep "Add to combine filter-spec: sparse:oid=master:%5e%7epattern" \
+ trace2
+'
+
# Test provisional omit collection logic with a repo that has objects appearing
# at multiple depths - first deeper than the filter's threshold, then shallow.
@@ -373,6 +527,37 @@ test_expect_success 'verify skipping tree iteration when collecting omits' '
test_cmp expect actual
'
+test_expect_success 'setup r5' '
+ git init r5 &&
+ mkdir -p r5/subdir &&
+
+ echo 1 >r5/short-root &&
+ echo 12345 >r5/long-root &&
+ echo a >r5/subdir/short-subdir &&
+ echo abcde >r5/subdir/long-subdir &&
+
+ git -C r5 add short-root long-root subdir &&
+ git -C r5 commit -m "commit msg"
+'
+
+test_expect_success 'verify collecting omits in combined: filter' '
+ # Note that this test guards against the naive implementation of simply
+ # giving both filters the same "omits" set and expecting it to
+ # automatically merge them.
+ git -C r5 rev-list --objects --quiet --filter-print-omitted \
+ --filter=combine:tree:2+blob:limit=3 HEAD >actual &&
+
+ # Expect 0 trees/commits, 3 blobs omitted (all blobs except short-root)
+ omitted_1=$(echo 12345 | git hash-object --stdin) &&
+ omitted_2=$(echo a | git hash-object --stdin) &&
+ omitted_3=$(echo abcde | git hash-object --stdin) &&
+
+ grep ~$omitted_1 actual &&
+ grep ~$omitted_2 actual &&
+ grep ~$omitted_3 actual &&
+ test_line_count = 3 actual
+'
+
# Test tree:<depth> where a tree is iterated to twice - once where a subentry is
# too deep to be included, and again where the blob inside it is shallow enough
# to be included. This makes sure we don't use LOFR_MARK_SEEN incorrectly (we
@@ -441,11 +626,4 @@ test_expect_success 'expand blob limit in protocol' '
grep "blob:limit=1024" trace
'
-test_expect_success 'expand tree depth limit in protocol' '
- GIT_TRACE_PACKET="$(pwd)/tree_trace" git -c protocol.version=2 clone \
- --filter=tree:0k "file://$(pwd)/r2" tree &&
- ! grep "tree:0k" tree_trace &&
- grep "tree:0" tree_trace
-'
-
test_done
diff --git a/t/t6120-describe.sh b/t/t6120-describe.sh
index 2b883d8174..45047d0a72 100755
--- a/t/t6120-describe.sh
+++ b/t/t6120-describe.sh
@@ -424,4 +424,19 @@ test_expect_success 'describe complains about missing object' '
test_must_fail git describe $ZERO_OID
'
+test_expect_success 'name-rev a rev shortly after epoch' '
+ test_when_finished "git checkout master" &&
+
+ git checkout --orphan no-timestamp-underflow &&
+ # Any date closer to epoch than the CUTOFF_DATE_SLOP constant
+ # in builtin/name-rev.c.
+ GIT_COMMITTER_DATE="@1234 +0000" \
+ git commit -m "committer date shortly after epoch" &&
+ old_commit_oid=$(git rev-parse HEAD) &&
+
+ echo "$old_commit_oid no-timestamp-underflow" >expect &&
+ git name-rev $old_commit_oid >actual &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t6300-for-each-ref.sh b/t/t6300-for-each-ref.sh
index ab69aa176d..9c910ce746 100755
--- a/t/t6300-for-each-ref.sh
+++ b/t/t6300-for-each-ref.sh
@@ -526,6 +526,25 @@ test_expect_success 'Check ambiguous head and tag refs II (loose)' '
test_cmp expected actual
'
+test_expect_success 'create tag without tagger' '
+ git tag -a -m "Broken tag" taggerless &&
+ git tag -f taggerless $(git cat-file tag taggerless |
+ sed -e "/^tagger /d" |
+ git hash-object --stdin -w -t tag)
+'
+
+test_atom refs/tags/taggerless type 'commit'
+test_atom refs/tags/taggerless tag 'taggerless'
+test_atom refs/tags/taggerless tagger ''
+test_atom refs/tags/taggerless taggername ''
+test_atom refs/tags/taggerless taggeremail ''
+test_atom refs/tags/taggerless taggerdate ''
+test_atom refs/tags/taggerless committer ''
+test_atom refs/tags/taggerless committername ''
+test_atom refs/tags/taggerless committeremail ''
+test_atom refs/tags/taggerless committerdate ''
+test_atom refs/tags/taggerless subject 'Broken tag'
+
test_expect_success 'an unusual tag with an incomplete line' '
git tag -m "bogo" bogo &&
diff --git a/t/t6501-freshen-objects.sh b/t/t6501-freshen-objects.sh
index 033871ee5f..f30b4849b6 100755
--- a/t/t6501-freshen-objects.sh
+++ b/t/t6501-freshen-objects.sh
@@ -137,7 +137,7 @@ test_expect_success 'do not complain about existing broken links (commit)' '
some message
EOF
commit=$(git hash-object -t commit -w broken-commit) &&
- git gc 2>stderr &&
+ git gc -q 2>stderr &&
verbose git cat-file -e $commit &&
test_must_be_empty stderr
'
@@ -147,7 +147,7 @@ test_expect_success 'do not complain about existing broken links (tree)' '
100644 blob 0000000000000000000000000000000000000003 foo
EOF
tree=$(git mktree --missing <broken-tree) &&
- git gc 2>stderr &&
+ git gc -q 2>stderr &&
git cat-file -e $tree &&
test_must_be_empty stderr
'
@@ -162,7 +162,7 @@ test_expect_success 'do not complain about existing broken links (tag)' '
this is a broken tag
EOF
tag=$(git hash-object -t tag -w broken-tag) &&
- git gc 2>stderr &&
+ git gc -q 2>stderr &&
git cat-file -e $tag &&
test_must_be_empty stderr
'
diff --git a/t/t7009-filter-branch-null-sha1.sh b/t/t7008-filter-branch-null-sha1.sh
index 9ba9f24ad2..9ba9f24ad2 100755
--- a/t/t7009-filter-branch-null-sha1.sh
+++ b/t/t7008-filter-branch-null-sha1.sh
diff --git a/t/t7300-clean.sh b/t/t7300-clean.sh
index a2c45d1902..6e6d24c1c3 100755
--- a/t/t7300-clean.sh
+++ b/t/t7300-clean.sh
@@ -117,6 +117,7 @@ test_expect_success C_LOCALE_OUTPUT 'git clean with relative prefix' '
would_clean=$(
cd docs &&
git clean -n ../src |
+ grep part3 |
sed -n -e "s|^Would remove ||p"
) &&
verbose test "$would_clean" = ../src/part3.c
@@ -129,6 +130,7 @@ test_expect_success C_LOCALE_OUTPUT 'git clean with absolute path' '
would_clean=$(
cd docs &&
git clean -n "$(pwd)/../src" |
+ grep part3 |
sed -n -e "s|^Would remove ||p"
) &&
verbose test "$would_clean" = ../src/part3.c
@@ -547,7 +549,7 @@ test_expect_failure 'nested (non-empty) bare repositories should be cleaned even
test_path_is_missing strange_bare
'
-test_expect_success 'giving path in nested git work tree will remove it' '
+test_expect_success 'giving path in nested git work tree will NOT remove it' '
rm -fr repo &&
mkdir repo &&
(
@@ -559,7 +561,7 @@ test_expect_success 'giving path in nested git work tree will remove it' '
git clean -f -d repo/bar/baz &&
test_path_is_file repo/.git/HEAD &&
test_path_is_dir repo/bar/ &&
- test_path_is_missing repo/bar/baz
+ test_path_is_file repo/bar/baz/hello.world
'
test_expect_success 'giving path to nested .git will not remove it' '
@@ -577,7 +579,7 @@ test_expect_success 'giving path to nested .git will not remove it' '
test_path_is_dir untracked/
'
-test_expect_success 'giving path to nested .git/ will remove contents' '
+test_expect_success 'giving path to nested .git/ will NOT remove contents' '
rm -fr repo untracked &&
mkdir repo untracked &&
(
@@ -587,7 +589,7 @@ test_expect_success 'giving path to nested .git/ will remove contents' '
) &&
git clean -f -d repo/.git/ &&
test_path_is_dir repo/.git &&
- test_dir_is_empty repo/.git &&
+ test_path_is_file repo/.git/HEAD &&
test_path_is_dir untracked/
'
@@ -669,6 +671,60 @@ test_expect_success 'git clean -d skips untracked dirs containing ignored files'
test_path_is_missing foo/b/bb
'
+test_expect_success 'git clean -d skips nested repo containing ignored files' '
+ test_when_finished "rm -rf nested-repo-with-ignored-file" &&
+
+ git init nested-repo-with-ignored-file &&
+ (
+ cd nested-repo-with-ignored-file &&
+ >file &&
+ git add file &&
+ git commit -m Initial &&
+
+ # This file is ignored by a .gitignore rule in the outer repo
+ # added in the previous test.
+ >ignoreme
+ ) &&
+
+ git clean -fd &&
+
+ test_path_is_file nested-repo-with-ignored-file/.git/index &&
+ test_path_is_file nested-repo-with-ignored-file/ignoreme &&
+ test_path_is_file nested-repo-with-ignored-file/file
+'
+
+test_expect_success 'git clean handles being told what to clean' '
+ mkdir -p d1 d2 &&
+ touch d1/ut d2/ut &&
+ git clean -f */ut &&
+ test_path_is_missing d1/ut &&
+ test_path_is_missing d2/ut
+'
+
+test_expect_success 'git clean handles being told what to clean, with -d' '
+ mkdir -p d1 d2 &&
+ touch d1/ut d2/ut &&
+ git clean -ffd */ut &&
+ test_path_is_missing d1/ut &&
+ test_path_is_missing d2/ut
+'
+
+test_expect_success 'git clean works if a glob is passed without -d' '
+ mkdir -p d1 d2 &&
+ touch d1/ut d2/ut &&
+ git clean -f "*ut" &&
+ test_path_is_missing d1/ut &&
+ test_path_is_missing d2/ut
+'
+
+test_expect_success 'git clean works if a glob is passed with -d' '
+ mkdir -p d1 d2 &&
+ touch d1/ut d2/ut &&
+ git clean -ffd "*ut" &&
+ test_path_is_missing d1/ut &&
+ test_path_is_missing d2/ut
+'
+
test_expect_success MINGW 'handle clean & core.longpaths = false nicely' '
test_config core.longpaths false &&
a50=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa &&
diff --git a/t/t7400-submodule-basic.sh b/t/t7400-submodule-basic.sh
index a208cb26e1..691b5fc3bf 100755
--- a/t/t7400-submodule-basic.sh
+++ b/t/t7400-submodule-basic.sh
@@ -527,7 +527,6 @@ test_expect_success 'update --init' '
test_must_fail git config submodule.example.url &&
git submodule update init 2> update.out &&
- cat update.out &&
test_i18ngrep "not initialized" update.out &&
test_must_fail git rev-parse --resolve-git-dir init/.git &&
@@ -545,7 +544,6 @@ test_expect_success 'update --init from subdirectory' '
(
cd sub &&
git submodule update ../init 2>update.out &&
- cat update.out &&
test_i18ngrep "not initialized" update.out &&
test_must_fail git rev-parse --resolve-git-dir ../init/.git &&
diff --git a/t/t7406-submodule-update.sh b/t/t7406-submodule-update.sh
index c973278300..df34c994d2 100755
--- a/t/t7406-submodule-update.sh
+++ b/t/t7406-submodule-update.sh
@@ -158,7 +158,6 @@ test_expect_success 'submodule update --init from and of subdirectory' '
test_i18ncmp expect2 actual2
'
-apos="'";
test_expect_success 'submodule update does not fetch already present commits' '
(cd submodule &&
echo line3 >> file &&
@@ -168,7 +167,7 @@ test_expect_success 'submodule update does not fetch already present commits' '
) &&
(cd super/submodule &&
head=$(git rev-parse --verify HEAD) &&
- echo "Submodule path ${apos}submodule$apos: checked out $apos$head$apos" > ../../expected &&
+ echo "Submodule path ${SQ}submodule$SQ: checked out $SQ$head$SQ" > ../../expected &&
git reset --hard HEAD~1
) &&
(cd super &&
diff --git a/t/t7503-pre-commit-and-pre-merge-commit-hooks.sh b/t/t7503-pre-commit-and-pre-merge-commit-hooks.sh
new file mode 100755
index 0000000000..b3485450a2
--- /dev/null
+++ b/t/t7503-pre-commit-and-pre-merge-commit-hooks.sh
@@ -0,0 +1,281 @@
+#!/bin/sh
+
+test_description='pre-commit and pre-merge-commit hooks'
+
+. ./test-lib.sh
+
+HOOKDIR="$(git rev-parse --git-dir)/hooks"
+PRECOMMIT="$HOOKDIR/pre-commit"
+PREMERGE="$HOOKDIR/pre-merge-commit"
+
+# Prepare sample scripts that write their $0 to actual_hooks
+test_expect_success 'sample script setup' '
+ mkdir -p "$HOOKDIR" &&
+ write_script "$HOOKDIR/success.sample" <<-\EOF &&
+ echo $0 >>actual_hooks
+ exit 0
+ EOF
+ write_script "$HOOKDIR/fail.sample" <<-\EOF &&
+ echo $0 >>actual_hooks
+ exit 1
+ EOF
+ write_script "$HOOKDIR/non-exec.sample" <<-\EOF &&
+ echo $0 >>actual_hooks
+ exit 1
+ EOF
+ chmod -x "$HOOKDIR/non-exec.sample" &&
+ write_script "$HOOKDIR/require-prefix.sample" <<-\EOF &&
+ echo $0 >>actual_hooks
+ test $GIT_PREFIX = "success/"
+ EOF
+ write_script "$HOOKDIR/check-author.sample" <<-\EOF
+ echo $0 >>actual_hooks
+ test "$GIT_AUTHOR_NAME" = "New Author" &&
+ test "$GIT_AUTHOR_EMAIL" = "newauthor@example.com"
+ EOF
+'
+
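+# The hook tests below copy one of these samples to "$PRECOMMIT" or
+# "$PREMERGE", run a commit or merge, and compare actual_hooks (written by
+# the sample) with expected_hooks, or check that actual_hooks is missing
+# when the hook must not run.
+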
+test_expect_success 'root commit' '
+ echo "root" >file &&
+ git add file &&
+ git commit -m "zeroth" &&
+ git checkout -b side &&
+ echo "foo" >foo &&
+ git add foo &&
+ git commit -m "make it non-ff" &&
+ git branch side-orig side &&
+ git checkout master
+'
+
+test_expect_success 'setup conflicting branches' '
+ test_when_finished "git checkout master" &&
+ git checkout -b conflicting-a master &&
+ echo a >conflicting &&
+ git add conflicting &&
+ git commit -m conflicting-a &&
+ git checkout -b conflicting-b master &&
+ echo b >conflicting &&
+ git add conflicting &&
+ git commit -m conflicting-b
+'
+
+test_expect_success 'with no hook' '
+ test_when_finished "rm -f actual_hooks" &&
+ echo "foo" >file &&
+ git add file &&
+ git commit -m "first" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success 'with no hook (merge)' '
+ test_when_finished "rm -f actual_hooks" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success '--no-verify with no hook' '
+ test_when_finished "rm -f actual_hooks" &&
+ echo "bar" >file &&
+ git add file &&
+ git commit --no-verify -m "bar" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success '--no-verify with no hook (merge)' '
+ test_when_finished "rm -f actual_hooks" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge --no-verify -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success 'with succeeding hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" expected_hooks actual_hooks" &&
+ cp "$HOOKDIR/success.sample" "$PRECOMMIT" &&
+ echo "$PRECOMMIT" >expected_hooks &&
+ echo "more" >>file &&
+ git add file &&
+ git commit -m "more" &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success 'with succeeding hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" expected_hooks actual_hooks" &&
+ cp "$HOOKDIR/success.sample" "$PREMERGE" &&
+ echo "$PREMERGE" >expected_hooks &&
+ git checkout side &&
+ git merge -m "merge master" master &&
+ git checkout master &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success 'automatic merge fails; both hooks are available' '
+ test_when_finished "rm -f \"$PREMERGE\" \"$PRECOMMIT\"" &&
+ test_when_finished "rm -f expected_hooks actual_hooks" &&
+ test_when_finished "git checkout master" &&
+ cp "$HOOKDIR/success.sample" "$PREMERGE" &&
+ cp "$HOOKDIR/success.sample" "$PRECOMMIT" &&
+
+ git checkout conflicting-a &&
+ test_must_fail git merge -m "merge conflicting-b" conflicting-b &&
+ test_path_is_missing actual_hooks &&
+
+ echo "$PRECOMMIT" >expected_hooks &&
+ echo a+b >conflicting &&
+ git add conflicting &&
+ git commit -m "resolve conflict" &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success '--no-verify with succeeding hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" actual_hooks" &&
+ cp "$HOOKDIR/success.sample" "$PRECOMMIT" &&
+ echo "even more" >>file &&
+ git add file &&
+ git commit --no-verify -m "even more" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success '--no-verify with succeeding hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" actual_hooks" &&
+ cp "$HOOKDIR/success.sample" "$PREMERGE" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge --no-verify -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success 'with failing hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" expected_hooks actual_hooks" &&
+ cp "$HOOKDIR/fail.sample" "$PRECOMMIT" &&
+ echo "$PRECOMMIT" >expected_hooks &&
+ echo "another" >>file &&
+ git add file &&
+ test_must_fail git commit -m "another" &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success '--no-verify with failing hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" actual_hooks" &&
+ cp "$HOOKDIR/fail.sample" "$PRECOMMIT" &&
+ echo "stuff" >>file &&
+ git add file &&
+ git commit --no-verify -m "stuff" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success 'with failing hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" expected_hooks actual_hooks" &&
+ cp "$HOOKDIR/fail.sample" "$PREMERGE" &&
+ echo "$PREMERGE" >expected_hooks &&
+ git checkout side &&
+ test_must_fail git merge -m "merge master" master &&
+ git checkout master &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success '--no-verify with failing hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" actual_hooks" &&
+ cp "$HOOKDIR/fail.sample" "$PREMERGE" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge --no-verify -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success POSIXPERM 'with non-executable hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" actual_hooks" &&
+ cp "$HOOKDIR/non-exec.sample" "$PRECOMMIT" &&
+ echo "content" >>file &&
+ git add file &&
+ git commit -m "content" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success POSIXPERM '--no-verify with non-executable hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" actual_hooks" &&
+ cp "$HOOKDIR/non-exec.sample" "$PRECOMMIT" &&
+ echo "more content" >>file &&
+ git add file &&
+ git commit --no-verify -m "more content" &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success POSIXPERM 'with non-executable hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" actual_hooks" &&
+ cp "$HOOKDIR/non-exec.sample" "$PREMERGE" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success POSIXPERM '--no-verify with non-executable hook (merge)' '
+ test_when_finished "rm -f \"$PREMERGE\" actual_hooks" &&
+ cp "$HOOKDIR/non-exec.sample" "$PREMERGE" &&
+ git branch -f side side-orig &&
+ git checkout side &&
+ git merge --no-verify -m "merge master" master &&
+ git checkout master &&
+ test_path_is_missing actual_hooks
+'
+
+test_expect_success 'with hook requiring GIT_PREFIX' '
+ test_when_finished "rm -rf \"$PRECOMMIT\" expected_hooks actual_hooks success" &&
+ cp "$HOOKDIR/require-prefix.sample" "$PRECOMMIT" &&
+ echo "$PRECOMMIT" >expected_hooks &&
+ echo "more content" >>file &&
+ git add file &&
+ mkdir success &&
+ (
+ cd success &&
+ git commit -m "hook requires GIT_PREFIX = success/"
+ ) &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success 'with failing hook requiring GIT_PREFIX' '
+ test_when_finished "rm -rf \"$PRECOMMIT\" expected_hooks actual_hooks fail" &&
+ cp "$HOOKDIR/require-prefix.sample" "$PRECOMMIT" &&
+ echo "$PRECOMMIT" >expected_hooks &&
+ echo "more content" >>file &&
+ git add file &&
+ mkdir fail &&
+ (
+ cd fail &&
+ test_must_fail git commit -m "hook must fail"
+ ) &&
+ git checkout -- file &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_expect_success 'check the author in hook' '
+ test_when_finished "rm -f \"$PRECOMMIT\" expected_hooks actual_hooks" &&
+ cp "$HOOKDIR/check-author.sample" "$PRECOMMIT" &&
+ cat >expected_hooks <<-EOF &&
+ $PRECOMMIT
+ $PRECOMMIT
+ $PRECOMMIT
+ EOF
+ test_must_fail git commit --allow-empty -m "by a.u.thor" &&
+ (
+ GIT_AUTHOR_NAME="New Author" &&
+ GIT_AUTHOR_EMAIL="newauthor@example.com" &&
+ export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL &&
+ git commit --allow-empty -m "by new.author via env" &&
+ git show -s
+ ) &&
+ git commit --author="New Author <newauthor@example.com>" \
+ --allow-empty -m "by new.author via command line" &&
+ git show -s &&
+ test_cmp expected_hooks actual_hooks
+'
+
+test_done
diff --git a/t/t7503-pre-commit-hook.sh b/t/t7503-pre-commit-hook.sh
deleted file mode 100755
index 984889b39d..0000000000
--- a/t/t7503-pre-commit-hook.sh
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/bin/sh
-
-test_description='pre-commit hook'
-
-. ./test-lib.sh
-
-test_expect_success 'with no hook' '
-
- echo "foo" > file &&
- git add file &&
- git commit -m "first"
-
-'
-
-test_expect_success '--no-verify with no hook' '
-
- echo "bar" > file &&
- git add file &&
- git commit --no-verify -m "bar"
-
-'
-
-# now install hook that always succeeds
-HOOKDIR="$(git rev-parse --git-dir)/hooks"
-HOOK="$HOOKDIR/pre-commit"
-mkdir -p "$HOOKDIR"
-cat > "$HOOK" <<EOF
-#!/bin/sh
-exit 0
-EOF
-chmod +x "$HOOK"
-
-test_expect_success 'with succeeding hook' '
-
- echo "more" >> file &&
- git add file &&
- git commit -m "more"
-
-'
-
-test_expect_success '--no-verify with succeeding hook' '
-
- echo "even more" >> file &&
- git add file &&
- git commit --no-verify -m "even more"
-
-'
-
-# now a hook that fails
-cat > "$HOOK" <<EOF
-#!/bin/sh
-exit 1
-EOF
-
-test_expect_success 'with failing hook' '
-
- echo "another" >> file &&
- git add file &&
- test_must_fail git commit -m "another"
-
-'
-
-test_expect_success '--no-verify with failing hook' '
-
- echo "stuff" >> file &&
- git add file &&
- git commit --no-verify -m "stuff"
-
-'
-
-chmod -x "$HOOK"
-test_expect_success POSIXPERM 'with non-executable hook' '
-
- echo "content" >> file &&
- git add file &&
- git commit -m "content"
-
-'
-
-test_expect_success POSIXPERM '--no-verify with non-executable hook' '
-
- echo "more content" >> file &&
- git add file &&
- git commit --no-verify -m "more content"
-
-'
-chmod +x "$HOOK"
-
-# a hook that checks $GIT_PREFIX and succeeds inside the
-# success/ subdirectory only
-cat > "$HOOK" <<EOF
-#!/bin/sh
-test \$GIT_PREFIX = success/
-EOF
-
-test_expect_success 'with hook requiring GIT_PREFIX' '
-
- echo "more content" >> file &&
- git add file &&
- mkdir success &&
- (
- cd success &&
- git commit -m "hook requires GIT_PREFIX = success/"
- ) &&
- rmdir success
-'
-
-test_expect_success 'with failing hook requiring GIT_PREFIX' '
-
- echo "more content" >> file &&
- git add file &&
- mkdir fail &&
- (
- cd fail &&
- test_must_fail git commit -m "hook must fail"
- ) &&
- rmdir fail &&
- git checkout -- file
-'
-
-test_expect_success 'check the author in hook' '
- write_script "$HOOK" <<-\EOF &&
- test "$GIT_AUTHOR_NAME" = "New Author" &&
- test "$GIT_AUTHOR_EMAIL" = "newauthor@example.com"
- EOF
- test_must_fail git commit --allow-empty -m "by a.u.thor" &&
- (
- GIT_AUTHOR_NAME="New Author" &&
- GIT_AUTHOR_EMAIL="newauthor@example.com" &&
- export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL &&
- git commit --allow-empty -m "by new.author via env" &&
- git show -s
- ) &&
- git commit --author="New Author <newauthor@example.com>" \
- --allow-empty -m "by new.author via command line" &&
- git show -s
-'
-
-test_done
diff --git a/t/t7505-prepare-commit-msg-hook.sh b/t/t7505-prepare-commit-msg-hook.sh
index ba8bd1b514..94f85cdf83 100755
--- a/t/t7505-prepare-commit-msg-hook.sh
+++ b/t/t7505-prepare-commit-msg-hook.sh
@@ -241,13 +241,7 @@ test_rebase () {
git add b &&
git rebase --continue
) &&
- if test "$mode" = -p # reword amended after pick
- then
- n=18
- else
- n=17
- fi &&
- git log --pretty=%s -g -n$n HEAD@{1} >actual &&
+ git log --pretty=%s -g -n18 HEAD@{1} >actual &&
test_cmp "$TEST_DIRECTORY/t7505/expected-rebase${mode:--i}" actual
'
}
diff --git a/t/t7505/expected-rebase-i b/t/t7505/expected-rebase-i
index c514bdbb94..93bada596e 100644
--- a/t/t7505/expected-rebase-i
+++ b/t/t7505/expected-rebase-i
@@ -7,7 +7,8 @@ message (no editor) [edit rebase-10]
message [fixup rebase-9]
message (no editor) [fixup rebase-8]
message (no editor) [squash rebase-7]
-message [reword rebase-6]
+HEAD [reword rebase-6]
+message (no editor) [reword rebase-6]
message [squash rebase-5]
message (no editor) [fixup rebase-4]
message (no editor) [pick rebase-3]
diff --git a/t/t7512-status-help.sh b/t/t7512-status-help.sh
index e01c285cbf..66d7a62797 100755
--- a/t/t7512-status-help.sh
+++ b/t/t7512-status-help.sh
@@ -733,6 +733,7 @@ test_expect_success 'status when cherry-picking before resolving conflicts' '
On branch cherry_branch
You are currently cherry-picking commit $TO_CHERRY_PICK.
(fix conflicts and run "git cherry-pick --continue")
+ (use "git cherry-pick --skip" to skip this patch)
(use "git cherry-pick --abort" to cancel the cherry-pick operation)
Unmerged paths:
@@ -757,6 +758,7 @@ test_expect_success 'status when cherry-picking after resolving conflicts' '
On branch cherry_branch
You are currently cherry-picking commit $TO_CHERRY_PICK.
(all conflicts fixed: run "git cherry-pick --continue")
+ (use "git cherry-pick --skip" to skip this patch)
(use "git cherry-pick --abort" to cancel the cherry-pick operation)
Changes to be committed:
@@ -778,6 +780,7 @@ test_expect_success 'status when cherry-picking after committing conflict resolu
On branch cherry_branch
Cherry-pick currently in progress.
(run "git cherry-pick --continue" to continue)
+ (use "git cherry-pick --skip" to skip this patch)
(use "git cherry-pick --abort" to cancel the cherry-pick operation)
nothing to commit (use -u to show untracked files)
@@ -835,6 +838,7 @@ test_expect_success 'status while reverting commit (conflicts)' '
On branch master
You are currently reverting commit $TO_REVERT.
(fix conflicts and run "git revert --continue")
+ (use "git revert --skip" to skip this patch)
(use "git revert --abort" to cancel the revert operation)
Unmerged paths:
@@ -855,6 +859,7 @@ test_expect_success 'status while reverting commit (conflicts resolved)' '
On branch master
You are currently reverting commit $TO_REVERT.
(all conflicts fixed: run "git revert --continue")
+ (use "git revert --skip" to skip this patch)
(use "git revert --abort" to cancel the revert operation)
Changes to be committed:
@@ -887,6 +892,7 @@ test_expect_success 'status while reverting after committing conflict resolution
On branch master
Revert currently in progress.
(run "git revert --continue" to continue)
+ (use "git revert --skip" to skip this patch)
(use "git revert --abort" to cancel the revert operation)
nothing to commit (use -u to show untracked files)
diff --git a/t/t7812-grep-icase-non-ascii.sh b/t/t7812-grep-icase-non-ascii.sh
index 0c685d3598..531eb59d57 100755
--- a/t/t7812-grep-icase-non-ascii.sh
+++ b/t/t7812-grep-icase-non-ascii.sh
@@ -53,4 +53,32 @@ test_expect_success REGEX_LOCALE 'pickaxe -i on non-ascii' '
test_cmp expected actual
'
+test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: setup invalid UTF-8 data' '
+ printf "\\200\\n" >invalid-0x80 &&
+ echo "ævar" >expected &&
+ cat expected >>invalid-0x80 &&
+ git add invalid-0x80
+'
+
+test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: grep ASCII from invalid UTF-8 data' '
+ git grep -h "var" invalid-0x80 >actual &&
+ test_cmp expected actual &&
+ git grep -h "(*NO_JIT)var" invalid-0x80 >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: grep non-ASCII from invalid UTF-8 data' '
+ git grep -h "æ" invalid-0x80 >actual &&
+ test_cmp expected actual &&
+	git grep -h "(*NO_JIT)æ" invalid-0x80 >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: grep non-ASCII from invalid UTF-8 data with -i' '
+ test_might_fail git grep -hi "Æ" invalid-0x80 >actual &&
+ test_cmp expected actual &&
+ test_must_fail git grep -hi "(*NO_JIT)Æ" invalid-0x80 &&
+ test_cmp expected actual
+'
+
test_done
diff --git a/t/t7814-grep-recurse-submodules.sh b/t/t7814-grep-recurse-submodules.sh
index a11366b4ce..946f91fa57 100755
--- a/t/t7814-grep-recurse-submodules.sh
+++ b/t/t7814-grep-recurse-submodules.sh
@@ -408,4 +408,25 @@ test_expect_success 'grep --recurse-submodules with submodules without .gitmodul
test_cmp expect actual
'
+reset_and_clean () {
+ git reset --hard &&
+ git clean -fd &&
+ git submodule foreach --recursive 'git reset --hard' &&
+ git submodule foreach --recursive 'git clean -fd'
+}
+
+test_expect_success 'grep --recurse-submodules without --cached considers worktree modifications' '
+ reset_and_clean &&
+ echo "A modified line in submodule" >>submodule/a &&
+ echo "submodule/a:A modified line in submodule" >expect &&
+ git grep --recurse-submodules "A modified line in submodule" >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'grep --recurse-submodules with --cached ignores worktree modifications' '
+ reset_and_clean &&
+ echo "A modified line in submodule" >>submodule/a &&
+ test_must_fail git grep --recurse-submodules --cached "A modified line in submodule" >actual 2>&1 &&
+ test_must_be_empty actual
+'
test_done
diff --git a/t/t7008-grep-binary.sh b/t/t7815-grep-binary.sh
index 2d87c49b75..90ebb64f46 100755
--- a/t/t7008-grep-binary.sh
+++ b/t/t7815-grep-binary.sh
@@ -4,41 +4,6 @@ test_description='git grep in binary files'
. ./test-lib.sh
-nul_match () {
- matches=$1
- flags=$2
- pattern=$3
- pattern_human=$(echo "$pattern" | sed 's/Q/<NUL>/g')
-
- if test "$matches" = 1
- then
- test_expect_success "git grep -f f $flags '$pattern_human' a" "
- printf '$pattern' | q_to_nul >f &&
- git grep -f f $flags a
- "
- elif test "$matches" = 0
- then
- test_expect_success "git grep -f f $flags '$pattern_human' a" "
- printf '$pattern' | q_to_nul >f &&
- test_must_fail git grep -f f $flags a
- "
- elif test "$matches" = T1
- then
- test_expect_failure "git grep -f f $flags '$pattern_human' a" "
- printf '$pattern' | q_to_nul >f &&
- git grep -f f $flags a
- "
- elif test "$matches" = T0
- then
- test_expect_failure "git grep -f f $flags '$pattern_human' a" "
- printf '$pattern' | q_to_nul >f &&
- test_must_fail git grep -f f $flags a
- "
- else
- test_expect_success "PANIC: Test framework error. Unknown matches value $matches" 'false'
- fi
-}
-
test_expect_success 'setup' "
echo 'binaryQfileQm[*]cQ*æQð' | q_to_nul >a &&
git add a &&
@@ -102,72 +67,6 @@ test_expect_failure 'git grep .fi a' '
git grep .fi a
'
-nul_match 1 '-F' 'yQf'
-nul_match 0 '-F' 'yQx'
-nul_match 1 '-Fi' 'YQf'
-nul_match 0 '-Fi' 'YQx'
-nul_match 1 '' 'yQf'
-nul_match 0 '' 'yQx'
-nul_match 1 '' 'æQð'
-nul_match 1 '-F' 'eQm[*]c'
-nul_match 1 '-Fi' 'EQM[*]C'
-
-# Regex patterns that would match but shouldn't with -F
-nul_match 0 '-F' 'yQ[f]'
-nul_match 0 '-F' '[y]Qf'
-nul_match 0 '-Fi' 'YQ[F]'
-nul_match 0 '-Fi' '[Y]QF'
-nul_match 0 '-F' 'æQ[ð]'
-nul_match 0 '-F' '[æ]Qð'
-nul_match 0 '-Fi' 'ÆQ[Ð]'
-nul_match 0 '-Fi' '[Æ]QÐ'
-
-# kwset is disabled on -i & non-ASCII. No way to match non-ASCII \0
-# patterns case-insensitively.
-nul_match T1 '-i' 'ÆQÐ'
-
-# \0 implicitly disables regexes. This is an undocumented internal
-# limitation.
-nul_match T1 '' 'yQ[f]'
-nul_match T1 '' '[y]Qf'
-nul_match T1 '-i' 'YQ[F]'
-nul_match T1 '-i' '[Y]Qf'
-nul_match T1 '' 'æQ[ð]'
-nul_match T1 '' '[æ]Qð'
-nul_match T1 '-i' 'ÆQ[Ð]'
-
-# ... because of \0 implicitly disabling regexes regexes that
-# should/shouldn't match don't do the right thing.
-nul_match T1 '' 'eQm.*cQ'
-nul_match T1 '-i' 'EQM.*cQ'
-nul_match T0 '' 'eQm[*]c'
-nul_match T0 '-i' 'EQM[*]C'
-
-# Due to the REG_STARTEND extension when kwset() is disabled on -i &
-# non-ASCII the string will be matched in its entirety, but the
-# pattern will be cut off at the first \0.
-nul_match 0 '-i' 'NOMATCHQð'
-nul_match T0 '-i' '[Æ]QNOMATCH'
-nul_match T0 '-i' '[æ]QNOMATCH'
-# Matches, but for the wrong reasons, just stops at [æ]
-nul_match 1 '-i' '[Æ]Qð'
-nul_match 1 '-i' '[æ]Qð'
-
-# Ensure that the matcher doesn't regress to something that stops at
-# \0
-nul_match 0 '-F' 'yQ[f]'
-nul_match 0 '-Fi' 'YQ[F]'
-nul_match 0 '' 'yQNOMATCH'
-nul_match 0 '' 'QNOMATCH'
-nul_match 0 '-i' 'YQNOMATCH'
-nul_match 0 '-i' 'QNOMATCH'
-nul_match 0 '-F' 'æQ[ð]'
-nul_match 0 '-Fi' 'ÆQ[Ð]'
-nul_match 0 '' 'yQNÓMATCH'
-nul_match 0 '' 'QNÓMATCH'
-nul_match 0 '-i' 'YQNÓMATCH'
-nul_match 0 '-i' 'QNÓMATCH'
-
test_expect_success 'grep respects binary diff attribute' '
echo text >t &&
git add t &&
diff --git a/t/t7816-grep-binary-pattern.sh b/t/t7816-grep-binary-pattern.sh
new file mode 100755
index 0000000000..60bab291e4
--- /dev/null
+++ b/t/t7816-grep-binary-pattern.sh
@@ -0,0 +1,127 @@
+#!/bin/sh
+
+test_description='git grep with binary pattern files'
+
+. ./lib-gettext.sh
+
+nul_match_internal () {
+ matches=$1
+ prereqs=$2
+ lc_all=$3
+ extra_flags=$4
+ flags=$5
+ pattern=$6
+ pattern_human=$(echo "$pattern" | sed 's/Q/<NUL>/g')
+
+ if test "$matches" = 1
+ then
+ test_expect_success $prereqs "LC_ALL='$lc_all' git grep $extra_flags -f f $flags '$pattern_human' a" "
+ printf '$pattern' | q_to_nul >f &&
+ LC_ALL='$lc_all' git grep $extra_flags -f f $flags a
+ "
+ elif test "$matches" = 0
+ then
+ test_expect_success $prereqs "LC_ALL='$lc_all' git grep $extra_flags -f f $flags '$pattern_human' a" "
+ >stderr &&
+ printf '$pattern' | q_to_nul >f &&
+ test_must_fail env LC_ALL=\"$lc_all\" git grep $extra_flags -f f $flags a 2>stderr &&
+ test_i18ngrep ! 'This is only supported with -P under PCRE v2' stderr
+ "
+ elif test "$matches" = P
+ then
+ test_expect_success $prereqs "error, PCRE v2 only: LC_ALL='$lc_all' git grep -f f $flags '$pattern_human' a" "
+ >stderr &&
+ printf '$pattern' | q_to_nul >f &&
+ test_must_fail env LC_ALL=\"$lc_all\" git grep -f f $flags a 2>stderr &&
+ test_i18ngrep 'This is only supported with -P under PCRE v2' stderr
+ "
+ else
+ test_expect_success "PANIC: Test framework error. Unknown matches value $matches" 'false'
+ fi
+}
+
+nul_match () {
+ matches=$1
+ matches_pcre2=$2
+ matches_pcre2_locale=$3
+ flags=$4
+ pattern=$5
+ pattern_human=$(echo "$pattern" | sed 's/Q/<NUL>/g')
+
+ nul_match_internal "$matches" "" "C" "" "$flags" "$pattern"
+ nul_match_internal "$matches_pcre2" "LIBPCRE2" "C" "-P" "$flags" "$pattern"
+ nul_match_internal "$matches_pcre2_locale" "LIBPCRE2,GETTEXT_LOCALE" "$is_IS_locale" "-P" "$flags" "$pattern"
+}
+
+test_expect_success 'setup' "
+ echo 'binaryQfileQm[*]cQ*æQð' | q_to_nul >a &&
+ git add a &&
+ git commit -m.
+"
+
+# Simple fixed-string matching that can use kwset (no -i && non-ASCII)
+nul_match P P P '-F' 'yQf'
+nul_match P P P '-F' 'yQx'
+nul_match P P P '-Fi' 'YQf'
+nul_match P P P '-Fi' 'YQx'
+nul_match P P 1 '' 'yQf'
+nul_match P P 0 '' 'yQx'
+nul_match P P 1 '' 'æQð'
+nul_match P P P '-F' 'eQm[*]c'
+nul_match P P P '-Fi' 'EQM[*]C'
+
+# Regex patterns that would match but shouldn't with -F
+nul_match P P P '-F' 'yQ[f]'
+nul_match P P P '-F' '[y]Qf'
+nul_match P P P '-Fi' 'YQ[F]'
+nul_match P P P '-Fi' '[Y]QF'
+nul_match P P P '-F' 'æQ[ð]'
+nul_match P P P '-F' '[æ]Qð'
+
+# The -F kwset codepath can't handle -i && non-ASCII...
+nul_match P 1 1 '-i' '[æ]Qð'
+
+# ...PCRE v2 only matches non-ASCII with -i casefolding under UTF-8
+# semantics
+nul_match P P P '-Fi' 'ÆQ[Ð]'
+nul_match P 0 1 '-i' 'ÆQ[Ð]'
+nul_match P 0 1 '-i' '[Æ]QÐ'
+nul_match P 0 1 '-i' '[Æ]Qð'
+nul_match P 0 1 '-i' 'ÆQÐ'
+
+# \0 in regexes can only work with -P & PCRE v2
+nul_match P P 1 '' 'yQ[f]'
+nul_match P P 1 '' '[y]Qf'
+nul_match P P 1 '-i' 'YQ[F]'
+nul_match P P 1 '-i' '[Y]Qf'
+nul_match P P 1 '' 'æQ[ð]'
+nul_match P P 1 '' '[æ]Qð'
+nul_match P P 1 '-i' 'ÆQ[Ð]'
+nul_match P P 1 '' 'eQm.*cQ'
+nul_match P P 1 '-i' 'EQM.*cQ'
+nul_match P P 0 '' 'eQm[*]c'
+nul_match P P 0 '-i' 'EQM[*]C'
+
+# Assert that we're using REG_STARTEND and the pattern doesn't match
+# just because it's cut off at the first \0.
+nul_match P P 0 '-i' 'NOMATCHQð'
+nul_match P P 0 '-i' '[Æ]QNOMATCH'
+nul_match P P 0 '-i' '[æ]QNOMATCH'
+
+# Ensure that the matcher doesn't regress to something that stops at
+# \0
+nul_match P P P '-F' 'yQ[f]'
+nul_match P P P '-Fi' 'YQ[F]'
+nul_match P P 0 '' 'yQNOMATCH'
+nul_match P P 0 '' 'QNOMATCH'
+nul_match P P 0 '-i' 'YQNOMATCH'
+nul_match P P 0 '-i' 'QNOMATCH'
+nul_match P P P '-F' 'æQ[ð]'
+nul_match P P P '-Fi' 'ÆQ[Ð]'
+nul_match P P 1 '-i' 'ÆQ[Ð]'
+nul_match P P 0 '' 'yQNÓMATCH'
+nul_match P P 0 '' 'QNÓMATCH'
+nul_match P P 0 '-i' 'YQNÓMATCH'
+nul_match P P 0 '-i' 'QNÓMATCH'
+
+test_done
diff --git a/t/t9300-fast-import.sh b/t/t9300-fast-import.sh
index 141b7fa35e..e707fb861e 100755
--- a/t/t9300-fast-import.sh
+++ b/t/t9300-fast-import.sh
@@ -85,6 +85,36 @@ test_expect_success 'A: create pack from stdin' '
An annotated tag that annotates a blob.
EOF
+ tag to-be-deleted
+ from :3
+ data <<EOF
+ Another annotated tag that annotates a blob.
+ EOF
+
+ reset refs/tags/to-be-deleted
+ from 0000000000000000000000000000000000000000
+
+ tag nested
+ mark :6
+ from :4
+ data <<EOF
+ Tag of our lovely commit
+ EOF
+
+ reset refs/tags/nested
+ from 0000000000000000000000000000000000000000
+
+ tag nested
+ mark :7
+ from :6
+ data <<EOF
+ Tag of tag of our lovely commit
+ EOF
+
+ alias
+ mark :8
+ to :5
+
INPUT_END
git fast-import --export-marks=marks.out <input &&
git whatchanged master
@@ -157,12 +187,19 @@ test_expect_success 'A: verify tag/series-A-blob' '
test_cmp expect actual
'
+test_expect_success 'A: verify tag deletion is successful' '
+ test_must_fail git rev-parse --verify refs/tags/to-be-deleted
+'
+
test_expect_success 'A: verify marks output' '
cat >expect <<-EOF &&
:2 $(git rev-parse --verify master:file2)
:3 $(git rev-parse --verify master:file3)
:4 $(git rev-parse --verify master:file4)
:5 $(git rev-parse --verify master^0)
+ :6 $(git cat-file tag nested | grep object | cut -d" " -f 2)
+ :7 $(git rev-parse --verify nested)
+ :8 $(git rev-parse --verify master^0)
EOF
test_cmp expect marks.out
'
@@ -2781,7 +2818,6 @@ test_expect_success 'S: filemodify with garbage after mark must fail' '
COMMIT
M 100644 :403x hello.c
EOF
- cat err &&
test_i18ngrep "space after mark" err
'
@@ -2798,7 +2834,6 @@ test_expect_success 'S: filemodify with garbage after inline must fail' '
inline
BLOB
EOF
- cat err &&
test_i18ngrep "nvalid dataref" err
'
@@ -2812,7 +2847,6 @@ test_expect_success 'S: filemodify with garbage after sha1 must fail' '
COMMIT
M 100644 ${sha1}x hello.c
EOF
- cat err &&
test_i18ngrep "space after SHA1" err
'
@@ -2828,7 +2862,6 @@ test_expect_success 'S: notemodify with garbage after mark dataref must fail' '
COMMIT
N :202x :302
EOF
- cat err &&
test_i18ngrep "space after mark" err
'
@@ -2844,7 +2877,6 @@ test_expect_success 'S: notemodify with garbage after inline dataref must fail'
note blob
BLOB
EOF
- cat err &&
test_i18ngrep "nvalid dataref" err
'
@@ -2858,7 +2890,6 @@ test_expect_success 'S: notemodify with garbage after sha1 dataref must fail' '
COMMIT
N ${sha1}x :302
EOF
- cat err &&
test_i18ngrep "space after SHA1" err
'
@@ -2874,7 +2905,6 @@ test_expect_success 'S: notemodify with garbage after mark commit-ish must fail'
COMMIT
N :202 :302x
EOF
- cat err &&
test_i18ngrep "after mark" err
'
@@ -2908,7 +2938,6 @@ test_expect_success 'S: from with garbage after mark must fail' '
EOF
# now evaluate the error
- cat err &&
test_i18ngrep "after mark" err
'
@@ -2928,7 +2957,6 @@ test_expect_success 'S: merge with garbage after mark must fail' '
merge :303x
M 100644 :403 hello.c
EOF
- cat err &&
test_i18ngrep "after mark" err
'
@@ -2944,7 +2972,6 @@ test_expect_success 'S: tag with garbage after mark must fail' '
tag S
TAG
EOF
- cat err &&
test_i18ngrep "after mark" err
'
@@ -2955,7 +2982,6 @@ test_expect_success 'S: cat-blob with garbage after mark must fail' '
test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
cat-blob :403x
EOF
- cat err &&
test_i18ngrep "after mark" err
'
@@ -2966,7 +2992,6 @@ test_expect_success 'S: ls with garbage after mark must fail' '
test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
ls :302x hello.c
EOF
- cat err &&
test_i18ngrep "space after mark" err
'
@@ -2975,7 +3000,6 @@ test_expect_success 'S: ls with garbage after sha1 must fail' '
test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
ls ${sha1}x hello.c
EOF
- cat err &&
test_i18ngrep "space after tree-ish" err
'
diff --git a/t/t9350-fast-export.sh b/t/t9350-fast-export.sh
index b4004e05c2..2e4e214815 100755
--- a/t/t9350-fast-export.sh
+++ b/t/t9350-fast-export.sh
@@ -53,6 +53,33 @@ test_expect_success 'fast-export | fast-import' '
'
+test_expect_success 'fast-export ^muss^{commit} muss' '
+ git fast-export --tag-of-filtered-object=rewrite ^muss^{commit} muss >actual &&
+ cat >expected <<-EOF &&
+ tag muss
+ from $(git rev-parse --verify muss^{commit})
+ $(git cat-file tag muss | grep tagger)
+ data 9
+ valentin
+
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'fast-export --mark-tags ^muss^{commit} muss' '
+ git fast-export --mark-tags --tag-of-filtered-object=rewrite ^muss^{commit} muss >actual &&
+ cat >expected <<-EOF &&
+ tag muss
+ mark :1
+ from $(git rev-parse --verify muss^{commit})
+ $(git cat-file tag muss | grep tagger)
+ data 9
+ valentin
+
+ EOF
+ test_cmp expected actual
+'
+
test_expect_success 'fast-export master~2..master' '
git fast-export master~2..master >actual &&
@@ -513,10 +540,41 @@ test_expect_success 'tree_tag' '
'
# NEEDSWORK: not just check return status, but validate the output
+# Note that these tests DO NOTHING other than print a warning that
+# they are omitting the one tag we asked them to export (because the
+# tags resolve to a tree). They exist just to make sure we do not
+# abort but instead just warn.
test_expect_success 'tree_tag-obj' 'git fast-export tree_tag-obj'
test_expect_success 'tag-obj_tag' 'git fast-export tag-obj_tag'
test_expect_success 'tag-obj_tag-obj' 'git fast-export tag-obj_tag-obj'
+test_expect_success 'handling tags of blobs' '
+ git tag -a -m "Tag of a blob" blobtag $(git rev-parse master:file) &&
+ git fast-export blobtag >actual &&
+ cat >expect <<-EOF &&
+ blob
+ mark :1
+ data 9
+ die Luft
+
+ tag blobtag
+ from :1
+ tagger $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data 14
+ Tag of a blob
+
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'handling nested tags' '
+ git tag -a -m "This is a nested tag" nested muss &&
+ git fast-export --mark-tags nested >output &&
+ grep "^from $ZERO_OID$" output &&
+ grep "^tag nested$" output >tag_lines &&
+ test_line_count = 2 tag_lines
+'
+
test_expect_success 'directory becomes symlink' '
git init dirtosymlink &&
git init result &&
@@ -567,17 +625,15 @@ test_expect_success 'fast-export quotes pathnames' '
'
test_expect_success 'test bidirectionality' '
- >marks-cur &&
- >marks-new &&
git init marks-test &&
- git fast-export --export-marks=marks-cur --import-marks=marks-cur --branches | \
- git --git-dir=marks-test/.git fast-import --export-marks=marks-new --import-marks=marks-new &&
+ git fast-export --export-marks=marks-cur --import-marks-if-exists=marks-cur --branches | \
+ git --git-dir=marks-test/.git fast-import --export-marks=marks-new --import-marks-if-exists=marks-new &&
(cd marks-test &&
git reset --hard &&
echo Wohlauf > file &&
git commit -a -m "back in time") &&
- git --git-dir=marks-test/.git fast-export --export-marks=marks-new --import-marks=marks-new --branches | \
- git fast-import --export-marks=marks-cur --import-marks=marks-cur
+ git --git-dir=marks-test/.git fast-export --export-marks=marks-new --import-marks-if-exists=marks-new --branches | \
+ git fast-import --export-marks=marks-cur --import-marks-if-exists=marks-cur
'
cat > expected << EOF
diff --git a/t/t9902-completion.sh b/t/t9902-completion.sh
index 75512c3403..54f8ce18cb 100755
--- a/t/t9902-completion.sh
+++ b/t/t9902-completion.sh
@@ -28,10 +28,10 @@ complete ()
#
# (2) A test makes sure that common subcommands are included in the
# completion for "git <TAB>", and a plumbing is excluded. "add",
-# "filter-branch" and "ls-files" are listed for this.
+# "rebase" and "ls-files" are listed for this.
-GIT_TESTING_ALL_COMMAND_LIST='add checkout check-attr filter-branch ls-files'
-GIT_TESTING_PORCELAIN_COMMAND_LIST='add checkout filter-branch'
+GIT_TESTING_ALL_COMMAND_LIST='add checkout check-attr rebase ls-files'
+GIT_TESTING_PORCELAIN_COMMAND_LIST='add checkout rebase'
. "$GIT_BUILD_DIR/contrib/completion/git-completion.bash"
@@ -1392,12 +1392,12 @@ test_expect_success 'basic' '
# built-in
grep -q "^add \$" out &&
# script
- grep -q "^filter-branch \$" out &&
+ grep -q "^rebase \$" out &&
# plumbing
! grep -q "^ls-files \$" out &&
- run_completion "git f" &&
- ! grep -q -v "^f" out
+ run_completion "git r" &&
+ ! grep -q -v "^r" out
'
test_expect_success 'double dash "git" itself' '
@@ -1698,6 +1698,69 @@ do
'
done
+test_expect_success 'git config - section' '
+ test_completion "git config br" <<-\EOF
+ branch.Z
+ browser.Z
+ EOF
+'
+
+test_expect_success 'git config - variable name' '
+ test_completion "git config log.d" <<-\EOF
+ log.date Z
+ log.decorate Z
+ EOF
+'
+
+test_expect_success 'git config - value' '
+ test_completion "git config color.pager " <<-\EOF
+ false Z
+ true Z
+ EOF
+'
+
+test_expect_success 'git -c - section' '
+ test_completion "git -c br" <<-\EOF
+ branch.Z
+ browser.Z
+ EOF
+'
+
+test_expect_success 'git -c - variable name' '
+ test_completion "git -c log.d" <<-\EOF
+ log.date=Z
+ log.decorate=Z
+ EOF
+'
+
+test_expect_success 'git -c - value' '
+ test_completion "git -c color.pager=" <<-\EOF
+ false Z
+ true Z
+ EOF
+'
+
+test_expect_success 'git clone --config= - section' '
+ test_completion "git clone --config=br" <<-\EOF
+ branch.Z
+ browser.Z
+ EOF
+'
+
+test_expect_success 'git clone --config= - variable name' '
+ test_completion "git clone --config=log.d" <<-\EOF
+ log.date=Z
+ log.decorate=Z
+ EOF
+'
+
+test_expect_success 'git clone --config= - value' '
+ test_completion "git clone --config=color.pager=" <<-\EOF
+ false Z
+ true Z
+ EOF
+'
+
test_expect_success 'sourcing the completion script clears cached commands' '
__git_compute_all_commands &&
verbose test -n "$__git_all_commands" &&
diff --git a/t/test-lib-functions.sh b/t/test-lib-functions.sh
index e0b3f28d3a..b299ecc326 100644
--- a/t/test-lib-functions.sh
+++ b/t/test-lib-functions.sh
@@ -228,9 +228,11 @@ test_commit () {
# can be a tag pointing to the commit-to-merge.
test_merge () {
+ label="$1" &&
+ shift &&
test_tick &&
- git merge -m "$1" "$2" &&
- git tag "$1"
+ git merge -m "$label" "$@" &&
+ git tag "$label"
}
# Efficiently create <nr> commits, each with a unique number (from 1 to <nr>
@@ -580,7 +582,7 @@ test_expect_failure () {
export test_prereq
if ! test_skip "$@"
then
- say >&3 "checking known breakage: $2"
+ say >&3 "checking known breakage of $TEST_NUMBER.$test_count '$1': $2"
if test_run_ "$2" expecting_failure
then
test_known_broken_ok_ "$1"
@@ -600,7 +602,7 @@ test_expect_success () {
export test_prereq
if ! test_skip "$@"
then
- say >&3 "expecting success: $2"
+ say >&3 "expecting success of $TEST_NUMBER.$test_count '$1': $2"
if test_run_ "$2"
then
test_ok_ "$1"
diff --git a/t/test-lib.sh b/t/test-lib.sh
index 30b07e310f..e06fa02a0e 100644
--- a/t/test-lib.sh
+++ b/t/test-lib.sh
@@ -212,6 +212,8 @@ fi
TEST_STRESS_JOB_SFX="${GIT_TEST_STRESS_JOB_NR:+.stress-$GIT_TEST_STRESS_JOB_NR}"
TEST_NAME="$(basename "$0" .sh)"
+TEST_NUMBER="${TEST_NAME%%-*}"
+TEST_NUMBER="${TEST_NUMBER#t}"
TEST_RESULTS_DIR="$TEST_OUTPUT_DIRECTORY/test-results"
TEST_RESULTS_BASE="$TEST_RESULTS_DIR/$TEST_NAME$TEST_STRESS_JOB_SFX"
TRASH_DIRECTORY="trash directory.$TEST_NAME$TEST_STRESS_JOB_SFX"
@@ -507,6 +509,9 @@ EMPTY_BLOB=e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
LF='
'
+# Single quote
+SQ=\'
+
# UTF-8 ZERO WIDTH NON-JOINER, which HFS+ ignores
# when case-folding filenames
u200c=$(printf '\342\200\214')
@@ -567,6 +572,7 @@ export TERM
error () {
say_color error "error: $*"
+ finalize_junit_xml
GIT_EXIT_OK=t
exit 1
}
@@ -695,7 +701,7 @@ test_failure_ () {
say_color error "not ok $test_count - $1"
shift
printf '%s\n' "$*" | sed -e 's/^/# /'
- test "$immediate" = "" || { GIT_EXIT_OK=t; exit 1; }
+ test "$immediate" = "" || { finalize_junit_xml; GIT_EXIT_OK=t; exit 1; }
}
test_known_broken_ok_ () {
@@ -1063,6 +1069,25 @@ write_junit_xml_testcase () {
junit_have_testcase=t
}
+finalize_junit_xml () {
+ if test -n "$write_junit_xml" && test -n "$junit_xml_path"
+ then
+ test -n "$junit_have_testcase" || {
+ junit_start=$(test-tool date getnanos)
+ write_junit_xml_testcase "all tests skipped"
+ }
+
+ # adjust the overall time
+ junit_time=$(test-tool date getnanos $junit_suite_start)
+ sed "s/<testsuite [^>]*/& time=\"$junit_time\"/" \
+ <"$junit_xml_path" >"$junit_xml_path.new"
+ mv "$junit_xml_path.new" "$junit_xml_path"
+
+ write_junit_xml " </testsuite>" "</testsuites>"
+ write_junit_xml=
+ fi
+}
+
test_atexit_cleanup=:
test_atexit_handler () {
# In a succeeding test script 'test_atexit_handler' is invoked
@@ -1085,21 +1110,7 @@ test_done () {
# removed, so the commands can access pidfiles and socket files.
test_atexit_handler
- if test -n "$write_junit_xml" && test -n "$junit_xml_path"
- then
- test -n "$junit_have_testcase" || {
- junit_start=$(test-tool date getnanos)
- write_junit_xml_testcase "all tests skipped"
- }
-
- # adjust the overall time
- junit_time=$(test-tool date getnanos $junit_suite_start)
- sed "s/<testsuite [^>]*/& time=\"$junit_time\"/" \
- <"$junit_xml_path" >"$junit_xml_path.new"
- mv "$junit_xml_path.new" "$junit_xml_path"
-
- write_junit_xml " </testsuite>" "</testsuites>"
- fi
+ finalize_junit_xml
if test -z "$HARNESS_ACTIVE"
then
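
The TEST_NUMBER lines added near the top of this test-lib.sh hunk are plain POSIX parameter expansions: strip everything after the first dash, then strip the leading "t". A minimal illustration, using a test name that appears earlier in this series:

    TEST_NAME=t7505-prepare-commit-msg-hook
    TEST_NUMBER="${TEST_NAME%%-*}"   # -> t7505
    TEST_NUMBER="${TEST_NUMBER#t}"   # -> 7505
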
diff --git a/tag.c b/tag.c
index 5db870edb9..bfa0e31435 100644
--- a/tag.c
+++ b/tag.c
@@ -212,3 +212,10 @@ int parse_tag(struct tag *item)
free(data);
return ret;
}
+
+struct object_id *get_tagged_oid(struct tag *tag)
+{
+ if (!tag->tagged)
+ die("bad tag");
+ return &tag->tagged->oid;
+}
diff --git a/tag.h b/tag.h
index 03265fbfe2..3ce8e72192 100644
--- a/tag.h
+++ b/tag.h
@@ -19,5 +19,6 @@ struct object *deref_tag(struct repository *r, struct object *, const char *, in
struct object *deref_tag_noverify(struct object *);
int gpg_verify_tag(const struct object_id *oid,
const char *name_to_report, unsigned flags);
+struct object_id *get_tagged_oid(struct tag *tag);
#endif /* TAG_H */
diff --git a/templates/hooks--pre-merge-commit.sample b/templates/hooks--pre-merge-commit.sample
new file mode 100755
index 0000000000..399eab1924
--- /dev/null
+++ b/templates/hooks--pre-merge-commit.sample
@@ -0,0 +1,13 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git merge" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message to
+# stderr if it wants to stop the merge commit.
+#
+# To enable this hook, rename this file to "pre-merge-commit".
+
+. git-sh-setup
+test -x "$GIT_DIR/hooks/pre-commit" &&
+ exec "$GIT_DIR/hooks/pre-commit"
+:
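
As the header comment says, the sample stays inert until it is renamed to "pre-merge-commit". A hedged sketch of enabling it in an existing repository (the repository path is only an example; git init copies this template into .git/hooks/ as pre-merge-commit.sample):

    cd /path/to/repo
    mv .git/hooks/pre-merge-commit.sample .git/hooks/pre-merge-commit
    # the sample is installed executable; chmod +x is only needed if the bit was lost
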
diff --git a/trace.c b/trace.c
index fa4a2e7120..b3ef0e627f 100644
--- a/trace.c
+++ b/trace.c
@@ -88,8 +88,6 @@ static int prepare_trace_line(const char *file, int line,
if (!trace_want(key))
return 0;
- set_try_to_free_routine(NULL); /* is never reset */
-
/* unit tests may want to disable additional trace output */
if (trace_want(&trace_bare))
return 1;
diff --git a/trace2/tr2_dst.c b/trace2/tr2_dst.c
index 5dda0ca1cd..ae052a07fe 100644
--- a/trace2/tr2_dst.c
+++ b/trace2/tr2_dst.c
@@ -8,6 +8,19 @@
*/
#define MAX_AUTO_ATTEMPTS 10
+/*
+ * Sentinel file used to detect when we should discard new traces to avoid
+ * writing too many trace files to a directory.
+ */
+#define DISCARD_SENTINEL_NAME "git-trace2-discard"
+
+/*
+ * When set to zero, disables directory file count checks. Otherwise, controls
+ * how many files we can write to a directory before entering discard mode.
+ * This can be overridden via the TR2_SYSENV_MAX_FILES setting.
+ */
+static int tr2env_max_files = 0;
+
static int tr2_dst_want_warning(void)
{
static int tr2env_dst_debug = -1;
@@ -32,9 +45,75 @@ void tr2_dst_trace_disable(struct tr2_dst *dst)
dst->need_close = 0;
}
+/*
+ * Check to make sure we're not overloading the target directory with too many
+ * files. First get the threshold (if present) from the config or envvar. If
+ * it's zero or unset, disable this check. Next check for the presence of a
+ * sentinel file, then check file count.
+ *
+ * Returns 0 if tracing should proceed as normal. Returns 1 if the sentinel file
+ * already exists, which means tracing should be disabled. Returns -1 if there
+ * are too many files but there was no sentinel file, which means we have
+ * created the sentinel file and should write traces to it.
+ *
+ * We expect that some trace processing system is gradually collecting files
+ * from the target directory; after it removes the sentinel file we'll start
+ * writing traces again.
+ */
+static int tr2_dst_too_many_files(struct tr2_dst *dst, const char *tgt_prefix)
+{
+ int file_count = 0, max_files = 0, ret = 0;
+ const char *max_files_var;
+ DIR *dirp;
+ struct strbuf path = STRBUF_INIT, sentinel_path = STRBUF_INIT;
+ struct stat statbuf;
+
+ /* Get the config or envvar and decide if we should continue this check */
+ max_files_var = tr2_sysenv_get(TR2_SYSENV_MAX_FILES);
+ if (max_files_var && *max_files_var && ((max_files = atoi(max_files_var)) >= 0))
+ tr2env_max_files = max_files;
+
+ if (!tr2env_max_files) {
+ ret = 0;
+ goto cleanup;
+ }
+
+ strbuf_addstr(&path, tgt_prefix);
+ if (!is_dir_sep(path.buf[path.len - 1])) {
+ strbuf_addch(&path, '/');
+ }
+
+ /* check sentinel */
+ strbuf_addbuf(&sentinel_path, &path);
+ strbuf_addstr(&sentinel_path, DISCARD_SENTINEL_NAME);
+ if (!stat(sentinel_path.buf, &statbuf)) {
+ ret = 1;
+ goto cleanup;
+ }
+
+ /* check file count */
+ dirp = opendir(path.buf);
+ while (file_count < tr2env_max_files && dirp && readdir(dirp))
+ file_count++;
+ if (dirp)
+ closedir(dirp);
+
+ if (file_count >= tr2env_max_files) {
+ dst->too_many_files = 1;
+ dst->fd = open(sentinel_path.buf, O_WRONLY | O_CREAT | O_EXCL, 0666);
+ ret = -1;
+ goto cleanup;
+ }
+
+cleanup:
+ strbuf_release(&path);
+ strbuf_release(&sentinel_path);
+ return ret;
+}
+
static int tr2_dst_try_auto_path(struct tr2_dst *dst, const char *tgt_prefix)
{
- int fd;
+ int too_many_files;
const char *last_slash, *sid = tr2_sid_get();
struct strbuf path = STRBUF_INIT;
size_t base_path_len;
@@ -50,18 +129,29 @@ static int tr2_dst_try_auto_path(struct tr2_dst *dst, const char *tgt_prefix)
strbuf_addstr(&path, sid);
base_path_len = path.len;
- for (attempt_count = 0; attempt_count < MAX_AUTO_ATTEMPTS; attempt_count++) {
- if (attempt_count > 0) {
- strbuf_setlen(&path, base_path_len);
- strbuf_addf(&path, ".%d", attempt_count);
+ too_many_files = tr2_dst_too_many_files(dst, tgt_prefix);
+ if (!too_many_files) {
+ for (attempt_count = 0; attempt_count < MAX_AUTO_ATTEMPTS; attempt_count++) {
+ if (attempt_count > 0) {
+ strbuf_setlen(&path, base_path_len);
+ strbuf_addf(&path, ".%d", attempt_count);
+ }
+
+ dst->fd = open(path.buf, O_WRONLY | O_CREAT | O_EXCL, 0666);
+ if (dst->fd != -1)
+ break;
}
-
- fd = open(path.buf, O_WRONLY | O_CREAT | O_EXCL, 0666);
- if (fd != -1)
- break;
+ } else if (too_many_files == 1) {
+ strbuf_release(&path);
+ if (tr2_dst_want_warning())
+ warning("trace2: not opening %s trace file due to too "
+ "many files in target directory %s",
+ tr2_sysenv_display_name(dst->sysenv_var),
+ tgt_prefix);
+ return 0;
}
- if (fd == -1) {
+ if (dst->fd == -1) {
if (tr2_dst_want_warning())
warning("trace2: could not open '%.*s' for '%s' tracing: %s",
(int) base_path_len, path.buf,
@@ -75,7 +165,6 @@ static int tr2_dst_try_auto_path(struct tr2_dst *dst, const char *tgt_prefix)
strbuf_release(&path);
- dst->fd = fd;
dst->need_close = 1;
dst->initialized = 1;
@@ -215,13 +304,8 @@ connected:
static void tr2_dst_malformed_warning(struct tr2_dst *dst,
const char *tgt_value)
{
- struct strbuf buf = STRBUF_INIT;
-
- strbuf_addf(&buf, "trace2: unknown value for '%s': '%s'",
- tr2_sysenv_display_name(dst->sysenv_var), tgt_value);
- warning("%s", buf.buf);
-
- strbuf_release(&buf);
+ warning("trace2: unknown value for '%s': '%s'",
+ tr2_sysenv_display_name(dst->sysenv_var), tgt_value);
}
int tr2_dst_get_trace_fd(struct tr2_dst *dst)
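
A hedged sketch of how the new discard mechanism is meant to be used from the user's side; the variable and file names come from the hunks above and below (GIT_TRACE2_MAX_FILES / trace2.maxfiles, git-trace2-discard), while the target directory is just an example:

    mkdir -p /tmp/git-traces
    export GIT_TRACE2_EVENT=/tmp/git-traces    # per-process trace files land here
    export GIT_TRACE2_MAX_FILES=100            # or: git config trace2.maxfiles 100

    # Once the directory holds 100 or more entries, git writes the sentinel
    # /tmp/git-traces/git-trace2-discard and later processes skip tracing.
    # A collector that has drained the directory removes the sentinel to resume:
    rm -f /tmp/git-traces/git-trace2-discard
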
diff --git a/trace2/tr2_dst.h b/trace2/tr2_dst.h
index 3adf3bac13..b1a8c144e0 100644
--- a/trace2/tr2_dst.h
+++ b/trace2/tr2_dst.h
@@ -9,6 +9,7 @@ struct tr2_dst {
int fd;
unsigned int initialized : 1;
unsigned int need_close : 1;
+ unsigned int too_many_files : 1;
};
/*
diff --git a/trace2/tr2_sysenv.c b/trace2/tr2_sysenv.c
index 5958cfc424..3c3792eca2 100644
--- a/trace2/tr2_sysenv.c
+++ b/trace2/tr2_sysenv.c
@@ -49,6 +49,9 @@ static struct tr2_sysenv_entry tr2_sysenv_settings[] = {
"trace2.perftarget" },
[TR2_SYSENV_PERF_BRIEF] = { "GIT_TRACE2_PERF_BRIEF",
"trace2.perfbrief" },
+
+ [TR2_SYSENV_MAX_FILES] = { "GIT_TRACE2_MAX_FILES",
+ "trace2.maxfiles" },
};
/* clang-format on */
diff --git a/trace2/tr2_sysenv.h b/trace2/tr2_sysenv.h
index 8dd82a7a56..d4364a7b85 100644
--- a/trace2/tr2_sysenv.h
+++ b/trace2/tr2_sysenv.h
@@ -24,6 +24,8 @@ enum tr2_sysenv_variable {
TR2_SYSENV_PERF,
TR2_SYSENV_PERF_BRIEF,
+ TR2_SYSENV_MAX_FILES,
+
TR2_SYSENV_MUST_BE_LAST
};
diff --git a/trace2/tr2_tgt_event.c b/trace2/tr2_tgt_event.c
index c2852d1bd2..6353e8ad91 100644
--- a/trace2/tr2_tgt_event.c
+++ b/trace2/tr2_tgt_event.c
@@ -10,16 +10,17 @@
#include "trace2/tr2_tgt.h"
#include "trace2/tr2_tls.h"
-static struct tr2_dst tr2dst_event = { TR2_SYSENV_EVENT, 0, 0, 0 };
+static struct tr2_dst tr2dst_event = { TR2_SYSENV_EVENT, 0, 0, 0, 0 };
/*
- * The version number of the JSON data generated by the EVENT target
- * in this source file. Update this if you make a significant change
- * to the JSON fields or message structure. You probably do not need
- * to update this if you just add another call to one of the existing
- * TRACE2 API methods.
+ * The version number of the JSON data generated by the EVENT target in this
+ * source file. The version should be incremented if new event types are added,
+ * if existing fields are removed, or if there are significant changes in
+ * interpretation of existing events or fields. Smaller changes, such as adding
+ * a new field to an existing event, do not require an increment to the EVENT
+ * format version.
*/
-#define TR2_EVENT_VERSION "1"
+#define TR2_EVENT_VERSION "2"
/*
* Region nesting limit for messages written to the event target.
@@ -107,6 +108,19 @@ static void event_fmt_prepare(const char *event_name, const char *file,
jw_object_intmax(jw, "repo", repo->trace2_repo_id);
}
+static void fn_too_many_files_fl(const char *file, int line)
+{
+ const char *event_name = "too_many_files";
+ struct json_writer jw = JSON_WRITER_INIT;
+
+ jw_object_begin(&jw, 0);
+ event_fmt_prepare(event_name, file, line, NULL, &jw);
+ jw_end(&jw);
+
+ tr2_dst_write_line(&tr2dst_event, &jw.json);
+ jw_release(&jw);
+}
+
static void fn_version_fl(const char *file, int line)
{
const char *event_name = "version";
@@ -120,6 +134,9 @@ static void fn_version_fl(const char *file, int line)
tr2_dst_write_line(&tr2dst_event, &jw.json);
jw_release(&jw);
+
+ if (tr2dst_event.too_many_files)
+ fn_too_many_files_fl(file, line);
}
static void fn_start_fl(const char *file, int line,
@@ -205,11 +222,6 @@ static void maybe_add_string_va(struct json_writer *jw, const char *field_name,
strbuf_release(&buf);
return;
}
-
- if (fmt && *fmt) {
- jw_object_string(jw, field_name, fmt);
- return;
- }
}
static void fn_error_va_fl(const char *file, int line, const char *fmt,
diff --git a/trace2/tr2_tgt_normal.c b/trace2/tr2_tgt_normal.c
index 00b116d797..31b602c171 100644
--- a/trace2/tr2_tgt_normal.c
+++ b/trace2/tr2_tgt_normal.c
@@ -9,7 +9,7 @@
#include "trace2/tr2_tgt.h"
#include "trace2/tr2_tls.h"
-static struct tr2_dst tr2dst_normal = { TR2_SYSENV_NORMAL, 0, 0, 0 };
+static struct tr2_dst tr2dst_normal = { TR2_SYSENV_NORMAL, 0, 0, 0, 0 };
/*
* Use the TR2_SYSENV_NORMAL_BRIEF setting to omit the "<time> <file>:<line>"
@@ -87,7 +87,7 @@ static void fn_start_fl(const char *file, int line,
struct strbuf buf_payload = STRBUF_INIT;
strbuf_addstr(&buf_payload, "start ");
- sq_quote_argv_pretty(&buf_payload, argv);
+ sq_append_quote_argv_pretty(&buf_payload, argv);
normal_io_write_fl(file, line, &buf_payload);
strbuf_release(&buf_payload);
}
@@ -135,11 +135,6 @@ static void maybe_append_string_va(struct strbuf *buf, const char *fmt,
va_end(copy_ap);
return;
}
-
- if (fmt && *fmt) {
- strbuf_addstr(buf, fmt);
- return;
- }
}
static void fn_error_va_fl(const char *file, int line, const char *fmt,
@@ -147,8 +142,11 @@ static void fn_error_va_fl(const char *file, int line, const char *fmt,
{
struct strbuf buf_payload = STRBUF_INIT;
- strbuf_addstr(&buf_payload, "error ");
- maybe_append_string_va(&buf_payload, fmt, ap);
+ strbuf_addstr(&buf_payload, "error");
+ if (fmt && *fmt) {
+ strbuf_addch(&buf_payload, ' ');
+ maybe_append_string_va(&buf_payload, fmt, ap);
+ }
normal_io_write_fl(file, line, &buf_payload);
strbuf_release(&buf_payload);
}
@@ -188,8 +186,8 @@ static void fn_alias_fl(const char *file, int line, const char *alias,
{
struct strbuf buf_payload = STRBUF_INIT;
- strbuf_addf(&buf_payload, "alias %s ->", alias);
- sq_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addf(&buf_payload, "alias %s -> ", alias);
+ sq_append_quote_argv_pretty(&buf_payload, argv);
normal_io_write_fl(file, line, &buf_payload);
strbuf_release(&buf_payload);
}
@@ -200,12 +198,12 @@ static void fn_child_start_fl(const char *file, int line,
{
struct strbuf buf_payload = STRBUF_INIT;
- strbuf_addf(&buf_payload, "child_start[%d] ", cmd->trace2_child_id);
+ strbuf_addf(&buf_payload, "child_start[%d]", cmd->trace2_child_id);
if (cmd->dir) {
- strbuf_addstr(&buf_payload, " cd");
+ strbuf_addstr(&buf_payload, " cd ");
sq_quote_buf_pretty(&buf_payload, cmd->dir);
- strbuf_addstr(&buf_payload, "; ");
+ strbuf_addstr(&buf_payload, ";");
}
/*
@@ -213,9 +211,10 @@ static void fn_child_start_fl(const char *file, int line,
* See trace_add_env() in run-command.c as used by original trace.c
*/
+ strbuf_addch(&buf_payload, ' ');
if (cmd->git_cmd)
- strbuf_addstr(&buf_payload, "git");
- sq_quote_argv_pretty(&buf_payload, cmd->argv);
+ strbuf_addstr(&buf_payload, "git ");
+ sq_append_quote_argv_pretty(&buf_payload, cmd->argv);
normal_io_write_fl(file, line, &buf_payload);
strbuf_release(&buf_payload);
@@ -240,9 +239,11 @@ static void fn_exec_fl(const char *file, int line, uint64_t us_elapsed_absolute,
struct strbuf buf_payload = STRBUF_INIT;
strbuf_addf(&buf_payload, "exec[%d] ", exec_id);
- if (exe)
+ if (exe) {
strbuf_addstr(&buf_payload, exe);
- sq_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addch(&buf_payload, ' ');
+ }
+ sq_append_quote_argv_pretty(&buf_payload, argv);
normal_io_write_fl(file, line, &buf_payload);
strbuf_release(&buf_payload);
}
diff --git a/trace2/tr2_tgt_perf.c b/trace2/tr2_tgt_perf.c
index ea0cbbe13e..ffac8029ad 100644
--- a/trace2/tr2_tgt_perf.c
+++ b/trace2/tr2_tgt_perf.c
@@ -11,7 +11,7 @@
#include "trace2/tr2_tgt.h"
#include "trace2/tr2_tls.h"
-static struct tr2_dst tr2dst_perf = { TR2_SYSENV_PERF, 0, 0, 0 };
+static struct tr2_dst tr2dst_perf = { TR2_SYSENV_PERF, 0, 0, 0, 0 };
/*
* Use TR2_SYSENV_PERF_BRIEF to omit the "<time> <file>:<line>"
@@ -21,10 +21,10 @@ static struct tr2_dst tr2dst_perf = { TR2_SYSENV_PERF, 0, 0, 0 };
*/
static int tr2env_perf_be_brief;
-#define TR2FMT_PERF_FL_WIDTH (50)
+#define TR2FMT_PERF_FL_WIDTH (28)
#define TR2FMT_PERF_MAX_EVENT_NAME (12)
-#define TR2FMT_PERF_REPO_WIDTH (4)
-#define TR2FMT_PERF_CATEGORY_WIDTH (10)
+#define TR2FMT_PERF_REPO_WIDTH (3)
+#define TR2FMT_PERF_CATEGORY_WIDTH (12)
#define TR2_DOTS_BUFFER_SIZE (100)
#define TR2_INDENT (2)
@@ -79,17 +79,36 @@ static void perf_fmt_prepare(const char *event_name,
if (!tr2env_perf_be_brief) {
struct tr2_tbuf tb_now;
+ size_t fl_end_col;
tr2_tbuf_local_time(&tb_now);
strbuf_addstr(buf, tb_now.buf);
strbuf_addch(buf, ' ');
- if (file && *file)
- strbuf_addf(buf, "%s:%d ", file, line);
- while (buf->len < TR2FMT_PERF_FL_WIDTH)
+ fl_end_col = buf->len + TR2FMT_PERF_FL_WIDTH;
+
+ if (file && *file) {
+ struct strbuf buf_fl = STRBUF_INIT;
+
+ strbuf_addf(&buf_fl, "%s:%d", file, line);
+
+ if (buf_fl.len <= TR2FMT_PERF_FL_WIDTH)
+ strbuf_addbuf(buf, &buf_fl);
+ else {
+ size_t avail = TR2FMT_PERF_FL_WIDTH - 3;
+ strbuf_addstr(buf, "...");
+ strbuf_add(buf,
+ &buf_fl.buf[buf_fl.len - avail],
+ avail);
+ }
+
+ strbuf_release(&buf_fl);
+ }
+
+ while (buf->len < fl_end_col)
strbuf_addch(buf, ' ');
- strbuf_addstr(buf, "| ");
+ strbuf_addstr(buf, " | ");
}
strbuf_addf(buf, "d%d | ", tr2_sid_depth());
@@ -102,7 +121,7 @@ static void perf_fmt_prepare(const char *event_name,
strbuf_addf(buf, "r%d ", repo->trace2_repo_id);
while (buf->len < len)
strbuf_addch(buf, ' ');
- strbuf_addstr(buf, "| ");
+ strbuf_addstr(buf, " | ");
if (p_us_elapsed_absolute)
strbuf_addf(buf, "%9.6f | ",
@@ -116,8 +135,8 @@ static void perf_fmt_prepare(const char *event_name,
else
strbuf_addf(buf, "%9s | ", " ");
- strbuf_addf(buf, "%-*s | ", TR2FMT_PERF_CATEGORY_WIDTH,
- (category ? category : ""));
+ strbuf_addf(buf, "%-*.*s | ", TR2FMT_PERF_CATEGORY_WIDTH,
+ TR2FMT_PERF_CATEGORY_WIDTH, (category ? category : ""));
if (ctx->nr_open_regions > 0) {
int len_indent = TR2_INDENT_LENGTH(ctx);
@@ -165,7 +184,7 @@ static void fn_start_fl(const char *file, int line,
const char *event_name = "start";
struct strbuf buf_payload = STRBUF_INIT;
- sq_quote_argv_pretty(&buf_payload, argv);
+ sq_append_quote_argv_pretty(&buf_payload, argv);
perf_io_write_fl(file, line, event_name, NULL, &us_elapsed_absolute,
NULL, NULL, &buf_payload);
@@ -220,11 +239,6 @@ static void maybe_append_string_va(struct strbuf *buf, const char *fmt,
va_end(copy_ap);
return;
}
-
- if (fmt && *fmt) {
- strbuf_addstr(buf, fmt);
- return;
- }
}
static void fn_error_va_fl(const char *file, int line, const char *fmt,
@@ -285,8 +299,9 @@ static void fn_alias_fl(const char *file, int line, const char *alias,
const char *event_name = "alias";
struct strbuf buf_payload = STRBUF_INIT;
- strbuf_addf(&buf_payload, "alias:%s argv:", alias);
- sq_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addf(&buf_payload, "alias:%s argv:[", alias);
+ sq_append_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addch(&buf_payload, ']');
perf_io_write_fl(file, line, event_name, NULL, NULL, NULL, NULL,
&buf_payload);
@@ -315,10 +330,14 @@ static void fn_child_start_fl(const char *file, int line,
sq_quote_buf_pretty(&buf_payload, cmd->dir);
}
- strbuf_addstr(&buf_payload, " argv:");
- if (cmd->git_cmd)
- strbuf_addstr(&buf_payload, " git");
- sq_quote_argv_pretty(&buf_payload, cmd->argv);
+ strbuf_addstr(&buf_payload, " argv:[");
+ if (cmd->git_cmd) {
+ strbuf_addstr(&buf_payload, "git");
+ if (cmd->argv[0])
+ strbuf_addch(&buf_payload, ' ');
+ }
+ sq_append_quote_argv_pretty(&buf_payload, cmd->argv);
+ strbuf_addch(&buf_payload, ']');
perf_io_write_fl(file, line, event_name, NULL, &us_elapsed_absolute,
NULL, NULL, &buf_payload);
@@ -369,10 +388,14 @@ static void fn_exec_fl(const char *file, int line, uint64_t us_elapsed_absolute,
struct strbuf buf_payload = STRBUF_INIT;
strbuf_addf(&buf_payload, "id:%d ", exec_id);
- strbuf_addstr(&buf_payload, "argv:");
- if (exe)
- strbuf_addf(&buf_payload, " %s", exe);
- sq_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addstr(&buf_payload, "argv:[");
+ if (exe) {
+ strbuf_addstr(&buf_payload, exe);
+ if (argv[0])
+ strbuf_addch(&buf_payload, ' ');
+ }
+ sq_append_quote_argv_pretty(&buf_payload, argv);
+ strbuf_addch(&buf_payload, ']');
perf_io_write_fl(file, line, event_name, NULL, &us_elapsed_absolute,
NULL, NULL, &buf_payload);
@@ -433,8 +456,11 @@ static void fn_region_enter_printf_va_fl(const char *file, int line,
struct strbuf buf_payload = STRBUF_INIT;
if (label)
- strbuf_addf(&buf_payload, "label:%s ", label);
- maybe_append_string_va(&buf_payload, fmt, ap);
+ strbuf_addf(&buf_payload, "label:%s", label);
+ if (fmt && *fmt) {
+ strbuf_addch(&buf_payload, ' ');
+ maybe_append_string_va(&buf_payload, fmt, ap);
+ }
perf_io_write_fl(file, line, event_name, repo, &us_elapsed_absolute,
NULL, category, &buf_payload);
@@ -450,8 +476,11 @@ static void fn_region_leave_printf_va_fl(
struct strbuf buf_payload = STRBUF_INIT;
if (label)
- strbuf_addf(&buf_payload, "label:%s ", label);
- maybe_append_string_va(&buf_payload, fmt, ap);
+ strbuf_addf(&buf_payload, "label:%s", label);
+ if (fmt && *fmt) {
+		strbuf_addch(&buf_payload, ' ');
+ maybe_append_string_va(&buf_payload, fmt, ap);
+ }
perf_io_write_fl(file, line, event_name, repo, &us_elapsed_absolute,
&us_elapsed_region, category, &buf_payload);
diff --git a/transport-helper.c b/transport-helper.c
index 6b05a88faf..9e1279b928 100644
--- a/transport-helper.c
+++ b/transport-helper.c
@@ -33,6 +33,16 @@ struct helper_data {
check_connectivity : 1,
no_disconnect_req : 1,
no_private_update : 1;
+
+ /*
+ * As an optimization, the transport code may invoke fetch before
+ * get_refs_list. If this happens, and if the transport helper doesn't
+ * support connect or stateless_connect, we need to invoke
+ * get_refs_list ourselves if we haven't already done so. Keep track of
+ * whether we have invoked get_refs_list.
+ */
+ unsigned get_refs_list_called : 1;
+
char *export_marks;
char *import_marks;
/* These go from remote name (as in "list") to private name */
@@ -652,17 +662,25 @@ static int connect_helper(struct transport *transport, const char *name,
return 0;
}
+static struct ref *get_refs_list_using_list(struct transport *transport,
+ int for_push);
+
static int fetch(struct transport *transport,
int nr_heads, struct ref **to_fetch)
{
struct helper_data *data = transport->data;
int i, count;
+ get_helper(transport);
+
if (process_connect(transport, 0)) {
do_take_over(transport);
return transport->vtable->fetch(transport, nr_heads, to_fetch);
}
+ if (!data->get_refs_list_called)
+ get_refs_list_using_list(transport, 0);
+
count = 0;
for (i = 0; i < nr_heads; i++)
if (!(to_fetch[i]->status & REF_STATUS_UPTODATE))
@@ -682,13 +700,9 @@ static int fetch(struct transport *transport,
set_helper_option(transport, "update-shallow", "true");
if (data->transport_options.filter_options.choice) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
- expand_list_objects_filter_spec(
- &data->transport_options.filter_options,
- &expanded_filter_spec);
- set_helper_option(transport, "filter",
- expanded_filter_spec.buf);
- strbuf_release(&expanded_filter_spec);
+ const char *spec = expand_list_objects_filter_spec(
+ &data->transport_options.filter_options);
+ set_helper_option(transport, "filter", spec);
}
if (data->transport_options.negotiation_tips)
@@ -1059,6 +1073,19 @@ static int has_attribute(const char *attrs, const char *attr)
static struct ref *get_refs_list(struct transport *transport, int for_push,
const struct argv_array *ref_prefixes)
{
+ get_helper(transport);
+
+ if (process_connect(transport, for_push)) {
+ do_take_over(transport);
+ return transport->vtable->get_refs_list(transport, for_push, ref_prefixes);
+ }
+
+ return get_refs_list_using_list(transport, for_push);
+}
+
+static struct ref *get_refs_list_using_list(struct transport *transport,
+ int for_push)
+{
struct helper_data *data = transport->data;
struct child_process *helper;
struct ref *ret = NULL;
@@ -1066,13 +1093,9 @@ static struct ref *get_refs_list(struct transport *transport, int for_push,
struct ref *posn;
struct strbuf buf = STRBUF_INIT;
+ data->get_refs_list_called = 1;
helper = get_helper(transport);
- if (process_connect(transport, for_push)) {
- do_take_over(transport);
- return transport->vtable->get_refs_list(transport, for_push, ref_prefixes);
- }
-
if (data->push && for_push)
write_str_in_full(helper->in, "list for-push\n");
else
@@ -1119,7 +1142,6 @@ static struct ref *get_refs_list(struct transport *transport, int for_push,
}
static struct transport_vtable vtable = {
- 0,
set_helper_option,
get_refs_list,
fetch,
diff --git a/transport-internal.h b/transport-internal.h
index 004bee5e36..1cde6258a7 100644
--- a/transport-internal.h
+++ b/transport-internal.h
@@ -7,12 +7,6 @@ struct argv_array;
struct transport_vtable {
/**
- * This transport supports the fetch() function being called
- * without get_refs_list() first being called.
- */
- unsigned fetch_without_list : 1;
-
- /**
* Returns 0 if successful, positive if the option is not
* recognized or is inapplicable, and negative if the option
* is applicable but the value is invalid.
diff --git a/transport.c b/transport.c
index 778c60bf57..83379a037d 100644
--- a/transport.c
+++ b/transport.c
@@ -122,6 +122,7 @@ static void set_upstreams(struct transport *transport, struct ref *refs,
struct bundle_transport_data {
int fd;
struct bundle_header header;
+ unsigned get_refs_from_bundle_called : 1;
};
static struct ref *get_refs_from_bundle(struct transport *transport,
@@ -135,6 +136,8 @@ static struct ref *get_refs_from_bundle(struct transport *transport,
if (for_push)
return NULL;
+ data->get_refs_from_bundle_called = 1;
+
if (data->fd > 0)
close(data->fd);
data->fd = read_bundle_header(transport->url, &data->header);
@@ -154,6 +157,9 @@ static int fetch_refs_from_bundle(struct transport *transport,
int nr_heads, struct ref **to_fetch)
{
struct bundle_transport_data *data = transport->data;
+
+ if (!data->get_refs_from_bundle_called)
+ get_refs_from_bundle(transport, 0, NULL);
return unbundle(the_repository, &data->header, data->fd,
transport->progress ? BUNDLE_VERBOSE : 0);
}
@@ -224,6 +230,7 @@ static int set_git_option(struct git_transport_options *opts,
opts->no_dependents = !!value;
return 0;
} else if (!strcmp(name, TRANS_OPT_LIST_OBJECTS_FILTER)) {
+ list_objects_filter_die_if_populated(&opts->filter_options);
parse_list_objects_filter(&opts->filter_options, value);
return 0;
}
@@ -742,7 +749,6 @@ static int disconnect_git(struct transport *transport)
}
static struct transport_vtable taken_over_vtable = {
- 1,
NULL,
get_refs_via_connect,
fetch_refs_via_pack,
@@ -892,7 +898,6 @@ void transport_check_allowed(const char *type)
}
static struct transport_vtable bundle_vtable = {
- 0,
NULL,
get_refs_from_bundle,
fetch_refs_from_bundle,
@@ -902,7 +907,6 @@ static struct transport_vtable bundle_vtable = {
};
static struct transport_vtable builtin_smart_vtable = {
- 1,
NULL,
get_refs_via_connect,
fetch_refs_via_pack,
@@ -1141,8 +1145,10 @@ int transport_push(struct repository *r,
refspec_ref_prefixes(rs, &ref_prefixes);
+ trace2_region_enter("transport_push", "get_refs_list", r);
remote_refs = transport->vtable->get_refs_list(transport, 1,
&ref_prefixes);
+ trace2_region_leave("transport_push", "get_refs_list", r);
argv_array_clear(&ref_prefixes);
@@ -1178,6 +1184,7 @@ int transport_push(struct repository *r,
struct ref *ref = remote_refs;
struct oid_array commits = OID_ARRAY_INIT;
+ trace2_region_enter("transport_push", "push_submodules", r);
for (; ref; ref = ref->next)
if (!is_null_oid(&ref->new_oid))
oid_array_append(&commits,
@@ -1190,9 +1197,11 @@ int transport_push(struct repository *r,
transport->push_options,
pretend)) {
oid_array_clear(&commits);
+ trace2_region_leave("transport_push", "push_submodules", r);
die(_("failed to push all needed submodules"));
}
oid_array_clear(&commits);
+ trace2_region_leave("transport_push", "push_submodules", r);
}
if (((flags & TRANSPORT_RECURSE_SUBMODULES_CHECK) ||
@@ -1203,6 +1212,7 @@ int transport_push(struct repository *r,
struct string_list needs_pushing = STRING_LIST_INIT_DUP;
struct oid_array commits = OID_ARRAY_INIT;
+ trace2_region_enter("transport_push", "check_submodules", r);
for (; ref; ref = ref->next)
if (!is_null_oid(&ref->new_oid))
oid_array_append(&commits,
@@ -1213,15 +1223,19 @@ int transport_push(struct repository *r,
transport->remote->name,
&needs_pushing)) {
oid_array_clear(&commits);
+ trace2_region_leave("transport_push", "check_submodules", r);
die_with_unpushed_submodules(&needs_pushing);
}
string_list_clear(&needs_pushing, 0);
oid_array_clear(&commits);
+ trace2_region_leave("transport_push", "check_submodules", r);
}
- if (!(flags & TRANSPORT_RECURSE_SUBMODULES_ONLY))
+ if (!(flags & TRANSPORT_RECURSE_SUBMODULES_ONLY)) {
+ trace2_region_enter("transport_push", "push_refs", r);
push_ret = transport->vtable->push_refs(transport, remote_refs, flags);
- else
+ trace2_region_leave("transport_push", "push_refs", r);
+ } else
push_ret = 0;
err = push_had_errors(remote_refs);
ret = push_ret | err;
@@ -1285,15 +1299,6 @@ int transport_fetch_refs(struct transport *transport, struct ref *refs)
struct ref **heads = NULL;
struct ref *rm;
- if (!transport->vtable->fetch_without_list)
- /*
- * Some transports (e.g. the built-in bundle transport and the
- * transport helper interface) do not work when fetching is
- * done immediately after transport creation. List the remote
- * refs anyway (if not already listed) as a workaround.
- */
- transport_get_remote_refs(transport, NULL);
-
for (rm = refs; rm; rm = rm->next) {
nr_refs++;
if (rm->peer_ref &&
diff --git a/tree-walk.c b/tree-walk.c
index c20b62f49e..bea819d826 100644
--- a/tree-walk.c
+++ b/tree-walk.c
@@ -170,40 +170,61 @@ int tree_entry_gently(struct tree_desc *desc, struct name_entry *entry)
void setup_traverse_info(struct traverse_info *info, const char *base)
{
- int pathlen = strlen(base);
+ size_t pathlen = strlen(base);
static struct traverse_info dummy;
memset(info, 0, sizeof(*info));
if (pathlen && base[pathlen-1] == '/')
pathlen--;
info->pathlen = pathlen ? pathlen + 1 : 0;
- info->name.path = base;
- info->name.pathlen = pathlen;
- if (pathlen) {
- hashcpy(info->name.oid.hash, (const unsigned char *)base + pathlen + 1);
+ info->name = base;
+ info->namelen = pathlen;
+ if (pathlen)
info->prev = &dummy;
- }
}
-char *make_traverse_path(char *path, const struct traverse_info *info, const struct name_entry *n)
+char *make_traverse_path(char *path, size_t pathlen,
+ const struct traverse_info *info,
+ const char *name, size_t namelen)
{
- int len = tree_entry_len(n);
- int pathlen = info->pathlen;
+ /* Always points to the end of the name we're about to add */
+ size_t pos = st_add(info->pathlen, namelen);
+
+ if (pos >= pathlen)
+ BUG("too small buffer passed to make_traverse_path");
- path[pathlen + len] = 0;
+ path[pos] = 0;
for (;;) {
- memcpy(path + pathlen, n->path, len);
- if (!pathlen)
+ if (pos < namelen)
+ BUG("traverse_info pathlen does not match strings");
+ pos -= namelen;
+ memcpy(path + pos, name, namelen);
+
+ if (!pos)
break;
- path[--pathlen] = '/';
- n = &info->name;
- len = tree_entry_len(n);
+ path[--pos] = '/';
+
+ if (!info)
+ BUG("traverse_info ran out of list items");
+ name = info->name;
+ namelen = info->namelen;
info = info->prev;
- pathlen -= len;
}
return path;
}
+void strbuf_make_traverse_path(struct strbuf *out,
+ const struct traverse_info *info,
+ const char *name, size_t namelen)
+{
+ size_t len = traverse_path_len(info, namelen);
+
+ strbuf_grow(out, len);
+ make_traverse_path(out->buf + out->len, out->alloc - out->len,
+ info, name, namelen);
+ strbuf_setlen(out, out->len + len);
+}
+
struct tree_desc_skip {
struct tree_desc_skip *prev;
const void *ptr;
@@ -400,13 +421,12 @@ int traverse_trees(struct index_state *istate,
tx[i].d = t[i];
if (info->prev) {
- strbuf_grow(&base, info->pathlen);
- make_traverse_path(base.buf, info->prev, &info->name);
- base.buf[info->pathlen-1] = '/';
- strbuf_setlen(&base, info->pathlen);
- traverse_path = xstrndup(base.buf, info->pathlen);
+ strbuf_make_traverse_path(&base, info->prev,
+ info->name, info->namelen);
+ strbuf_addch(&base, '/');
+ traverse_path = xstrndup(base.buf, base.len);
} else {
- traverse_path = xstrndup(info->name.path, info->pathlen);
+ traverse_path = xstrndup(info->name, info->pathlen);
}
info->traverse_path = traverse_path;
for (;;) {
diff --git a/tree-walk.h b/tree-walk.h
index 2a5db29e8f..abe2caf4e0 100644
--- a/tree-walk.h
+++ b/tree-walk.h
@@ -58,8 +58,11 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r, struct
struct traverse_info {
const char *traverse_path;
struct traverse_info *prev;
- struct name_entry name;
- int pathlen;
+ const char *name;
+ size_t namelen;
+ unsigned mode;
+
+ size_t pathlen;
struct pathspec *pathspec;
unsigned long df_conflicts;
@@ -69,12 +72,17 @@ struct traverse_info {
};
int get_tree_entry(struct repository *, const struct object_id *, const char *, struct object_id *, unsigned short *);
-char *make_traverse_path(char *path, const struct traverse_info *info, const struct name_entry *n);
+char *make_traverse_path(char *path, size_t pathlen, const struct traverse_info *info,
+ const char *name, size_t namelen);
+void strbuf_make_traverse_path(struct strbuf *out,
+ const struct traverse_info *info,
+ const char *name, size_t namelen);
void setup_traverse_info(struct traverse_info *info, const char *base);
-static inline int traverse_path_len(const struct traverse_info *info, const struct name_entry *n)
+static inline size_t traverse_path_len(const struct traverse_info *info,
+ size_t namelen)
{
- return info->pathlen + tree_entry_len(n);
+ return st_add(info->pathlen, namelen);
}
/* in general, positive means "kind of interesting" */
diff --git a/tree.c b/tree.c
index 4720945e6a..1466bcc6a8 100644
--- a/tree.c
+++ b/tree.c
@@ -244,19 +244,7 @@ void free_tree_buffer(struct tree *tree)
struct tree *parse_tree_indirect(const struct object_id *oid)
{
- struct object *obj = parse_object(the_repository, oid);
- do {
- if (!obj)
- return NULL;
- if (obj->type == OBJ_TREE)
- return (struct tree *) obj;
- else if (obj->type == OBJ_COMMIT)
- obj = &(get_commit_tree(((struct commit *)obj))->object);
- else if (obj->type == OBJ_TAG)
- obj = ((struct tag *) obj)->tagged;
- else
- return NULL;
- if (!obj->parsed)
- parse_object(the_repository, &obj->oid);
- } while (1);
+ struct repository *r = the_repository;
+ struct object *obj = parse_object(r, oid);
+ return (struct tree *)repo_peel_to_type(r, NULL, 0, obj, OBJ_TREE);
}
diff --git a/unpack-trees.c b/unpack-trees.c
index 62276d4fef..33ea7810d8 100644
--- a/unpack-trees.c
+++ b/unpack-trees.c
@@ -11,12 +11,11 @@
#include "refs.h"
#include "attr.h"
#include "split-index.h"
-#include "dir.h"
#include "submodule.h"
#include "submodule-config.h"
#include "fsmonitor.h"
#include "object-store.h"
-#include "fetch-object.h"
+#include "promisor-remote.h"
/*
* Error messages expected by scripts out of plumbing commands such as
@@ -400,7 +399,7 @@ static int check_updates(struct unpack_trees_options *o)
load_gitmodules_file(index, &state);
enable_delayed_checkout(&state);
- if (repository_format_partial_clone && o->update && !o->dry_run) {
+ if (has_promisor_remote() && o->update && !o->dry_run) {
/*
* Prefetch the objects that are to be checked out in the loop
* below.
@@ -419,8 +418,8 @@ static int check_updates(struct unpack_trees_options *o)
oid_array_append(&to_fetch, &ce->oid);
}
if (to_fetch.nr)
- fetch_objects(repository_format_partial_clone,
- to_fetch.oid, to_fetch.nr);
+ promisor_remote_get_direct(the_repository,
+ to_fetch.oid, to_fetch.nr);
oid_array_clear(&to_fetch);
}
for (i = 0; i < index->cache_nr; i++) {
@@ -632,7 +631,7 @@ static int unpack_index_entry(struct cache_entry *ce,
return ret;
}
-static int find_cache_pos(struct traverse_info *, const struct name_entry *);
+static int find_cache_pos(struct traverse_info *, const char *p, size_t len);
static void restore_cache_bottom(struct traverse_info *info, int bottom)
{
@@ -651,7 +650,7 @@ static int switch_cache_bottom(struct traverse_info *info)
if (o->diff_index_cached)
return 0;
ret = o->cache_bottom;
- pos = find_cache_pos(info->prev, &info->name);
+ pos = find_cache_pos(info->prev, info->name, info->namelen);
if (pos < -1)
o->cache_bottom = -2 - pos;
@@ -686,21 +685,19 @@ static int index_pos_by_traverse_info(struct name_entry *names,
struct traverse_info *info)
{
struct unpack_trees_options *o = info->data;
- int len = traverse_path_len(info, names);
- char *name = xmalloc(len + 1 /* slash */ + 1 /* NUL */);
+ struct strbuf name = STRBUF_INIT;
int pos;
- make_traverse_path(name, info, names);
- name[len++] = '/';
- name[len] = '\0';
- pos = index_name_pos(o->src_index, name, len);
+ strbuf_make_traverse_path(&name, info, names->path, names->pathlen);
+ strbuf_addch(&name, '/');
+ pos = index_name_pos(o->src_index, name.buf, name.len);
if (pos >= 0)
BUG("This is a directory and should not exist in index");
pos = -pos - 1;
- if (!starts_with(o->src_index->cache[pos]->name, name) ||
- (pos > 0 && starts_with(o->src_index->cache[pos-1]->name, name)))
+ if (!starts_with(o->src_index->cache[pos]->name, name.buf) ||
+ (pos > 0 && starts_with(o->src_index->cache[pos-1]->name, name.buf)))
BUG("pos must point at the first entry in this directory");
- free(name);
+ strbuf_release(&name);
return pos;
}
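
Note: index_pos_by_traverse_info() now builds the probe path with a strbuf instead of a hand-sized xmalloc buffer, which removes the manual "+1 for slash, +1 for NUL" bookkeeping. A small sketch of the same pattern using only strbuf primitives (the helper name and parameters here are made up for illustration):

#include "cache.h"

/* Build "<prefix>/<name>/" without any manual length arithmetic. */
static void build_dir_key(struct strbuf *out, const char *prefix, const char *name)
{
	strbuf_reset(out);
	strbuf_addstr(out, prefix);
	if (out->len)
		strbuf_addch(out, '/');
	strbuf_addstr(out, name);
	strbuf_addch(out, '/');        /* directory keys end with a slash */
}

As in the hunk above, the buffer is released with strbuf_release() where the old code called free(name).
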
@@ -811,8 +808,10 @@ static int traverse_trees_recursive(int n, unsigned long dirmask,
newinfo = *info;
newinfo.prev = info;
newinfo.pathspec = info->pathspec;
- newinfo.name = *p;
- newinfo.pathlen += tree_entry_len(p) + 1;
+ newinfo.name = p->path;
+ newinfo.namelen = p->pathlen;
+ newinfo.mode = p->mode;
+ newinfo.pathlen = st_add3(newinfo.pathlen, tree_entry_len(p), 1);
newinfo.df_conflicts |= df_conflicts;
/*
@@ -863,14 +862,18 @@ static int traverse_trees_recursive(int n, unsigned long dirmask,
* itself - the caller needs to do the final check for the cache
* entry having more data at the end!
*/
-static int do_compare_entry_piecewise(const struct cache_entry *ce, const struct traverse_info *info, const struct name_entry *n)
+static int do_compare_entry_piecewise(const struct cache_entry *ce,
+ const struct traverse_info *info,
+ const char *name, size_t namelen,
+ unsigned mode)
{
- int len, pathlen, ce_len;
+ int pathlen, ce_len;
const char *ce_name;
if (info->prev) {
int cmp = do_compare_entry_piecewise(ce, info->prev,
- &info->name);
+ info->name, info->namelen,
+ info->mode);
if (cmp)
return cmp;
}
@@ -884,15 +887,15 @@ static int do_compare_entry_piecewise(const struct cache_entry *ce, const struct
ce_len -= pathlen;
ce_name = ce->name + pathlen;
- len = tree_entry_len(n);
- return df_name_compare(ce_name, ce_len, S_IFREG, n->path, len, n->mode);
+ return df_name_compare(ce_name, ce_len, S_IFREG, name, namelen, mode);
}
static int do_compare_entry(const struct cache_entry *ce,
const struct traverse_info *info,
- const struct name_entry *n)
+ const char *name, size_t namelen,
+ unsigned mode)
{
- int len, pathlen, ce_len;
+ int pathlen, ce_len;
const char *ce_name;
int cmp;
@@ -902,7 +905,7 @@ static int do_compare_entry(const struct cache_entry *ce,
* it is quicker to use the precomputed version.
*/
if (!info->traverse_path)
- return do_compare_entry_piecewise(ce, info, n);
+ return do_compare_entry_piecewise(ce, info, name, namelen, mode);
cmp = strncmp(ce->name, info->traverse_path, info->pathlen);
if (cmp)
@@ -917,13 +920,12 @@ static int do_compare_entry(const struct cache_entry *ce,
ce_len -= pathlen;
ce_name = ce->name + pathlen;
- len = tree_entry_len(n);
- return df_name_compare(ce_name, ce_len, S_IFREG, n->path, len, n->mode);
+ return df_name_compare(ce_name, ce_len, S_IFREG, name, namelen, mode);
}
static int compare_entry(const struct cache_entry *ce, const struct traverse_info *info, const struct name_entry *n)
{
- int cmp = do_compare_entry(ce, info, n);
+ int cmp = do_compare_entry(ce, info, n->path, n->pathlen, n->mode);
if (cmp)
return cmp;
@@ -931,7 +933,7 @@ static int compare_entry(const struct cache_entry *ce, const struct traverse_inf
* Even if the beginning compared identically, the ce should
* compare as bigger than a directory leading up to it!
*/
- return ce_namelen(ce) > traverse_path_len(info, n);
+ return ce_namelen(ce) > traverse_path_len(info, tree_entry_len(n));
}
static int ce_in_traverse_path(const struct cache_entry *ce,
@@ -939,7 +941,8 @@ static int ce_in_traverse_path(const struct cache_entry *ce,
{
if (!info->prev)
return 1;
- if (do_compare_entry(ce, info->prev, &info->name))
+ if (do_compare_entry(ce, info->prev,
+ info->name, info->namelen, info->mode))
return 0;
/*
* If ce (blob) is the same name as the path (which is a tree
@@ -954,7 +957,7 @@ static struct cache_entry *create_ce_entry(const struct traverse_info *info,
struct index_state *istate,
int is_transient)
{
- int len = traverse_path_len(info, n);
+ size_t len = traverse_path_len(info, tree_entry_len(n));
struct cache_entry *ce =
is_transient ?
make_empty_transient_cache_entry(len) :
@@ -964,7 +967,8 @@ static struct cache_entry *create_ce_entry(const struct traverse_info *info,
ce->ce_flags = create_ce_flags(stage);
ce->ce_namelen = len;
oidcpy(&ce->oid, &n->oid);
- make_traverse_path(ce->name, info, n);
+ /* len+1 because the cache_entry allocates space for NUL */
+ make_traverse_path(ce->name, len + 1, info, n->path, n->pathlen);
return ce;
}
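
Note: make_traverse_path() now takes the destination size, so callers such as create_ce_entry() must state how much room they actually allocated; here that is len + 1 because a cache entry reserves one extra byte for the trailing NUL. A hypothetical helper (checked_copy is not a git function) showing the contract the new signature enforces:

#include <string.h>

/* The caller states how much room it has; the callee never writes past it. */
static int checked_copy(char *dst, size_t dstlen, const char *src, size_t srclen)
{
	if (srclen >= dstlen)
		return -1;             /* not enough room for the bytes plus the NUL */
	memcpy(dst, src, srclen);
	dst[srclen] = '\0';
	return 0;
}
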
@@ -1057,13 +1061,12 @@ static int unpack_failed(struct unpack_trees_options *o, const char *message)
* the directory.
*/
static int find_cache_pos(struct traverse_info *info,
- const struct name_entry *p)
+ const char *p, size_t p_len)
{
int pos;
struct unpack_trees_options *o = info->data;
struct index_state *index = o->src_index;
int pfxlen = info->pathlen;
- int p_len = tree_entry_len(p);
for (pos = o->cache_bottom; pos < index->cache_nr; pos++) {
const struct cache_entry *ce = index->cache[pos];
@@ -1099,7 +1102,7 @@ static int find_cache_pos(struct traverse_info *info,
ce_len = ce_slash - ce_name;
else
ce_len = ce_namelen(ce) - pfxlen;
- cmp = name_compare(p->path, p_len, ce_name, ce_len);
+ cmp = name_compare(p, p_len, ce_name, ce_len);
/*
* Exact match; if we have a directory we need to
* delay returning it.
@@ -1114,7 +1117,7 @@ static int find_cache_pos(struct traverse_info *info,
* E.g. ce_name == "t-i", and p->path == "t"; we may
* have "t/a" in the index.
*/
- if (p_len < ce_len && !memcmp(ce_name, p->path, p_len) &&
+ if (p_len < ce_len && !memcmp(ce_name, p, p_len) &&
ce_name[p_len] < '/')
continue; /* keep looking */
break;
@@ -1125,7 +1128,7 @@ static int find_cache_pos(struct traverse_info *info,
static struct cache_entry *find_cache_entry(struct traverse_info *info,
const struct name_entry *p)
{
- int pos = find_cache_pos(info, p);
+ int pos = find_cache_pos(info, p->path, p->pathlen);
struct unpack_trees_options *o = info->data;
if (0 <= pos)
@@ -1138,10 +1141,10 @@ static void debug_path(struct traverse_info *info)
{
if (info->prev) {
debug_path(info->prev);
- if (*info->prev->name.path)
+ if (*info->prev->name)
putchar('/');
}
- printf("%s", info->name.path);
+ printf("%s", info->name);
}
static void debug_name_entry(int i, struct name_entry *n)
@@ -1265,7 +1268,8 @@ static int clear_ce_flags_1(struct index_state *istate,
struct cache_entry **cache, int nr,
struct strbuf *prefix,
int select_mask, int clear_mask,
- struct exclude_list *el, int defval);
+ struct pattern_list *pl,
+ enum pattern_match_result default_match);
/* Whole directory matching */
static int clear_ce_flags_dir(struct index_state *istate,
@@ -1273,19 +1277,21 @@ static int clear_ce_flags_dir(struct index_state *istate,
struct strbuf *prefix,
char *basename,
int select_mask, int clear_mask,
- struct exclude_list *el, int defval)
+ struct pattern_list *pl,
+ enum pattern_match_result default_match)
{
struct cache_entry **cache_end;
int dtype = DT_DIR;
- int ret = is_excluded_from_list(prefix->buf, prefix->len,
- basename, &dtype, el, istate);
int rc;
+ enum pattern_match_result ret;
+ ret = path_matches_pattern_list(prefix->buf, prefix->len,
+ basename, &dtype, pl, istate);
strbuf_addch(prefix, '/');
/* If undecided, use matching result of parent dir in defval */
- if (ret < 0)
- ret = defval;
+ if (ret == UNDECIDED)
+ ret = default_match;
for (cache_end = cache; cache_end != cache + nr; cache_end++) {
struct cache_entry *ce = *cache_end;
@@ -1294,23 +1300,23 @@ static int clear_ce_flags_dir(struct index_state *istate,
}
/*
- * TODO: check el, if there are no patterns that may conflict
+ * TODO: check pl, if there are no patterns that may conflict
* with ret (iow, we know in advance the incl/excl
* decision for the entire directory), clear flag here without
* calling clear_ce_flags_1(). That function will call
- * the expensive is_excluded_from_list() on every entry.
+ * the expensive path_matches_pattern_list() on every entry.
*/
rc = clear_ce_flags_1(istate, cache, cache_end - cache,
prefix,
select_mask, clear_mask,
- el, ret);
+ pl, ret);
strbuf_setlen(prefix, prefix->len - 1);
return rc;
}
/*
* Traverse the index, find every entry that matches according to
- * o->el. Do "ce_flags &= ~clear_mask" on those entries. Return the
+ * o->pl. Do "ce_flags &= ~clear_mask" on those entries. Return the
* number of traversed entries.
*
* If select_mask is non-zero, only entries whose ce_flags has on of
@@ -1327,7 +1333,8 @@ static int clear_ce_flags_1(struct index_state *istate,
struct cache_entry **cache, int nr,
struct strbuf *prefix,
int select_mask, int clear_mask,
- struct exclude_list *el, int defval)
+ struct pattern_list *pl,
+ enum pattern_match_result default_match)
{
struct cache_entry **cache_end = cache + nr;
@@ -1338,7 +1345,8 @@ static int clear_ce_flags_1(struct index_state *istate,
while(cache != cache_end) {
struct cache_entry *ce = *cache;
const char *name, *slash;
- int len, dtype, ret;
+ int len, dtype;
+ enum pattern_match_result ret;
if (select_mask && !(ce->ce_flags & select_mask)) {
cache++;
@@ -1362,7 +1370,7 @@ static int clear_ce_flags_1(struct index_state *istate,
prefix,
prefix->buf + prefix->len - len,
select_mask, clear_mask,
- el, defval);
+ pl, default_match);
/* clear_c_f_dir eats a whole dir already? */
if (processed) {
@@ -1374,18 +1382,20 @@ static int clear_ce_flags_1(struct index_state *istate,
strbuf_addch(prefix, '/');
cache += clear_ce_flags_1(istate, cache, cache_end - cache,
prefix,
- select_mask, clear_mask, el, defval);
+ select_mask, clear_mask, pl,
+ default_match);
strbuf_setlen(prefix, prefix->len - len - 1);
continue;
}
/* Non-directory */
dtype = ce_to_dtype(ce);
- ret = is_excluded_from_list(ce->name, ce_namelen(ce),
- name, &dtype, el, istate);
- if (ret < 0)
- ret = defval;
- if (ret > 0)
+ ret = path_matches_pattern_list(ce->name,
+ ce_namelen(ce),
+ name, &dtype, pl, istate);
+ if (ret == UNDECIDED)
+ ret = default_match;
+ if (ret == MATCHED)
ce->ce_flags &= ~clear_mask;
cache++;
}
@@ -1394,7 +1404,7 @@ static int clear_ce_flags_1(struct index_state *istate,
static int clear_ce_flags(struct index_state *istate,
int select_mask, int clear_mask,
- struct exclude_list *el)
+ struct pattern_list *pl)
{
static struct strbuf prefix = STRBUF_INIT;
@@ -1405,13 +1415,13 @@ static int clear_ce_flags(struct index_state *istate,
istate->cache_nr,
&prefix,
select_mask, clear_mask,
- el, 0);
+ pl, 0);
}
/*
* Set/Clear CE_NEW_SKIP_WORKTREE according to $GIT_DIR/info/sparse-checkout
*/
-static void mark_new_skip_worktree(struct exclude_list *el,
+static void mark_new_skip_worktree(struct pattern_list *pl,
struct index_state *istate,
int select_flag, int skip_wt_flag)
{
@@ -1437,7 +1447,7 @@ static void mark_new_skip_worktree(struct exclude_list *el,
* 2. Widen worktree according to sparse-checkout file.
* Matched entries will have skip_wt_flag cleared (i.e. "in")
*/
- clear_ce_flags(istate, select_flag, skip_wt_flag, el);
+ clear_ce_flags(istate, select_flag, skip_wt_flag, pl);
}
static int verify_absent(const struct cache_entry *,
@@ -1453,21 +1463,21 @@ int unpack_trees(unsigned len, struct tree_desc *t, struct unpack_trees_options
{
int i, ret;
static struct cache_entry *dfc;
- struct exclude_list el;
+ struct pattern_list pl;
if (len > MAX_UNPACK_TREES)
die("unpack_trees takes at most %d trees", MAX_UNPACK_TREES);
trace_performance_enter();
- memset(&el, 0, sizeof(el));
+ memset(&pl, 0, sizeof(pl));
if (!core_apply_sparse_checkout || !o->update)
o->skip_sparse_checkout = 1;
if (!o->skip_sparse_checkout) {
char *sparse = git_pathdup("info/sparse-checkout");
- if (add_excludes_from_file_to_list(sparse, "", 0, &el, NULL) < 0)
+ if (add_patterns_from_file_to_list(sparse, "", 0, &pl, NULL) < 0)
o->skip_sparse_checkout = 1;
else
- o->el = &el;
+ o->pl = &pl;
free(sparse);
}
@@ -1498,7 +1508,7 @@ int unpack_trees(unsigned len, struct tree_desc *t, struct unpack_trees_options
* Sparse checkout loop #1: set NEW_SKIP_WORKTREE on existing entries
*/
if (!o->skip_sparse_checkout)
- mark_new_skip_worktree(o->el, o->src_index, 0, CE_NEW_SKIP_WORKTREE);
+ mark_new_skip_worktree(o->pl, o->src_index, 0, CE_NEW_SKIP_WORKTREE);
if (!dfc)
dfc = xcalloc(1, cache_entry_size(0));
@@ -1563,7 +1573,7 @@ int unpack_trees(unsigned len, struct tree_desc *t, struct unpack_trees_options
* If the will have NEW_SKIP_WORKTREE, also set CE_SKIP_WORKTREE
* so apply_sparse_checkout() won't attempt to remove it from worktree
*/
- mark_new_skip_worktree(o->el, &o->result, CE_ADDED, CE_SKIP_WORKTREE | CE_NEW_SKIP_WORKTREE);
+ mark_new_skip_worktree(o->pl, &o->result, CE_ADDED, CE_SKIP_WORKTREE | CE_NEW_SKIP_WORKTREE);
ret = 0;
for (i = 0; i < o->result.cache_nr; i++) {
@@ -1631,7 +1641,7 @@ int unpack_trees(unsigned len, struct tree_desc *t, struct unpack_trees_options
done:
trace_performance_leave("unpack_trees");
- clear_exclude_list(&el);
+ clear_pattern_list(&pl);
return ret;
return_failed:
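
Note: the sparse-checkout machinery moves from the old struct exclude_list / is_excluded_from_list() names to the pattern-list API, and the tri-state int result becomes enum pattern_match_result (the hunks above use UNDECIDED and MATCHED). A sketch of the per-entry call as the new code uses it; the wrapper name and parameters are illustrative only:

#include "cache.h"
#include "dir.h"

static void clear_flag_if_matched(struct index_state *istate,
				  struct cache_entry *ce,
				  const char *basename,
				  struct pattern_list *pl,
				  enum pattern_match_result parent_default,
				  unsigned clear_mask)
{
	int dtype = ce_to_dtype(ce);
	enum pattern_match_result ret =
		path_matches_pattern_list(ce->name, ce_namelen(ce),
					  basename, &dtype, pl, istate);

	if (ret == UNDECIDED)
		ret = parent_default;          /* inherit the parent directory's verdict */
	if (ret == MATCHED)
		ce->ce_flags &= ~clear_mask;
}
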
diff --git a/unpack-trees.h b/unpack-trees.h
index d344d7d296..f2eee0c7c5 100644
--- a/unpack-trees.h
+++ b/unpack-trees.h
@@ -10,7 +10,7 @@
struct cache_entry;
struct unpack_trees_options;
-struct exclude_list;
+struct pattern_list;
typedef int (*merge_fn_t)(const struct cache_entry * const *src,
struct unpack_trees_options *options);
@@ -83,7 +83,7 @@ struct unpack_trees_options {
struct index_state *src_index;
struct index_state result;
- struct exclude_list *el; /* for internal use */
+ struct pattern_list *pl; /* for internal use */
};
int unpack_trees(unsigned n, struct tree_desc *t,
diff --git a/upload-pack.c b/upload-pack.c
index 222cd3ad89..a00d7ece6b 100644
--- a/upload-pack.c
+++ b/upload-pack.c
@@ -140,18 +140,17 @@ static void create_pack_file(const struct object_array *have_obj,
argv_array_push(&pack_objects.args, "--delta-base-offset");
if (use_include_tag)
argv_array_push(&pack_objects.args, "--include-tag");
- if (filter_options.filter_spec) {
- struct strbuf expanded_filter_spec = STRBUF_INIT;
- expand_list_objects_filter_spec(&filter_options,
- &expanded_filter_spec);
+ if (filter_options.choice) {
+ const char *spec =
+ expand_list_objects_filter_spec(&filter_options);
if (pack_objects.use_shell) {
struct strbuf buf = STRBUF_INIT;
- sq_quote_buf(&buf, expanded_filter_spec.buf);
+ sq_quote_buf(&buf, spec);
argv_array_pushf(&pack_objects.args, "--filter=%s", buf.buf);
strbuf_release(&buf);
} else {
argv_array_pushf(&pack_objects.args, "--filter=%s",
- expanded_filter_spec.buf);
+ spec);
}
}
@@ -722,7 +721,7 @@ static void deepen_by_rev_list(struct packet_writer *writer, int ac,
{
struct commit_list *result;
- close_commit_graph(the_repository->objects);
+ disable_commit_graph(the_repository);
result = get_shallow_commits_by_rev_list(ac, av, SHALLOW, NOT_SHALLOW);
send_shallow(writer, result);
free_commit_list(result);
@@ -884,6 +883,7 @@ static void receive_needs(struct packet_reader *reader, struct object_array *wan
if (skip_prefix(reader->line, "filter ", &arg)) {
if (!filter_capability_requested)
die("git upload-pack: filtering capability not negotiated");
+ list_objects_filter_die_if_populated(&filter_options);
parse_list_objects_filter(&filter_options, arg);
continue;
}
@@ -1305,6 +1305,7 @@ static void process_args(struct packet_reader *request,
}
if (allow_filter && skip_prefix(arg, "filter ", &p)) {
+ list_objects_filter_die_if_populated(&filter_options);
parse_list_objects_filter(&filter_options, p);
continue;
}
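
Note: expand_list_objects_filter_spec() now hands back a string owned by the filter options instead of filling a caller-provided strbuf, and list_objects_filter_die_if_populated() rejects a repeated "filter" line in the same request. The shell-quoting path is unchanged; a self-contained restatement of it, grounded in the hunk above:

#include "cache.h"
#include "quote.h"
#include "argv-array.h"

/* Quote a --filter value when pack-objects will be run through a shell. */
static void push_filter_arg(struct argv_array *args, const char *spec, int use_shell)
{
	if (use_shell) {
		struct strbuf buf = STRBUF_INIT;
		sq_quote_buf(&buf, spec);      /* single-quote for the shell */
		argv_array_pushf(args, "--filter=%s", buf.buf);
		strbuf_release(&buf);
	} else {
		argv_array_pushf(args, "--filter=%s", spec);
	}
}
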
diff --git a/url.c b/url.c
index 1b8ef78cea..e34e5e7517 100644
--- a/url.c
+++ b/url.c
@@ -86,6 +86,12 @@ char *url_decode_mem(const char *url, int len)
return url_decode_internal(&url, len, NULL, &out, 0);
}
+char *url_percent_decode(const char *encoded)
+{
+ struct strbuf out = STRBUF_INIT;
+ return url_decode_internal(&encoded, strlen(encoded), NULL, &out, 0);
+}
+
char *url_decode_parameter_name(const char **query)
{
struct strbuf out = STRBUF_INIT;
diff --git a/url.h b/url.h
index 00b7d58c33..2a27c34277 100644
--- a/url.h
+++ b/url.h
@@ -7,6 +7,14 @@ int is_url(const char *url);
int is_urlschemechar(int first_flag, int ch);
char *url_decode(const char *url);
char *url_decode_mem(const char *url, int len);
+
+/*
+ * Similar to the url_decode_{,mem} methods above, but doesn't assume there
+ * is a scheme followed by a : at the start of the string. Instead, %-sequences
+ * before any : are also parsed.
+ */
+char *url_percent_decode(const char *encoded);
+
char *url_decode_parameter_name(const char **query);
char *url_decode_parameter_value(const char **query);
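
Note: unlike url_decode(), which leaves everything before the first colon alone (so a scheme such as "https:" survives intact), url_percent_decode() decodes %-sequences from the very first byte. A small usage example; the expected results follow from the comment above:

#include "cache.h"
#include "url.h"

static void url_percent_decode_demo(void)
{
	char *a = url_percent_decode("hello%20world");   /* "hello world" */
	char *b = url_percent_decode("a%3Ab");           /* "a:b", decoded even before a colon */

	/* the results are heap-allocated (strbuf-backed) and are the caller's to free */
	free(a);
	free(b);
}
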
diff --git a/userdiff.c b/userdiff.c
index e74a6d4022..86e3244e15 100644
--- a/userdiff.c
+++ b/userdiff.c
@@ -23,6 +23,14 @@ IPATTERN("ada",
"[a-zA-Z][a-zA-Z0-9_]*"
"|[-+]?[0-9][0-9#_.aAbBcCdDeEfF]*([eE][+-]?[0-9_]+)?"
"|=>|\\.\\.|\\*\\*|:=|/=|>=|<=|<<|>>|<>"),
+PATTERNS("dts",
+ "!;\n"
+ /* lines beginning with a word optionally preceded by '&' or the root */
+ "^[ \t]*((/|&?[a-zA-Z_]).*)",
+ /* -- */
+ /* Property names and math operators */
+ "[a-zA-Z0-9,._+?#-]+"
+ "|[-+*/%&^|!~]|>>|<<|&&|\\|\\|"),
IPATTERN("fortran",
"!^([C*]|[ \t]*!)\n"
"!^[ \t]*MODULE[ \t]+PROCEDURE[ \t]\n"
diff --git a/wrapper.c b/wrapper.c
index 1e45ab7b92..e1eaef2e16 100644
--- a/wrapper.c
+++ b/wrapper.c
@@ -4,12 +4,6 @@
#include "cache.h"
#include "config.h"
-static void do_nothing(size_t size)
-{
-}
-
-static void (*try_to_free_routine)(size_t size) = do_nothing;
-
static int memory_limit_check(size_t size, int gentle)
{
static size_t limit = 0;
@@ -30,24 +24,11 @@ static int memory_limit_check(size_t size, int gentle)
return 0;
}
-try_to_free_t set_try_to_free_routine(try_to_free_t routine)
-{
- try_to_free_t old = try_to_free_routine;
- if (!routine)
- routine = do_nothing;
- try_to_free_routine = routine;
- return old;
-}
-
char *xstrdup(const char *str)
{
char *ret = strdup(str);
- if (!ret) {
- try_to_free_routine(strlen(str) + 1);
- ret = strdup(str);
- if (!ret)
- die("Out of memory, strdup failed");
- }
+ if (!ret)
+ die("Out of memory, strdup failed");
return ret;
}
@@ -61,19 +42,13 @@ static void *do_xmalloc(size_t size, int gentle)
if (!ret && !size)
ret = malloc(1);
if (!ret) {
- try_to_free_routine(size);
- ret = malloc(size);
- if (!ret && !size)
- ret = malloc(1);
- if (!ret) {
- if (!gentle)
- die("Out of memory, malloc failed (tried to allocate %lu bytes)",
- (unsigned long)size);
- else {
- error("Out of memory, malloc failed (tried to allocate %lu bytes)",
- (unsigned long)size);
- return NULL;
- }
+ if (!gentle)
+ die("Out of memory, malloc failed (tried to allocate %lu bytes)",
+ (unsigned long)size);
+ else {
+ error("Out of memory, malloc failed (tried to allocate %lu bytes)",
+ (unsigned long)size);
+ return NULL;
}
}
#ifdef XMALLOC_POISON
@@ -138,14 +113,8 @@ void *xrealloc(void *ptr, size_t size)
ret = realloc(ptr, size);
if (!ret && !size)
ret = realloc(ptr, 1);
- if (!ret) {
- try_to_free_routine(size);
- ret = realloc(ptr, size);
- if (!ret && !size)
- ret = realloc(ptr, 1);
- if (!ret)
- die("Out of memory, realloc failed");
- }
+ if (!ret)
+ die("Out of memory, realloc failed");
return ret;
}
@@ -160,14 +129,8 @@ void *xcalloc(size_t nmemb, size_t size)
ret = calloc(nmemb, size);
if (!ret && (!nmemb || !size))
ret = calloc(1, 1);
- if (!ret) {
- try_to_free_routine(nmemb * size);
- ret = calloc(nmemb, size);
- if (!ret && (!nmemb || !size))
- ret = calloc(1, 1);
- if (!ret)
- die("Out of memory, calloc failed");
- }
+ if (!ret)
+ die("Out of memory, calloc failed");
return ret;
}
@@ -478,7 +441,9 @@ int git_mkstemps_mode(char *pattern, int suffix_len, int mode)
"abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789";
- static const int num_letters = 62;
+ static const int num_letters = ARRAY_SIZE(letters) - 1;
+ static const char x_pattern[] = "XXXXXX";
+ static const int num_x = ARRAY_SIZE(x_pattern) - 1;
uint64_t value;
struct timeval tv;
char *filename_template;
@@ -487,12 +452,12 @@ int git_mkstemps_mode(char *pattern, int suffix_len, int mode)
len = strlen(pattern);
- if (len < 6 + suffix_len) {
+ if (len < num_x + suffix_len) {
errno = EINVAL;
return -1;
}
- if (strncmp(&pattern[len - 6 - suffix_len], "XXXXXX", 6)) {
+ if (strncmp(&pattern[len - num_x - suffix_len], x_pattern, num_x)) {
errno = EINVAL;
return -1;
}
@@ -503,16 +468,15 @@ int git_mkstemps_mode(char *pattern, int suffix_len, int mode)
*/
gettimeofday(&tv, NULL);
value = ((uint64_t)tv.tv_usec << 16) ^ tv.tv_sec ^ getpid();
- filename_template = &pattern[len - 6 - suffix_len];
+ filename_template = &pattern[len - num_x - suffix_len];
for (count = 0; count < TMP_MAX; ++count) {
uint64_t v = value;
+ int i;
/* Fill in the random bits. */
- filename_template[0] = letters[v % num_letters]; v /= num_letters;
- filename_template[1] = letters[v % num_letters]; v /= num_letters;
- filename_template[2] = letters[v % num_letters]; v /= num_letters;
- filename_template[3] = letters[v % num_letters]; v /= num_letters;
- filename_template[4] = letters[v % num_letters]; v /= num_letters;
- filename_template[5] = letters[v % num_letters]; v /= num_letters;
+ for (i = 0; i < num_x; i++) {
+ filename_template[i] = letters[v % num_letters];
+ v /= num_letters;
+ }
fd = open(pattern, O_CREAT | O_EXCL | O_RDWR, mode);
if (fd >= 0)
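
Note: git_mkstemps_mode() now derives num_letters and num_x from ARRAY_SIZE() and fills the template in a loop rather than six unrolled assignments. A stand-alone sketch of that fill step (the same base-62 walk over the 64-bit seed, minus the retry and open() logic):

#include <stddef.h>
#include <stdint.h>

#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))

/* Fill template[0..num_x) from v, consuming one base-62 digit per position. */
static void fill_template(char *template, uint64_t v)
{
	static const char letters[] =
		"abcdefghijklmnopqrstuvwxyz"
		"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
		"0123456789";
	static const char x_pattern[] = "XXXXXX";
	const size_t num_letters = ARRAY_SIZE(letters) - 1;   /* exclude the NUL */
	const size_t num_x = ARRAY_SIZE(x_pattern) - 1;
	size_t i;

	for (i = 0; i < num_x; i++) {
		template[i] = letters[v % num_letters];
		v /= num_letters;
	}
}
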
diff --git a/wt-status.c b/wt-status.c
index 9f6c65a580..cc6f94504d 100644
--- a/wt-status.c
+++ b/wt-status.c
@@ -1434,6 +1434,8 @@ static void show_cherry_pick_in_progress(struct wt_status *s,
status_printf_ln(s, color,
_(" (all conflicts fixed: run \"git cherry-pick --continue\")"));
status_printf_ln(s, color,
+ _(" (use \"git cherry-pick --skip\" to skip this patch)"));
+ status_printf_ln(s, color,
_(" (use \"git cherry-pick --abort\" to cancel the cherry-pick operation)"));
}
wt_longstatus_print_trailer(s);
@@ -1461,6 +1463,8 @@ static void show_revert_in_progress(struct wt_status *s,
status_printf_ln(s, color,
_(" (all conflicts fixed: run \"git revert --continue\")"));
status_printf_ln(s, color,
+ _(" (use \"git revert --skip\" to skip this patch)"));
+ status_printf_ln(s, color,
_(" (use \"git revert --abort\" to cancel the revert operation)"));
}
wt_longstatus_print_trailer(s);
@@ -2025,7 +2029,7 @@ static void wt_porcelain_v2_print_tracking(struct wt_status *s)
char eol = s->null_termination ? '\0' : '\n';
fprintf(s->fp, "# branch.oid %s%c",
- (s->is_initial ? "(initial)" : sha1_to_hex(s->sha1_commit)),
+ (s->is_initial ? "(initial)" : oid_to_hex(&s->oid_commit)),
eol);
if (!s->branch)
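
Note: the porcelain v2 branch header is now produced from a struct object_id via oid_to_hex() rather than a raw GIT_MAX_RAWSZ byte buffer, keeping it hash-size agnostic. The emitted line itself is unchanged, e.g. (hash value invented for illustration):

# branch.oid 3fa7ce9050c48a3a60e0e976e9d45a3d4a3bfd04
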
diff --git a/wt-status.h b/wt-status.h
index 77dad5b920..71c3f25f43 100644
--- a/wt-status.h
+++ b/wt-status.h
@@ -116,7 +116,7 @@ struct wt_status {
int rename_limit;
enum wt_status_format status_format;
struct wt_status_state state;
- unsigned char sha1_commit[GIT_MAX_RAWSZ]; /* when not Initial */
+ struct object_id oid_commit; /* when not Initial */
/* These are computed during processing of the individual sections */
int committable;
diff --git a/xdiff/xdiffi.c b/xdiff/xdiffi.c
index 1f1f4a3c78..bd035139f9 100644
--- a/xdiff/xdiffi.c
+++ b/xdiff/xdiffi.c
@@ -38,9 +38,9 @@ typedef struct s_xdpsplit {
* Basically considers a "box" (off1, off2, lim1, lim2) and scan from both
* the forward diagonal starting from (off1, off2) and the backward diagonal
* starting from (lim1, lim2). If the K values on the same diagonal crosses
- * returns the furthest point of reach. We might end up having to expensive
- * cases using this algorithm is full, so a little bit of heuristic is needed
- * to cut the search and to return a suboptimal point.
+ * returns the furthest point of reach. We might encounter expensive edge cases
+ * using this algorithm, so a little bit of heuristic is needed to cut the
+ * search and to return a suboptimal point.
*/
static long xdl_split(unsigned long const *ha1, long off1, long lim1,
unsigned long const *ha2, long off2, long lim2,
@@ -63,11 +63,13 @@ static long xdl_split(unsigned long const *ha1, long off1, long lim1,
int got_snake = 0;
/*
- * We need to extent the diagonal "domain" by one. If the next
+ * We need to extend the diagonal "domain" by one. If the next
* values exits the box boundaries we need to change it in the
- * opposite direction because (max - min) must be a power of two.
+ * opposite direction because (max - min) must be a power of
+ * two.
+ *
* Also we initialize the external K value to -1 so that we can
- * avoid extra conditions check inside the core loop.
+ * avoid extra conditions in the check inside the core loop.
*/
if (fmin > dmin)
kvdf[--fmin - 1] = -1;
@@ -98,11 +100,13 @@ static long xdl_split(unsigned long const *ha1, long off1, long lim1,
}
/*
- * We need to extent the diagonal "domain" by one. If the next
+ * We need to extend the diagonal "domain" by one. If the next
* values exits the box boundaries we need to change it in the
- * opposite direction because (max - min) must be a power of two.
+ * opposite direction because (max - min) must be a power of
+ * two.
+ *
* Also we initialize the external K value to -1 so that we can
- * avoid extra conditions check inside the core loop.
+ * avoid extra conditions in the check inside the core loop.
*/
if (bmin > dmin)
kvdb[--bmin - 1] = XDL_LINE_MAX;
@@ -138,7 +142,7 @@ static long xdl_split(unsigned long const *ha1, long off1, long lim1,
/*
* If the edit cost is above the heuristic trigger and if
* we got a good snake, we sample current diagonals to see
- * if some of the, have reached an "interesting" path. Our
+ * if some of them have reached an "interesting" path. Our
* measure is a function of the distance from the diagonal
* corner (i1 + i2) penalized with the distance from the
* mid diagonal itself. If this value is above the current
@@ -196,8 +200,9 @@ static long xdl_split(unsigned long const *ha1, long off1, long lim1,
}
/*
- * Enough is enough. We spent too much time here and now we collect
- * the furthest reaching path using the (i1 + i2) measure.
+ * Enough is enough. We spent too much time here and now we
+ * collect the furthest reaching path using the (i1 + i2)
+ * measure.
*/
if (ec >= xenv->mxcost) {
long fbest, fbest1, bbest, bbest1;
@@ -244,9 +249,9 @@ static long xdl_split(unsigned long const *ha1, long off1, long lim1,
/*
- * Rule: "Divide et Impera". Recursively split the box in sub-boxes by calling
- * the box splitting function. Note that the real job (marking changed lines)
- * is done in the two boundary reaching checks.
+ * Rule: "Divide et Impera" (divide & conquer). Recursively split the box in
+ * sub-boxes by calling the box splitting function. Note that the real job
+ * (marking changed lines) is done in the two boundary reaching checks.
*/
int xdl_recs_cmp(diffdata_t *dd1, long off1, long lim1,
diffdata_t *dd2, long off2, long lim2,
@@ -323,7 +328,9 @@ int xdl_do_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
}
/*
- * Allocate and setup K vectors to be used by the differential algorithm.
+ * Allocate and setup K vectors to be used by the differential
+ * algorithm.
+ *
* One is to store the forward path and one to store the backward path.
*/
ndiags = xe->xdf1.nreff + xe->xdf2.nreff + 3;
@@ -394,8 +401,8 @@ static int recs_match(xrecord_t *rec1, xrecord_t *rec2, long flags)
/*
* If a line is indented more than this, get_indent() just returns this value.
* This avoids having to do absurd amounts of work for data that are not
- * human-readable text, and also ensures that the output of get_indent fits within
- * an int.
+ * human-readable text, and also ensures that the output of get_indent fits
+ * within an int.
*/
#define MAX_INDENT 200
@@ -429,9 +436,9 @@ static int get_indent(xrecord_t *rec)
}
/*
- * If more than this number of consecutive blank rows are found, just return this
- * value. This avoids requiring O(N^2) work for pathological cases, and also
- * ensures that the output of score_split fits in an int.
+ * If more than this number of consecutive blank rows are found, just return
+ * this value. This avoids requiring O(N^2) work for pathological cases, and
+ * also ensures that the output of score_split fits in an int.
*/
#define MAX_BLANKS 20
@@ -443,8 +450,8 @@ struct split_measurement {
int end_of_file;
/*
- * How much is the line immediately following the split indented (or -1 if
- * the line is blank):
+ * How much is the line immediately following the split indented (or -1
+ * if the line is blank):
*/
int indent;
@@ -454,8 +461,8 @@ struct split_measurement {
int pre_blank;
/*
- * How much is the nearest non-blank line above the split indented (or -1
- * if there is no such line)?
+ * How much is the nearest non-blank line above the split indented (or
+ * -1 if there is no such line)?
*/
int pre_indent;
@@ -581,13 +588,13 @@ static void measure_split(const xdfile_t *xdf, long split,
/*
* Compute a badness score for the hypothetical split whose measurements are
- * stored in m. The weight factors were determined empirically using the tools and
- * corpus described in
+ * stored in m. The weight factors were determined empirically using the tools
+ * and corpus described in
*
* https://github.com/mhagger/diff-slider-tools
*
- * Also see that project if you want to improve the weights based on, for example,
- * a larger or more diverse corpus.
+ * Also see that project if you want to improve the weights based on, for
+ * example, a larger or more diverse corpus.
*/
static void score_add_split(const struct split_measurement *m, struct split_score *s)
{
@@ -809,13 +816,16 @@ int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
group_init(xdfo, &go);
while (1) {
- /* If the group is empty in the to-be-compacted file, skip it: */
+ /*
+ * If the group is empty in the to-be-compacted file, skip it:
+ */
if (g.end == g.start)
goto next;
/*
* Now shift the change up and then down as far as possible in
- * each direction. If it bumps into any other changes, merge them.
+ * each direction. If it bumps into any other changes, merge
+ * them.
*/
do {
groupsize = g.end - g.start;
@@ -858,17 +868,17 @@ int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
* If the group can be shifted, then we can possibly use this
* freedom to produce a more intuitive diff.
*
- * The group is currently shifted as far down as possible, so the
- * heuristics below only have to handle upwards shifts.
+ * The group is currently shifted as far down as possible, so
+ * the heuristics below only have to handle upwards shifts.
*/
if (g.end == earliest_end) {
/* no shifting was possible */
} else if (end_matching_other != -1) {
/*
- * Move the possibly merged group of changes back to line
- * up with the last group of changes from the other file
- * that it can align with.
+ * Move the possibly merged group of changes back to
+ * line up with the last group of changes from the
+ * other file that it can align with.
*/
while (go.end == go.start) {
if (group_slide_up(xdf, &g, flags))
@@ -879,14 +889,15 @@ int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
} else if (flags & XDF_INDENT_HEURISTIC) {
/*
* Indent heuristic: a group of pure add/delete lines
- * implies two splits, one between the end of the "before"
- * context and the start of the group, and another between
- * the end of the group and the beginning of the "after"
- * context. Some splits are aesthetically better and some
- * are worse. We compute a badness "score" for each split,
- * and add the scores for the two splits to define a
- * "score" for each position that the group can be shifted
- * to. Then we pick the shift with the lowest score.
+ * implies two splits, one between the end of the
+ * "before" context and the start of the group, and
+ * another between the end of the group and the
+ * beginning of the "after" context. Some splits are
+ * aesthetically better and some are worse. We compute
+ * a badness "score" for each split, and add the scores
+ * for the two splits to define a "score" for each
+ * position that the group can be shifted to. Then we
+ * pick the shift with the lowest score.
*/
long shift, best_shift = -1;
struct split_score best_score;
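
Note: the rewrapped comment describes the indent heuristic: each legal shift of a pure add/delete group induces two splits, each split gets a badness score, and the shift with the lowest combined score wins. A schematic of that selection step; pick_best_shift() and its score callback are simplified stand-ins, not git's actual structures (the real code accumulates struct split_score via score_add_split()):

#include <limits.h>

static long pick_best_shift(long earliest_end, long latest_end, long groupsize,
			    long (*score_split)(long line))
{
	long shift, best_shift = -1, best_score = LONG_MAX;

	for (shift = earliest_end; shift <= latest_end; shift++) {
		long score = score_split(shift - groupsize)   /* split above the group */
			   + score_split(shift);              /* split below the group */
		if (score < best_score) {
			best_score = score;
			best_shift = shift;
		}
	}
	return best_shift;
}
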