Diffstat (limited to 'contrib')
-rw-r--r--  contrib/blameview/README | 9
-rwxr-xr-x  contrib/blameview/blameview.perl | 155
-rw-r--r--  contrib/buildsystems/Generators/Vcproj.pm | 4
-rwxr-xr-x [-rw-r--r--]  contrib/buildsystems/engine.pl | 12
-rwxr-xr-x [-rw-r--r--]  contrib/buildsystems/generate | 0
-rwxr-xr-x [-rw-r--r--]  contrib/buildsystems/parse.pl | 0
-rw-r--r-- [-rwxr-xr-x]  contrib/completion/git-completion.bash | 1688
-rw-r--r--  contrib/completion/git-completion.tcsh | 126
-rw-r--r--  contrib/completion/git-completion.zsh | 225
-rw-r--r--  contrib/completion/git-prompt.sh | 528
-rw-r--r--  contrib/contacts/.gitignore | 3
-rw-r--r--  contrib/contacts/Makefile | 71
-rwxr-xr-x  contrib/contacts/git-contacts | 203
-rw-r--r--  contrib/contacts/git-contacts.txt | 94
-rw-r--r--  contrib/continuous/cidaemon | 503
-rw-r--r--  contrib/continuous/post-receive-cinotify | 104
-rwxr-xr-x  contrib/convert-grafts-to-replace-refs.sh | 28
-rw-r--r--  contrib/convert-objects/git-convert-objects.txt | 3
-rw-r--r--  contrib/credential/gnome-keyring/.gitignore | 1
-rw-r--r--  contrib/credential/gnome-keyring/Makefile | 24
-rw-r--r--  contrib/credential/gnome-keyring/git-credential-gnome-keyring.c | 471
-rw-r--r--  contrib/credential/netrc/Makefile | 5
-rwxr-xr-x  contrib/credential/netrc/git-credential-netrc | 423
-rw-r--r--  contrib/credential/netrc/test.netrc | 13
-rwxr-xr-x  contrib/credential/netrc/test.pl | 106
-rw-r--r--  contrib/credential/osxkeychain/.gitignore | 1
-rw-r--r--  contrib/credential/osxkeychain/Makefile | 17
-rw-r--r--  contrib/credential/osxkeychain/git-credential-osxkeychain.c | 183
-rw-r--r--  contrib/credential/wincred/Makefile | 22
-rw-r--r--  contrib/credential/wincred/git-credential-wincred.c | 301
-rw-r--r--  contrib/diff-highlight/README | 193
-rwxr-xr-x  contrib/diff-highlight/diff-highlight | 213
-rw-r--r--  contrib/emacs/git-blame.el | 188
-rw-r--r--  contrib/emacs/git.el | 15
-rw-r--r--  contrib/examples/builtin-fetch--tool.c | 575
-rwxr-xr-x  contrib/examples/git-checkout.sh | 10
-rwxr-xr-x  contrib/examples/git-clone.sh | 22
-rwxr-xr-x  contrib/examples/git-commit.sh | 18
-rwxr-xr-x  contrib/examples/git-fetch.sh | 10
-rwxr-xr-x  contrib/examples/git-log.sh | 15
-rwxr-xr-x  contrib/examples/git-ls-remote.sh | 4
-rwxr-xr-x  contrib/examples/git-merge.sh | 134
-rwxr-xr-x  contrib/examples/git-notes.sh | 121
-rwxr-xr-x  contrib/examples/git-remote.perl | 10
-rwxr-xr-x  contrib/examples/git-repack.sh | 194
-rwxr-xr-x  contrib/examples/git-reset.sh | 4
-rwxr-xr-x  contrib/examples/git-resolve.sh | 6
-rwxr-xr-x  contrib/examples/git-revert.sh | 16
-rwxr-xr-x  contrib/examples/git-svnimport.perl | 6
-rw-r--r--  contrib/examples/git-svnimport.txt | 2
-rwxr-xr-x  contrib/examples/git-tag.sh | 2
-rwxr-xr-x  contrib/examples/git-whatchanged.sh | 28
-rwxr-xr-x  contrib/fast-import/git-import.perl | 2
-rwxr-xr-x  contrib/fast-import/git-import.sh | 2
-rwxr-xr-x  contrib/fast-import/git-p4 | 1928
-rw-r--r--  contrib/fast-import/git-p4.README | 12
-rw-r--r--  contrib/fast-import/git-p4.bat | 1
-rw-r--r--  contrib/fast-import/git-p4.txt | 210
-rwxr-xr-x  contrib/fast-import/import-directories.perl | 9
-rwxr-xr-x  contrib/fast-import/import-tars.perl | 5
-rwxr-xr-x  contrib/fast-import/import-zips.py | 101
-rw-r--r--  contrib/git-jump/README | 92
-rwxr-xr-x  contrib/git-jump/git-jump | 69
-rwxr-xr-x  contrib/git-resurrect.sh | 2
-rw-r--r--  contrib/git-shell-commands/README | 18
-rwxr-xr-x  contrib/git-shell-commands/help | 18
-rwxr-xr-x  contrib/git-shell-commands/list | 10
-rwxr-xr-x  contrib/gitview/gitview | 2
-rw-r--r--  contrib/gitview/gitview.txt | 3
-rwxr-xr-x  contrib/hg-to-git/hg-to-git.py | 17
-rw-r--r--  contrib/hooks/multimail/CHANGES | 33
-rw-r--r--  contrib/hooks/multimail/README | 500
-rw-r--r--  contrib/hooks/multimail/README.Git | 15
-rw-r--r--  contrib/hooks/multimail/README.migrate-from-post-receive-email | 145
-rwxr-xr-x  contrib/hooks/multimail/git_multimail.py | 2539
-rwxr-xr-x  contrib/hooks/multimail/migrate-mailhook-config | 269
-rwxr-xr-x  contrib/hooks/multimail/post-receive | 90
-rwxr-xr-x  contrib/hooks/post-receive-email | 152
-rwxr-xr-x [-rw-r--r--]  contrib/hooks/pre-auto-gc-battery | 1
-rwxr-xr-x [-rw-r--r--]  contrib/hooks/setgitperms.perl | 2
-rwxr-xr-x [-rw-r--r--]  contrib/hooks/update-paranoid | 0
-rw-r--r--  contrib/mw-to-git/.gitignore | 2
-rw-r--r--  contrib/mw-to-git/.perlcriticrc | 28
-rw-r--r--  contrib/mw-to-git/Git/Mediawiki.pm | 100
-rw-r--r--  contrib/mw-to-git/Makefile | 57
-rwxr-xr-x  contrib/mw-to-git/bin-wrapper/git | 14
-rwxr-xr-x  contrib/mw-to-git/git-mw.perl | 368
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl | 1338
-rw-r--r--  contrib/mw-to-git/git-remote-mediawiki.txt | 7
-rw-r--r--  contrib/mw-to-git/t/.gitignore | 4
-rw-r--r--  contrib/mw-to-git/t/Makefile | 31
-rw-r--r--  contrib/mw-to-git/t/README | 124
-rwxr-xr-x  contrib/mw-to-git/t/install-wiki.sh | 55
-rw-r--r--  contrib/mw-to-git/t/install-wiki/.gitignore | 1
-rw-r--r--  contrib/mw-to-git/t/install-wiki/LocalSettings.php | 129
-rw-r--r--  contrib/mw-to-git/t/install-wiki/db_install.php | 120
-rw-r--r--  contrib/mw-to-git/t/push-pull-tests.sh | 144
-rwxr-xr-x  contrib/mw-to-git/t/t9360-mw-to-git-clone.sh | 257
-rwxr-xr-x  contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh | 24
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 347
-rwxr-xr-x  contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 217
-rwxr-xr-x  contrib/mw-to-git/t/t9364-pull-by-rev.sh | 17
-rwxr-xr-x  contrib/mw-to-git/t/t9365-continuing-queries.sh | 23
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 432
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw.pl | 225
-rw-r--r--  contrib/mw-to-git/t/test.config | 37
-rw-r--r--  contrib/p4import/README | 1
-rw-r--r--  contrib/p4import/git-p4import.py | 360
-rw-r--r--  contrib/p4import/git-p4import.txt | 167
-rw-r--r--  contrib/patches/docbook-xsl-manpages-charmap.patch | 21
-rw-r--r--  contrib/persistent-https/LICENSE | 202
-rw-r--r--  contrib/persistent-https/Makefile | 38
-rw-r--r--  contrib/persistent-https/README | 62
-rw-r--r--  contrib/persistent-https/client.go | 189
-rw-r--r--  contrib/persistent-https/main.go | 82
-rw-r--r--  contrib/persistent-https/proxy.go | 190
-rw-r--r--  contrib/persistent-https/socket.go | 97
-rw-r--r--  contrib/remote-helpers/README | 15
-rwxr-xr-x  contrib/remote-helpers/git-remote-bzr | 13
-rwxr-xr-x  contrib/remote-helpers/git-remote-hg | 13
-rwxr-xr-x  contrib/rerere-train.sh | 2
-rwxr-xr-x  contrib/stats/mailmap.pl | 96
-rw-r--r--  contrib/subtree/.gitignore | 7
-rw-r--r--  contrib/subtree/COPYING | 339
-rw-r--r--  contrib/subtree/INSTALL | 28
-rw-r--r--  contrib/subtree/Makefile | 81
-rw-r--r--  contrib/subtree/README | 8
-rwxr-xr-x  contrib/subtree/git-subtree.sh | 737
-rw-r--r--  contrib/subtree/git-subtree.txt | 351
-rw-r--r--  contrib/subtree/t/Makefile | 69
-rwxr-xr-x  contrib/subtree/t/t7900-subtree.sh | 469
-rw-r--r--  contrib/subtree/todo | 50
-rw-r--r--  contrib/svn-fe/.gitignore | 4
-rw-r--r--  contrib/svn-fe/Makefile | 105
-rw-r--r--  contrib/svn-fe/svn-fe.c | 18
-rw-r--r--  contrib/svn-fe/svn-fe.txt | 71
-rwxr-xr-x  contrib/svn-fe/svnrdump_sim.py | 68
-rwxr-xr-x  contrib/thunderbird-patch-inline/appp.sh | 18
-rwxr-xr-x  contrib/workdir/git-new-workdir | 57
-rw-r--r--  contrib/vim/README | 32
140 files changed, 17119 insertions(+), 4398 deletions(-)
diff --git a/contrib/blameview/README b/contrib/blameview/README
deleted file mode 100644
index fada5ce909..0000000000
--- a/contrib/blameview/README
+++ /dev/null
@@ -1,9 +0,0 @@
-This is a sample program to use 'git-blame --incremental', based
-on this message.
-
-From: Jeff King <peff@peff.net>
-Subject: Re: More precise tag following
-To: Linus Torvalds <torvalds@linux-foundation.org>
-Cc: git@vger.kernel.org
-Date: Sat, 27 Jan 2007 18:52:38 -0500
-Message-ID: <20070127235238.GA28706@coredump.intra.peff.net>
diff --git a/contrib/blameview/blameview.perl b/contrib/blameview/blameview.perl
deleted file mode 100755
index 1dec00137b..0000000000
--- a/contrib/blameview/blameview.perl
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/perl
-
-use Gtk2 -init;
-use Gtk2::SimpleList;
-
-my $hash;
-my $fn;
-if ( @ARGV == 1 ) {
- $hash = "HEAD";
- $fn = shift;
-} elsif ( @ARGV == 2 ) {
- $hash = shift;
- $fn = shift;
-} else {
- die "Usage blameview [<rev>] <filename>";
-}
-
-Gtk2::Rc->parse_string(<<'EOS');
-style "treeview_style"
-{
- GtkTreeView::vertical-separator = 0
-}
-class "GtkTreeView" style "treeview_style"
-EOS
-
-my $window = Gtk2::Window->new('toplevel');
-$window->signal_connect(destroy => sub { Gtk2->main_quit });
-my $vpan = Gtk2::VPaned->new();
-$window->add($vpan);
-my $scrolled_window = Gtk2::ScrolledWindow->new;
-$vpan->pack1($scrolled_window, 1, 1);
-my $fileview = Gtk2::SimpleList->new(
- 'Commit' => 'text',
- 'FileLine' => 'text',
- 'Data' => 'text'
-);
-$scrolled_window->add($fileview);
-$fileview->get_column(0)->set_spacing(0);
-$fileview->set_size_request(1024, 768);
-$fileview->set_rules_hint(1);
-$fileview->signal_connect (row_activated => sub {
- my ($sl, $path, $column) = @_;
- my $row_ref = $sl->get_row_data_from_path ($path);
- system("blameview @$row_ref[0]~1 $fn &");
- });
-
-my $commitwindow = Gtk2::ScrolledWindow->new();
-$commitwindow->set_policy ('GTK_POLICY_AUTOMATIC','GTK_POLICY_AUTOMATIC');
-$vpan->pack2($commitwindow, 1, 1);
-my $commit_text = Gtk2::TextView->new();
-my $commit_buffer = Gtk2::TextBuffer->new();
-$commit_text->set_buffer($commit_buffer);
-$commitwindow->add($commit_text);
-
-$fileview->signal_connect (cursor_changed => sub {
- my ($sl) = @_;
- my ($path, $focus_column) = $sl->get_cursor();
- my $row_ref = $sl->get_row_data_from_path ($path);
- my $c_fh;
- open($c_fh, '-|', "git cat-file commit @$row_ref[0]")
- or die "unable to find commit @$row_ref[0]";
- my @buffer = <$c_fh>;
- $commit_buffer->set_text("@buffer");
- close($c_fh);
- });
-
-my $fh;
-open($fh, '-|', "git cat-file blob $hash:$fn")
- or die "unable to open $fn: $!";
-
-while(<$fh>) {
- chomp;
- $fileview->{data}->[$.] = ['HEAD', "$fn:$.", $_];
-}
-
-my $blame;
-open($blame, '-|', qw(git blame --incremental --), $fn, $hash)
- or die "cannot start git-blame $fn";
-
-Glib::IO->add_watch(fileno($blame), 'in', \&read_blame_line);
-
-$window->show_all;
-Gtk2->main;
-exit 0;
-
-my %commitinfo = ();
-
-sub flush_blame_line {
- my ($attr) = @_;
-
- return unless defined $attr;
-
- my ($commit, $s_lno, $lno, $cnt) =
- @{$attr}{qw(COMMIT S_LNO LNO CNT)};
-
- my ($filename, $author, $author_time, $author_tz) =
- @{$commitinfo{$commit}}{qw(FILENAME AUTHOR AUTHOR-TIME AUTHOR-TZ)};
- my $info = $author . ' ' . format_time($author_time, $author_tz);
-
- for(my $i = 0; $i < $cnt; $i++) {
- @{$fileview->{data}->[$lno+$i-1]}[0,1,2] =
- (substr($commit, 0, 8), $filename . ':' . ($s_lno+$i));
- }
-}
-
-my $buf;
-my $current;
-sub read_blame_line {
-
- my $r = sysread($blame, $buf, 1024, length($buf));
- die "I/O error" unless defined $r;
-
- if ($r == 0) {
- flush_blame_line($current);
- $current = undef;
- return 0;
- }
-
- while ($buf =~ s/([^\n]*)\n//) {
- my $line = $1;
-
- if (($commit, $s_lno, $lno, $cnt) =
- ($line =~ /^([0-9a-f]{40}) (\d+) (\d+) (\d+)$/)) {
- flush_blame_line($current);
- $current = +{
- COMMIT => $1,
- S_LNO => $2,
- LNO => $3,
- CNT => $4,
- };
- next;
- }
-
- # extended attribute values
- if ($line =~ /^(author|author-mail|author-time|author-tz|committer|committer-mail|committer-time|committer-tz|summary|filename) (.*)$/) {
- my $commit = $current->{COMMIT};
- $commitinfo{$commit}{uc($1)} = $2;
- next;
- }
- }
- return 1;
-}
-
-sub format_time {
- my $time = shift;
- my $tz = shift;
-
- my $minutes = $tz < 0 ? 0-$tz : $tz;
- $minutes = ($minutes / 100)*60 + ($minutes % 100);
- $minutes = $tz < 0 ? 0-$minutes : $minutes;
- $time += $minutes * 60;
- my @t = gmtime($time);
- return sprintf('%04d-%02d-%02d %02d:%02d:%02d %s',
- $t[5] + 1900, @t[4,3,2,1,0], $tz);
-}
diff --git a/contrib/buildsystems/Generators/Vcproj.pm b/contrib/buildsystems/Generators/Vcproj.pm
index be94ba18d2..cfa74adcc2 100644
--- a/contrib/buildsystems/Generators/Vcproj.pm
+++ b/contrib/buildsystems/Generators/Vcproj.pm
@@ -178,6 +178,7 @@ sub createLibProject {
MinimalRebuild="true"
RuntimeLibrary="1"
UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
WarningLevel="3"
DebugInformationFormat="3"
/>
@@ -244,6 +245,7 @@ sub createLibProject {
RuntimeLibrary="0"
EnableFunctionLevelLinking="true"
UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
WarningLevel="3"
DebugInformationFormat="3"
/>
@@ -401,6 +403,7 @@ sub createAppProject {
MinimalRebuild="true"
RuntimeLibrary="1"
UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
WarningLevel="3"
DebugInformationFormat="3"
/>
@@ -472,6 +475,7 @@ sub createAppProject {
RuntimeLibrary="0"
EnableFunctionLevelLinking="true"
UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
WarningLevel="3"
DebugInformationFormat="3"
/>
diff --git a/contrib/buildsystems/engine.pl b/contrib/buildsystems/engine.pl
index 20bd061b3e..23da787dc5 100644..100755
--- a/contrib/buildsystems/engine.pl
+++ b/contrib/buildsystems/engine.pl
@@ -135,6 +135,11 @@ sub parseMakeOutput
}
} while($ate_next);
+ if ($text =~ /^test /) {
+ # options to test (eg -o) may be mistaken for linker options
+ next;
+ }
+
if($text =~ / -c /) {
# compilation
handleCompileLine($text, $line);
@@ -180,9 +185,6 @@ sub parseMakeOutput
# } elsif ($text =~ /generate-cmdlist\.sh/) {
# # command for generating list of commands
#
-# } elsif ($text =~ /^test / && $text =~ /|| rm -f /) {
-# # commands removing executables, if they exist
-#
# } elsif ($text =~ /new locations or Tcl/) {
# # command for detecting Tcl/Tk changes
#
@@ -315,6 +317,10 @@ sub handleLinkLine
$appout = shift @parts;
} elsif ("$part" eq "-lz") {
push(@libs, "zlib.lib");
+ } elsif ("$part" eq "-lcrypto") {
+ push(@libs, "libeay32.lib");
+ } elsif ("$part" eq "-lssl") {
+ push(@libs, "ssleay32.lib");
} elsif ($part =~ /^-/) {
push(@lflags, $part);
} elsif ($part =~ /\.(a|lib)$/) {
diff --git a/contrib/buildsystems/generate b/contrib/buildsystems/generate
index bc10f25ff2..bc10f25ff2 100644..100755
--- a/contrib/buildsystems/generate
+++ b/contrib/buildsystems/generate
diff --git a/contrib/buildsystems/parse.pl b/contrib/buildsystems/parse.pl
index c9656ece99..c9656ece99 100644..100755
--- a/contrib/buildsystems/parse.pl
+++ b/contrib/buildsystems/parse.pl
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
index d3fec32997..8cfee95f88 100755..100644
--- a/contrib/completion/git-completion.bash
+++ b/contrib/completion/git-completion.bash
@@ -1,6 +1,4 @@
-#!bash
-#
-# bash completion support for core Git.
+# bash/zsh completion support for core Git.
#
# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
# Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/).
@@ -13,52 +11,22 @@
# *) .git/remotes file names
# *) git 'subcommands'
# *) tree paths within 'ref:path/to/file' expressions
+# *) file paths within current working directory and index
# *) common --long-options
#
# To use these routines:
#
-# 1) Copy this file to somewhere (e.g. ~/.git-completion.sh).
-# 2) Added the following line to your .bashrc:
-# source ~/.git-completion.sh
-#
-# 3) You may want to make sure the git executable is available
-# in your PATH before this script is sourced, as some caching
-# is performed while the script loads. If git isn't found
-# at source time then all lookups will be done on demand,
-# which may be slightly slower.
-#
-# 4) Consider changing your PS1 to also show the current branch:
-# PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
-#
-# The argument to __git_ps1 will be displayed only if you
-# are currently in a git repository. The %s token will be
-# the name of the current branch.
-#
-# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty
-# value, unstaged (*) and staged (+) changes will be shown next
-# to the branch name. You can configure this per-repository
-# with the bash.showDirtyState variable, which defaults to true
-# once GIT_PS1_SHOWDIRTYSTATE is enabled.
-#
-# You can also see if currently something is stashed, by setting
-# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
-# then a '$' will be shown next to the branch name.
-#
-# If you would like to see if there're untracked files, then you can
-# set GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're
-# untracked files, then a '%' will be shown next to the branch name.
-#
-# To submit patches:
-#
-# *) Read Documentation/SubmittingPatches
-# *) Send all patches to the current maintainer:
-#
-# "Shawn O. Pearce" <spearce@spearce.org>
-#
-# *) Always CC the Git mailing list:
-#
-# git@vger.kernel.org
+# 1) Copy this file to somewhere (e.g. ~/.git-completion.bash).
+# 2) Add the following line to your .bashrc/.zshrc:
+# source ~/.git-completion.bash
+# 3) Consider changing your PS1 to also show the current branch,
+# see git-prompt.sh for details.
#
+# If you use complex aliases of form '!f() { ... }; f', you can use the null
+# command ':' as the first command in the function body to declare the desired
+# completion style. For example '!f() { : git commit ; ... }; f' will
+# tell the completion to use commit completion. This also works with aliases
+# of form "!sh -c '...'". For example, "!sh -c ': git commit ; ... '".
case "$COMP_WORDBREAKS" in
*:*) : great ;;
@@ -72,6 +40,9 @@ __gitdir ()
if [ -z "${1-}" ]; then
if [ -n "${__git_dir-}" ]; then
echo "$__git_dir"
+ elif [ -n "${GIT_DIR-}" ]; then
+ test -d "${GIT_DIR-}" || return 1
+ echo "$GIT_DIR"
elif [ -d .git ]; then
echo .git
else
@@ -84,202 +55,339 @@ __gitdir ()
fi
}
-# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
-# returns text to add to bash PS1 prompt (includes branch name)
-__git_ps1 ()
-{
- local g="$(__gitdir)"
- if [ -n "$g" ]; then
- local r
- local b
- if [ -f "$g/rebase-merge/interactive" ]; then
- r="|REBASE-i"
- b="$(cat "$g/rebase-merge/head-name")"
- elif [ -d "$g/rebase-merge" ]; then
- r="|REBASE-m"
- b="$(cat "$g/rebase-merge/head-name")"
- else
- if [ -d "$g/rebase-apply" ]; then
- if [ -f "$g/rebase-apply/rebasing" ]; then
- r="|REBASE"
- elif [ -f "$g/rebase-apply/applying" ]; then
- r="|AM"
- else
- r="|AM/REBASE"
- fi
- elif [ -f "$g/MERGE_HEAD" ]; then
- r="|MERGING"
- elif [ -f "$g/BISECT_LOG" ]; then
- r="|BISECTING"
- fi
-
- b="$(git symbolic-ref HEAD 2>/dev/null)" || {
-
- b="$(
- case "${GIT_PS1_DESCRIBE_STYLE-}" in
- (contains)
- git describe --contains HEAD ;;
- (branch)
- git describe --contains --all HEAD ;;
- (describe)
- git describe HEAD ;;
- (* | default)
- git describe --exact-match HEAD ;;
- esac 2>/dev/null)" ||
-
- b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
- b="unknown"
- b="($b)"
- }
- fi
+# The following function is based on code from:
+#
+# bash_completion - programmable completion functions for bash 3.2+
+#
+# Copyright © 2006-2008, Ian Macdonald <ian@caliban.org>
+# © 2009-2010, Bash Completion Maintainers
+# <bash-completion-devel@lists.alioth.debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The latest version of this software can be obtained here:
+#
+# http://bash-completion.alioth.debian.org/
+#
+# RELEASE: 2.x
- local w
- local i
- local s
- local u
- local c
+# This function can be used to access a tokenized list of words
+# on the command line:
+#
+# __git_reassemble_comp_words_by_ref '=:'
+# if test "${words_[cword_-1]}" = -w
+# then
+# ...
+# fi
+#
+# The argument should be a collection of characters from the list of
+# word completion separators (COMP_WORDBREAKS) to treat as ordinary
+# characters.
+#
+# This is roughly equivalent to going back in time and setting
+# COMP_WORDBREAKS to exclude those characters. The intent is to
+# make option types like --date=<type> and <rev>:<path> easy to
+# recognize by treating each shell word as a single token.
+#
+# It is best not to set COMP_WORDBREAKS directly because the value is
+# shared with other completion scripts. By the time the completion
+# function gets called, COMP_WORDS has already been populated so local
+# changes to COMP_WORDBREAKS have no effect.
+#
+# Output: words_, cword_, cur_.
- if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
- if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
- c="BARE:"
- else
- b="GIT_DIR!"
- fi
- elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
- if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then
- if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then
- git diff --no-ext-diff --ignore-submodules \
- --quiet --exit-code || w="*"
- if git rev-parse --quiet --verify HEAD >/dev/null; then
- git diff-index --cached --quiet \
- --ignore-submodules HEAD -- || i="+"
- else
- i="#"
- fi
- fi
+__git_reassemble_comp_words_by_ref()
+{
+ local exclude i j first
+ # Which word separators to exclude?
+ exclude="${1//[^$COMP_WORDBREAKS]}"
+ cword_=$COMP_CWORD
+ if [ -z "$exclude" ]; then
+ words_=("${COMP_WORDS[@]}")
+ return
+ fi
+ # List of word completion separators has shrunk;
+ # re-assemble words to complete.
+ for ((i=0, j=0; i < ${#COMP_WORDS[@]}; i++, j++)); do
+ # Append each nonempty word consisting of just
+ # word separator characters to the current word.
+ first=t
+ while
+ [ $i -gt 0 ] &&
+ [ -n "${COMP_WORDS[$i]}" ] &&
+ # word consists of excluded word separators
+ [ "${COMP_WORDS[$i]//[^$exclude]}" = "${COMP_WORDS[$i]}" ]
+ do
+ # Attach to the previous token,
+ # unless the previous token is the command name.
+ if [ $j -ge 2 ] && [ -n "$first" ]; then
+ ((j--))
fi
- if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
- git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
+ first=
+ words_[$j]=${words_[j]}${COMP_WORDS[i]}
+ if [ $i = $COMP_CWORD ]; then
+ cword_=$j
fi
-
- if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then
- if [ -n "$(git ls-files --others --exclude-standard)" ]; then
- u="%"
- fi
+ if (($i < ${#COMP_WORDS[@]} - 1)); then
+ ((i++))
+ else
+ # Done.
+ return
fi
+ done
+ words_[$j]=${words_[j]}${COMP_WORDS[i]}
+ if [ $i = $COMP_CWORD ]; then
+ cword_=$j
fi
+ done
+}
- if [ -n "${1-}" ]; then
- printf "$1" "$c${b##refs/heads/}$w$i$s$u$r"
- else
- printf " (%s)" "$c${b##refs/heads/}$w$i$s$u$r"
- fi
+if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
+_get_comp_words_by_ref ()
+{
+ local exclude cur_ words_ cword_
+ if [ "$1" = "-n" ]; then
+ exclude=$2
+ shift 2
fi
+ __git_reassemble_comp_words_by_ref "$exclude"
+ cur_=${words_[cword_]}
+ while [ $# -gt 0 ]; do
+ case "$1" in
+ cur)
+ cur=$cur_
+ ;;
+ prev)
+ prev=${words_[$cword_-1]}
+ ;;
+ words)
+ words=("${words_[@]}")
+ ;;
+ cword)
+ cword=$cword_
+ ;;
+ esac
+ shift
+ done
}
+fi
-# __gitcomp_1 requires 2 arguments
-__gitcomp_1 ()
+__gitcompappend ()
{
- local c IFS=' '$'\t'$'\n'
- for c in $1; do
- case "$c$2" in
- --*=*) printf %s$'\n' "$c$2" ;;
- *.) printf %s$'\n' "$c$2" ;;
- *) printf %s$'\n' "$c$2 " ;;
- esac
+ local i=${#COMPREPLY[@]}
+ for x in $1; do
+ if [[ "$x" == "$3"* ]]; then
+ COMPREPLY[i++]="$2$x$4"
+ fi
done
}
-# __gitcomp accepts 1, 2, 3, or 4 arguments
-# generates completion reply with compgen
+__gitcompadd ()
+{
+ COMPREPLY=()
+ __gitcompappend "$@"
+}
+
+# Generates completion reply, appending a space to possible completion words,
+# if necessary.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word (optional).
__gitcomp ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
- if [ $# -gt 2 ]; then
- cur="$3"
- fi
- case "$cur" in
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
--*=)
- COMPREPLY=()
;;
*)
- local IFS=$'\n'
- COMPREPLY=($(compgen -P "${2-}" \
- -W "$(__gitcomp_1 "${1-}" "${4-}")" \
- -- "$cur"))
+ local c i=0 IFS=$' \t\n'
+ for c in $1; do
+ c="$c${4-}"
+ if [[ $c == "$cur_"* ]]; then
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ COMPREPLY[i++]="${2-}$c"
+ fi
+ done
;;
esac
}
-# __git_heads accepts 0 or 1 arguments (to pass to __gitdir)
+# Variation of __gitcomp_nl () that appends to the existing list of
+# completion candidates, COMPREPLY.
+__gitcomp_nl_append ()
+{
+ local IFS=$'\n'
+ __gitcompappend "$1" "${2-}" "${3-$cur}" "${4- }"
+}
+
+# Generates completion reply from newline-separated possible completion words
+# by appending a space to all of them.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words, separated by a single newline.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word instead of
+# the default space (optional). If specified but empty, nothing is
+# appended.
+__gitcomp_nl ()
+{
+ COMPREPLY=()
+ __gitcomp_nl_append "$@"
+}
+
+# Generates completion reply with compgen from newline-separated possible
+# completion filenames.
+# It accepts 1 to 3 arguments:
+# 1: List of possible completion filenames, separated by a single newline.
+# 2: A directory prefix to be added to each possible completion filename
+# (optional).
+# 3: Generate possible completion matches for this word (optional).
+__gitcomp_file ()
+{
+ local IFS=$'\n'
+
+ # XXX does not work when the directory prefix contains a tilde,
+ # since tilde expansion is not applied.
+ # This means that COMPREPLY will be empty and Bash default
+ # completion will be used.
+ __gitcompadd "$1" "${2-}" "${3-$cur}" ""
+
+ # use a hack to enable file mode in bash < 4
+ compopt -o filenames +o nospace 2>/dev/null ||
+ compgen -f /non-existing-dir/ > /dev/null
+}
+
+# Execute 'git ls-files', unless the --committable option is specified, in
+# which case it runs 'git diff-index' to find out the files that can be
+# committed. It return paths relative to the directory specified in the first
+# argument, and using the options specified in the second argument.
+__git_ls_files_helper ()
+{
+ if [ "$2" == "--committable" ]; then
+ git -C "$1" diff-index --name-only --relative HEAD
+ else
+ # NOTE: $2 is not quoted in order to support multiple options
+ git -C "$1" ls-files --exclude-standard $2
+ fi 2>/dev/null
+}
+
+
+# __git_index_files accepts 1 or 2 arguments:
+# 1: Options to pass to ls-files (required).
+# 2: A directory path (optional).
+# If provided, only files within the specified directory are listed.
+# Sub directories are never recursed. Path must have a trailing
+# slash.
+__git_index_files ()
+{
+ local dir="$(__gitdir)" root="${2-.}" file
+
+ if [ -d "$dir" ]; then
+ __git_ls_files_helper "$root" "$1" |
+ while read -r file; do
+ case "$file" in
+ ?*/*) echo "${file%%/*}" ;;
+ *) echo "$file" ;;
+ esac
+ done | sort | uniq
+ fi
+}
+
__git_heads ()
{
- local cmd i is_hash=y dir="$(__gitdir "${1-}")"
+ local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/heads
return
fi
- for i in $(git ls-remote "${1-}" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
}
-# __git_tags accepts 0 or 1 arguments (to pass to __gitdir)
__git_tags ()
{
- local cmd i is_hash=y dir="$(__gitdir "${1-}")"
+ local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/tags
return
fi
- for i in $(git ls-remote "${1-}" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
}
-# __git_refs accepts 0 or 1 arguments (to pass to __gitdir)
+# __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments
+# presence of 2nd argument means use the guess heuristic employed
+# by checkout for tracking branches
__git_refs ()
{
- local i is_hash=y dir="$(__gitdir "${1-}")"
- local cur="${COMP_WORDS[COMP_CWORD]}" format refs
+ local i hash dir="$(__gitdir "${1-}")" track="${2-}"
+ local format refs
if [ -d "$dir" ]; then
case "$cur" in
refs|refs/*)
format="refname"
refs="${cur%/*}"
+ track=""
;;
*)
- if [ -e "$dir/HEAD" ]; then echo HEAD; fi
+ for i in HEAD FETCH_HEAD ORIG_HEAD MERGE_HEAD; do
+ if [ -e "$dir/$i" ]; then echo $i; fi
+ done
format="refname:short"
refs="refs/tags refs/heads refs/remotes"
;;
esac
git --git-dir="$dir" for-each-ref --format="%($format)" \
$refs
+ if [ -n "$track" ]; then
+ # employ the heuristic used by git checkout
+ # Try to find a remote branch that matches the completion word
+ # but only output if the branch name is unique
+ local ref entry
+ git --git-dir="$dir" for-each-ref --shell --format="ref=%(refname:short)" \
+ "refs/remotes/" | \
+ while read -r entry; do
+ eval "$entry"
+ ref="${ref#*/}"
+ if [[ "$ref" == "$cur"* ]]; then
+ echo "$ref"
+ fi
+ done | sort | uniq -u
+ fi
return
fi
- for i in $(git ls-remote "$dir" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;;
- n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;;
- n,refs/remotes/*) is_hash=y; echo "${i#refs/remotes/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
+ case "$cur" in
+ refs|refs/*)
+ git ls-remote "$dir" "$cur*" 2>/dev/null | \
+ while read -r hash i; do
+ case "$i" in
+ *^{}) ;;
+ *) echo "$i" ;;
+ esac
+ done
+ ;;
+ *)
+ echo "HEAD"
+ git for-each-ref --format="%(refname:short)" -- \
+ "refs/remotes/$dir/" 2>/dev/null | sed -e "s#^$dir/##"
+ ;;
+ esac
}
# __git_refs2 requires 1 argument (to pass to __git_refs)
@@ -294,42 +402,25 @@ __git_refs2 ()
# __git_refs_remotes requires 1 argument (to pass to ls-remote)
__git_refs_remotes ()
{
- local cmd i is_hash=y
- for i in $(git ls-remote "$1" 2>/dev/null); do
- case "$is_hash,$i" in
- n,refs/heads/*)
- is_hash=y
- echo "$i:refs/remotes/$1/${i#refs/heads/}"
- ;;
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y;;
- n,*) is_hash=y; ;;
- esac
+ local i hash
+ git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \
+ while read -r hash i; do
+ echo "$i:refs/remotes/$1/${i#refs/heads/}"
done
}
__git_remotes ()
{
- local i ngoff IFS=$'\n' d="$(__gitdir)"
- shopt -q nullglob || ngoff=1
- shopt -s nullglob
- for i in "$d/remotes"/*; do
- echo ${i#$d/remotes/}
- done
- [ "$ngoff" ] && shopt -u nullglob
+ local i IFS=$'\n' d="$(__gitdir)"
+ test -d "$d/remotes" && ls -1 "$d/remotes"
for i in $(git --git-dir="$d" config --get-regexp 'remote\..*\.url' 2>/dev/null); do
i="${i#remote.}"
echo "${i/.url*/}"
done
}
-__git_merge_strategies ()
+__git_list_merge_strategies ()
{
- if [ -n "${__git_merge_strategylist-}" ]; then
- echo "$__git_merge_strategylist"
- return
- fi
git merge -s help 2>&1 |
sed -n -e '/[Aa]vailable strategies are: /,/^$/{
s/\.$//
@@ -339,36 +430,47 @@ __git_merge_strategies ()
p
}'
}
-__git_merge_strategylist=
-__git_merge_strategylist=$(__git_merge_strategies 2>/dev/null)
-__git_complete_file ()
+__git_merge_strategies=
+# 'git merge -s help' (and thus detection of the merge strategy
+# list) fails, unfortunately, if run outside of any git working
+# tree. __git_merge_strategies is set to the empty string in
+# that case, and the detection will be repeated the next time it
+# is needed.
+__git_compute_merge_strategies ()
{
- local pfx ls ref cur="${COMP_WORDS[COMP_CWORD]}"
- case "$cur" in
+ test -n "$__git_merge_strategies" ||
+ __git_merge_strategies=$(__git_list_merge_strategies)
+}
+
+__git_complete_revlist_file ()
+{
+ local pfx ls ref cur_="$cur"
+ case "$cur_" in
+ *..?*:*)
+ return
+ ;;
?*:*)
- ref="${cur%%:*}"
- cur="${cur#*:}"
- case "$cur" in
+ ref="${cur_%%:*}"
+ cur_="${cur_#*:}"
+ case "$cur_" in
?*/*)
- pfx="${cur%/*}"
- cur="${cur##*/}"
+ pfx="${cur_%/*}"
+ cur_="${cur_##*/}"
ls="$ref:$pfx"
pfx="$pfx/"
;;
*)
ls="$ref"
;;
- esac
+ esac
case "$COMP_WORDBREAKS" in
*:*) : great ;;
*) pfx="$ref:$pfx" ;;
esac
- local IFS=$'\n'
- COMPREPLY=($(compgen -P "$pfx" \
- -W "$(git --git-dir="$(__gitdir)" ls-tree "$ls" \
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \
| sed '/^100... blob /{
s,^.* ,,
s,$, ,
@@ -382,91 +484,121 @@ __git_complete_file ()
s,$,/,
}
s/^.* //')" \
- -- "$cur"))
+ "$pfx" "$cur_" ""
+ ;;
+ *...*)
+ pfx="${cur_%...*}..."
+ cur_="${cur_#*...}"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ ;;
+ *..*)
+ pfx="${cur_%..*}.."
+ cur_="${cur_#*..}"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
}
-__git_complete_revlist ()
+
+# __git_complete_index_file requires 1 argument:
+# 1: the options to pass to ls-file
+#
+# The exception is --committable, which finds the files appropriate commit.
+__git_complete_index_file ()
{
- local pfx cur="${COMP_WORDS[COMP_CWORD]}"
- case "$cur" in
- *...*)
- pfx="${cur%...*}..."
- cur="${cur#*...}"
- __gitcomp "$(__git_refs)" "$pfx" "$cur"
- ;;
- *..*)
- pfx="${cur%..*}.."
- cur="${cur#*..}"
- __gitcomp "$(__git_refs)" "$pfx" "$cur"
- ;;
- *)
- __gitcomp "$(__git_refs)"
+ local pfx="" cur_="$cur"
+
+ case "$cur_" in
+ ?*/*)
+ pfx="${cur_%/*}"
+ cur_="${cur_##*/}"
+ pfx="${pfx}/"
;;
esac
+
+ __gitcomp_file "$(__git_index_files "$1" ${pfx:+"$pfx"})" "$pfx" "$cur_"
+}
+
+__git_complete_file ()
+{
+ __git_complete_revlist_file
+}
+
+__git_complete_revlist ()
+{
+ __git_complete_revlist_file
}
__git_complete_remote_or_refspec ()
{
- local cmd="${COMP_WORDS[1]}"
- local cur="${COMP_WORDS[COMP_CWORD]}"
+ local cur_="$cur" cmd="${words[1]}"
local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0
- while [ $c -lt $COMP_CWORD ]; do
- i="${COMP_WORDS[c]}"
+ if [ "$cmd" = "remote" ]; then
+ ((c++))
+ fi
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
case "$i" in
- --all|--mirror) [ "$cmd" = "push" ] && no_complete_refspec=1 ;;
+ --mirror) [ "$cmd" = "push" ] && no_complete_refspec=1 ;;
+ --all)
+ case "$cmd" in
+ push) no_complete_refspec=1 ;;
+ fetch)
+ return
+ ;;
+ *) ;;
+ esac
+ ;;
-*) ;;
*) remote="$i"; break ;;
esac
- c=$((++c))
+ ((c++))
done
if [ -z "$remote" ]; then
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
return
fi
if [ $no_complete_refspec = 1 ]; then
- COMPREPLY=()
return
fi
[ "$remote" = "." ] && remote=
- case "$cur" in
+ case "$cur_" in
*:*)
case "$COMP_WORDBREAKS" in
*:*) : great ;;
- *) pfx="${cur%%:*}:" ;;
+ *) pfx="${cur_%%:*}:" ;;
esac
- cur="${cur#*:}"
+ cur_="${cur_#*:}"
lhs=0
;;
+*)
pfx="+"
- cur="${cur#+}"
+ cur_="${cur_#+}"
;;
esac
case "$cmd" in
fetch)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs2 "$remote")" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs)" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
- pull)
+ pull|remote)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs)" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
push)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs)" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur"
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
fi
;;
esac
@@ -474,29 +606,34 @@ __git_complete_remote_or_refspec ()
__git_complete_strategy ()
{
- case "${COMP_WORDS[COMP_CWORD-1]}" in
+ __git_compute_merge_strategies
+ case "$prev" in
-s|--strategy)
- __gitcomp "$(__git_merge_strategies)"
+ __gitcomp "$__git_merge_strategies"
return 0
esac
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--strategy=*)
- __gitcomp "$(__git_merge_strategies)" "" "${cur##--strategy=}"
+ __gitcomp "$__git_merge_strategies" "" "${cur##--strategy=}"
return 0
;;
esac
return 1
}
-__git_all_commands ()
-{
- if [ -n "${__git_all_commandlist-}" ]; then
- echo "$__git_all_commandlist"
- return
+__git_commands () {
+ if test -n "${GIT_TESTING_COMMAND_COMPLETION:-}"
+ then
+ printf "%s" "${GIT_TESTING_COMMAND_COMPLETION}"
+ else
+ git help -a|egrep '^ [a-zA-Z0-9]'
fi
+}
+
+__git_list_all_commands ()
+{
local i IFS=" "$'\n'
- for i in $(git help -a|egrep '^ [a-zA-Z0-9]')
+ for i in $(__git_commands)
do
case $i in
*--*) : helper pattern;;
@@ -504,17 +641,19 @@ __git_all_commands ()
esac
done
}
-__git_all_commandlist=
-__git_all_commandlist="$(__git_all_commands 2>/dev/null)"
-__git_porcelain_commands ()
+__git_all_commands=
+__git_compute_all_commands ()
+{
+ test -n "$__git_all_commands" ||
+ __git_all_commands=$(__git_list_all_commands)
+}
+
+__git_list_porcelain_commands ()
{
- if [ -n "${__git_porcelain_commandlist-}" ]; then
- echo "$__git_porcelain_commandlist"
- return
- fi
local i IFS=" "$'\n'
- for i in "help" $(__git_all_commands)
+ __git_compute_all_commands
+ for i in $__git_all_commands
do
case $i in
*--*) : helper pattern;;
@@ -523,10 +662,14 @@ __git_porcelain_commands ()
archimport) : import;;
cat-file) : plumbing;;
check-attr) : plumbing;;
+ check-ignore) : plumbing;;
+ check-mailmap) : plumbing;;
check-ref-format) : plumbing;;
checkout-index) : plumbing;;
commit-tree) : plumbing;;
count-objects) : infrequent;;
+ credential-cache) : credentials helper;;
+ credential-store) : credentials helper;;
cvsexportcommit) : export;;
cvsimport) : import;;
cvsserver) : daemon;;
@@ -545,7 +688,6 @@ __git_porcelain_commands ()
index-pack) : plumbing;;
init-db) : deprecated;;
local-fetch) : plumbing;;
- lost-found) : infrequent;;
ls-files) : plumbing;;
ls-remote) : plumbing;;
ls-tree) : plumbing;;
@@ -559,14 +701,12 @@ __git_porcelain_commands ()
pack-refs) : plumbing;;
parse-remote) : plumbing;;
patch-id) : plumbing;;
- peek-remote) : plumbing;;
prune) : plumbing;;
prune-packed) : plumbing;;
quiltimport) : import;;
read-tree) : plumbing;;
receive-pack) : plumbing;;
- reflog) : plumbing;;
- repo-config) : deprecated;;
+ remote-*) : transport;;
rerere) : plumbing;;
rev-list) : plumbing;;
rev-parse) : plumbing;;
@@ -579,7 +719,6 @@ __git_porcelain_commands ()
ssh-*) : transport;;
stripspace) : plumbing;;
symbolic-ref) : plumbing;;
- tar-tree) : deprecated;;
unpack-file) : plumbing;;
unpack-objects) : plumbing;;
update-index) : plumbing;;
@@ -595,8 +734,27 @@ __git_porcelain_commands ()
esac
done
}
-__git_porcelain_commandlist=
-__git_porcelain_commandlist="$(__git_porcelain_commands 2>/dev/null)"
+
+__git_porcelain_commands=
+__git_compute_porcelain_commands ()
+{
+ __git_compute_all_commands
+ test -n "$__git_porcelain_commands" ||
+ __git_porcelain_commands=$(__git_list_porcelain_commands)
+}
+
+__git_pretty_aliases ()
+{
+ local i IFS=$'\n'
+ for i in $(git --git-dir="$(__gitdir)" config --get-regexp "pretty\..*" 2>/dev/null); do
+ case "$i" in
+ pretty.*)
+ i="${i#pretty.}"
+ echo "${i/ */}"
+ ;;
+ esac
+ done
+}
__git_aliases ()
{
@@ -617,10 +775,23 @@ __git_aliased_command ()
local word cmdline=$(git --git-dir="$(__gitdir)" \
config --get "alias.$1")
for word in $cmdline; do
- if [ "${word##-*}" ]; then
- echo $word
+ case "$word" in
+ \!gitk|gitk)
+ echo "gitk"
return
- fi
+ ;;
+ \!*) : shell command alias ;;
+ -*) : option ;;
+ *=*) : setting env ;;
+ git) : git itself ;;
+ \(\)) : skip parens of shell function definition ;;
+ {) : skip start of shell helper function ;;
+ :) : skip null command ;;
+ \'*) : skip opening quote after sh -c ;;
+ *)
+ echo "$word"
+ return
+ esac
done
}
@@ -628,38 +799,74 @@ __git_aliased_command ()
__git_find_on_cmdline ()
{
local word subcommand c=1
-
- while [ $c -lt $COMP_CWORD ]; do
- word="${COMP_WORDS[c]}"
+ while [ $c -lt $cword ]; do
+ word="${words[c]}"
for subcommand in $1; do
if [ "$subcommand" = "$word" ]; then
echo "$subcommand"
return
fi
done
- c=$((++c))
+ ((c++))
done
}
__git_has_doubledash ()
{
local c=1
- while [ $c -lt $COMP_CWORD ]; do
- if [ "--" = "${COMP_WORDS[c]}" ]; then
+ while [ $c -lt $cword ]; do
+ if [ "--" = "${words[c]}" ]; then
return 0
fi
- c=$((++c))
+ ((c++))
done
return 1
}
+# Try to count non option arguments passed on the command line for the
+# specified git command.
+# When options are used, it is necessary to use the special -- option to
+# tell the implementation were non option arguments begin.
+# XXX this can not be improved, since options can appear everywhere, as
+# an example:
+# git mv x -n y
+#
+# __git_count_arguments requires 1 argument: the git command executed.
+__git_count_arguments ()
+{
+ local word i c=0
+
+ # Skip "git" (first argument)
+ for ((i=1; i < ${#words[@]}; i++)); do
+ word="${words[i]}"
+
+ case "$word" in
+ --)
+ # Good; we can assume that the following are only non
+ # option arguments.
+ ((c = 0))
+ ;;
+ "$1")
+ # Skip the specified git command and discard git
+ # main options
+ ((c = 0))
+ ;;
+ ?*)
+ ((c++))
+ ;;
+ esac
+ done
+
+ printf "%d" $c
+}
+
__git_whitespacelist="nowarn warn error error-all fix"
_git_am ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}" dir="$(__gitdir)"
+ local dir="$(__gitdir)"
if [ -d "$dir"/rebase-apply ]; then
- __gitcomp "--skip --resolved --abort"
+ __gitcomp "--skip --continue --resolved --abort"
return
fi
case "$cur" in
@@ -676,12 +883,10 @@ _git_am ()
"
return
esac
- COMPREPLY=()
}
_git_apply ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--whitespace=*)
__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
@@ -697,14 +902,10 @@ _git_apply ()
"
return
esac
- COMPREPLY=()
}
_git_add ()
{
- __git_has_doubledash && return
-
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -713,19 +914,20 @@ _git_add ()
"
return
esac
- COMPREPLY=()
+
+ # XXX should we check for --update and --all options ?
+ __git_complete_index_file "--others --modified --directory --no-empty-directory"
}
_git_archive ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--format=*)
__gitcomp "$(git archive --list)" "" "${cur##--format=}"
return
;;
--remote=*)
- __gitcomp "$(__git_remotes)" "" "${cur##--remote=}"
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}"
return
;;
--*)
@@ -746,16 +948,19 @@ _git_bisect ()
local subcommands="start bad good skip reset visualize replay log run"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
- __gitcomp "$subcommands"
+ if [ -f "$(__gitdir)"/BISECT_START ]; then
+ __gitcomp "$subcommands"
+ else
+ __gitcomp "replay start"
+ fi
return
fi
case "$subcommand" in
- bad|good|reset|skip)
- __gitcomp "$(__git_refs)"
+ bad|good|reset|skip|start)
+ __gitcomp_nl "$(__git_refs)"
;;
*)
- COMPREPLY=()
;;
esac
}
@@ -764,27 +969,32 @@ _git_branch ()
{
local i c=1 only_local_ref="n" has_r="n"
- while [ $c -lt $COMP_CWORD ]; do
- i="${COMP_WORDS[c]}"
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
case "$i" in
-d|-m) only_local_ref="y" ;;
-r) has_r="y" ;;
esac
- c=$((++c))
+ ((c++))
done
- case "${COMP_WORDS[COMP_CWORD]}" in
+ case "$cur" in
+ --set-upstream-to=*)
+ __gitcomp "$(__git_refs)" "" "${cur##--set-upstream-to=}"
+ ;;
--*)
__gitcomp "
--color --no-color --verbose --abbrev= --no-abbrev
--track --no-track --contains --merged --no-merged
+ --set-upstream-to= --edit-description --list
+ --unset-upstream
"
;;
*)
if [ $only_local_ref = "y" -a $has_r = "n" ]; then
- __gitcomp "$(__git_heads)"
+ __gitcomp_nl "$(__git_heads)"
else
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
fi
;;
esac
@@ -792,8 +1002,8 @@ _git_branch ()
_git_bundle ()
{
- local cmd="${COMP_WORDS[2]}"
- case "$COMP_CWORD" in
+ local cmd="${words[2]}"
+ case "$cword" in
2)
__gitcomp "create list-heads verify unbundle"
;;
@@ -814,7 +1024,6 @@ _git_checkout ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--conflict=*)
__gitcomp "diff3 merge" "" "${cur##--conflict=}"
@@ -822,11 +1031,17 @@ _git_checkout ()
--*)
__gitcomp "
--quiet --ours --theirs --track --no-track --merge
- --conflict= --patch
+ --conflict= --orphan --patch
"
;;
*)
- __gitcomp "$(__git_refs)"
+ # check if --track, --no-track, or --no-guess was specified
+ # if so, disable DWIM mode
+ local flags="--track --no-track --no-guess" track=1
+ if [ -n "$(__git_find_on_cmdline "$flags")" ]; then
+ track=''
+ fi
+ __gitcomp_nl "$(__git_refs '' $track)"
;;
esac
}
@@ -838,34 +1053,36 @@ _git_cherry ()
_git_cherry_pick ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
+ local dir="$(__gitdir)"
+ if [ -f "$dir"/CHERRY_PICK_HEAD ]; then
+ __gitcomp "--continue --quit --abort"
+ return
+ fi
case "$cur" in
--*)
- __gitcomp "--edit --no-commit"
+ __gitcomp "--edit --no-commit --signoff --strategy= --mainline"
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
}
_git_clean ()
{
- __git_has_doubledash && return
-
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--dry-run --quiet"
return
;;
esac
- COMPREPLY=()
+
+ # XXX should we check for -x option ?
+ __git_complete_index_file "--others --directory"
}
_git_clone ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -881,33 +1098,61 @@ _git_clone ()
--upload-pack
--template=
--depth
+ --single-branch
+ --branch
"
return
;;
esac
- COMPREPLY=()
}
_git_commit ()
{
- __git_has_doubledash && return
+ case "$prev" in
+ -c|-C)
+ __gitcomp_nl "$(__git_refs)" "" "${cur}"
+ return
+ ;;
+ esac
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
+ --cleanup=*)
+ __gitcomp "default strip verbatim whitespace
+ " "" "${cur##--cleanup=}"
+ return
+ ;;
+ --reuse-message=*|--reedit-message=*|\
+ --fixup=*|--squash=*)
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
+ return
+ ;;
+ --untracked-files=*)
+ __gitcomp "all no normal" "" "${cur##--untracked-files=}"
+ return
+ ;;
--*)
__gitcomp "
--all --author= --signoff --verify --no-verify
- --edit --amend --include --only --interactive
- --dry-run
+ --edit --no-edit
+ --amend --include --only --interactive
+ --dry-run --reuse-message= --reedit-message=
+ --reset-author --file= --message= --template=
+ --cleanup= --untracked-files --untracked-files=
+ --verbose --quiet --fixup= --squash=
"
return
esac
- COMPREPLY=()
+
+ if git rev-parse --verify --quiet HEAD >/dev/null; then
+ __git_complete_index_file "--committable"
+ else
+ # This is the first commit
+ __git_complete_index_file "--cached"
+ fi
}
_git_describe ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -916,71 +1161,87 @@ _git_describe ()
"
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
+__git_diff_algorithms="myers minimal patience histogram"
+
__git_diff_common_options="--stat --numstat --shortstat --summary
--patch-with-stat --name-only --name-status --color
--no-color --color-words --no-renames --check
--full-index --binary --abbrev --diff-filter=
--find-copies-harder
--text --ignore-space-at-eol --ignore-space-change
- --ignore-all-space --exit-code --quiet --ext-diff
- --no-ext-diff
+ --ignore-all-space --ignore-blank-lines --exit-code
+ --quiet --ext-diff --no-ext-diff
--no-prefix --src-prefix= --dst-prefix=
--inter-hunk-context=
- --patience
- --raw
+ --patience --histogram --minimal
+ --raw --word-diff
--dirstat --dirstat= --dirstat-by-file
--dirstat-by-file= --cumulative
+ --diff-algorithm=
"
_git_diff ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
+ --diff-algorithm=*)
+ __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+ return
+ ;;
--*)
__gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
- --base --ours --theirs
+ --base --ours --theirs --no-index
$__git_diff_common_options
"
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
-__git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff
- tkdiff vimdiff gvimdiff xxdiff araxis
+__git_mergetools_common="diffuse diffmerge ecmerge emerge kdiff3 meld opendiff
+ tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc codecompare
"
_git_difftool ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
+ __git_has_doubledash && return
+
case "$cur" in
--tool=*)
__gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}"
return
;;
--*)
- __gitcomp "--tool="
+ __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
+ --base --ours --theirs
+ --no-renames --diff-filter= --find-copies-harder
+ --relative --ignore-submodules
+ --tool="
return
;;
esac
- COMPREPLY=()
+ __git_complete_revlist_file
}
+__git_fetch_recurse_submodules="yes on-demand no"
+
__git_fetch_options="
--quiet --verbose --append --upload-pack --force --keep --depth=
- --tags --no-tags
+ --tags --no-tags --all --prune --dry-run --recurse-submodules=
"
_git_fetch ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
+ --recurse-submodules=*)
+ __gitcomp "$__git_fetch_recurse_submodules" "" "${cur##--recurse-submodules=}"
+ return
+ ;;
--*)
__gitcomp "$__git_fetch_options"
return
@@ -989,9 +1250,17 @@ _git_fetch ()
__git_complete_remote_or_refspec
}
+__git_format_patch_options="
+ --stdout --attach --no-attach --thread --thread= --no-thread
+ --numbered --start-number --numbered-files --keep-subject --signoff
+ --signature --no-signature --in-reply-to= --cc= --full-index --binary
+ --not --all --cover-letter --no-prefix --src-prefix= --dst-prefix=
+ --inline --suffix= --ignore-if-in-upstream --subject-prefix=
+ --output-directory --reroll-count --to= --quiet --notes
+"
+
_git_format_patch ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--thread=*)
__gitcomp "
@@ -1000,21 +1269,7 @@ _git_format_patch ()
return
;;
--*)
- __gitcomp "
- --stdout --attach --no-attach --thread --thread=
- --output-directory
- --numbered --start-number
- --numbered-files
- --keep-subject
- --signoff
- --in-reply-to= --cc=
- --full-index --binary
- --not --all
- --cover-letter
- --no-prefix --src-prefix= --dst-prefix=
- --inline --suffix= --ignore-if-in-upstream
- --subject-prefix=
- "
+ __gitcomp "$__git_format_patch_options"
return
;;
esac
@@ -1023,7 +1278,6 @@ _git_format_patch ()
_git_fsck ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -1033,33 +1287,39 @@ _git_fsck ()
return
;;
esac
- COMPREPLY=()
}
_git_gc ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--prune --aggressive"
return
;;
esac
- COMPREPLY=()
+}
+
+_git_gitk ()
+{
+ _gitk
+}
+
+__git_match_ctag() {
+ awk "/^${1////\\/}/ { print \$1 }" "$2"
}
_git_grep ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
--cached
--text --ignore-case --word-regexp --invert-match
- --full-name
+ --full-name --line-number
--extended-regexp --basic-regexp --fixed-strings
+ --perl-regexp
--files-with-matches --name-only
--files-without-match
--max-depth
@@ -1070,29 +1330,37 @@ _git_grep ()
;;
esac
- __gitcomp "$(__git_refs)"
+ case "$cword,$prev" in
+ 2,*|*,-*)
+ if test -r tags; then
+ __gitcomp_nl "$(__git_match_ctag "$cur" tags)"
+ return
+ fi
+ ;;
+ esac
+
+ __gitcomp_nl "$(__git_refs)"
}
_git_help ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--all --info --man --web"
return
;;
esac
- __gitcomp "$(__git_all_commands)
+ __git_compute_all_commands
+ __gitcomp "$__git_all_commands $(__git_aliases)
attributes cli core-tutorial cvs-migration
diffcore gitk glossary hooks ignore modules
- repository-layout tutorial tutorial-2
+ namespaces repository-layout tutorial tutorial-2
workflows
"
}
_git_init ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--shared=*)
__gitcomp "
@@ -1105,14 +1373,10 @@ _git_init ()
return
;;
esac
- COMPREPLY=()
}
_git_ls_files ()
{
- __git_has_doubledash && return
-
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--cached --deleted --modified --others --ignored
@@ -1125,12 +1389,15 @@ _git_ls_files ()
return
;;
esac
- COMPREPLY=()
+
+ # XXX ignore options like --modified and always suggest all cached
+ # files.
+ __git_complete_index_file "--cached"
}
_git_ls_remote ()
{
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
}
_git_ls_tree ()
@@ -1146,12 +1413,14 @@ __git_log_common_options="
--max-count=
--max-age= --since= --after=
--min-age= --until= --before=
+ --min-parents= --max-parents=
+ --no-min-parents --no-max-parents
"
# Options that go well for log and gitk (not shortlog)
__git_log_gitk_options="
--dense --sparse --full-history
--simplify-merges --simplify-by-decoration
- --left-right
+ --left-right --notes --no-notes
"
# Options that go well for log and shortlog (not gitk)
__git_log_shortlog_options="
@@ -1166,21 +1435,15 @@ _git_log ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
local g="$(git rev-parse --git-dir 2>/dev/null)"
local merge=""
if [ -f "$g/MERGE_HEAD" ]; then
merge="--merge"
fi
case "$cur" in
- --pretty=*)
- __gitcomp "$__git_log_pretty_formats
- " "" "${cur##--pretty=}"
- return
- ;;
- --format=*)
- __gitcomp "$__git_log_pretty_formats
- " "" "${cur##--format=}"
+ --pretty=*|--format=*)
+ __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+ " "" "${cur#*=}"
return
;;
--date=*)
@@ -1201,6 +1464,7 @@ _git_log ()
--abbrev-commit --abbrev=
--relative-date --date=
--pretty= --format= --oneline
+ --show-signature
--cherry-pick
--graph
--decorate --decorate=
@@ -1216,27 +1480,29 @@ _git_log ()
__git_complete_revlist
}
+# Common merge options shared by git-merge(1) and git-pull(1).
__git_merge_options="
--no-commit --no-stat --log --no-log --squash --strategy
- --commit --stat --no-squash --ff --no-ff
+ --commit --stat --no-squash --ff --no-ff --ff-only --edit --no-edit
+ --verify-signatures --no-verify-signatures --gpg-sign
+ --quiet --verbose --progress --no-progress
"
_git_merge ()
{
__git_complete_strategy && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
- __gitcomp "$__git_merge_options"
+ __gitcomp "$__git_merge_options
+ --rerere-autoupdate --no-rerere-autoupdate --abort"
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_mergetool ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--tool=*)
__gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}"
@@ -1247,24 +1513,35 @@ _git_mergetool ()
return
;;
esac
- COMPREPLY=()
}
_git_merge_base ()
{
- __gitcomp "$(__git_refs)"
+ case "$cur" in
+ --*)
+ __gitcomp "--octopus --independent --is-ancestor --fork-point"
+ return
+ ;;
+ esac
+ __gitcomp_nl "$(__git_refs)"
}
_git_mv ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--dry-run"
return
;;
esac
- COMPREPLY=()
+
+ if [ $(__git_count_arguments "mv") -gt 0 ]; then
+ # We need to show both cached and untracked files (including
+ # empty directories) since this may not be the last argument.
+ __git_complete_index_file "--cached --others --directory"
+ else
+ __git_complete_index_file "--cached"
+ fi
}
_git_name_rev ()
@@ -1272,12 +1549,62 @@ _git_name_rev ()
__gitcomp "--tags --all --stdin"
}
+_git_notes ()
+{
+ local subcommands='add append copy edit list prune remove show'
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+ case "$subcommand,$cur" in
+ ,--*)
+ __gitcomp '--ref'
+ ;;
+ ,*)
+ case "$prev" in
+ --ref)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ *)
+ __gitcomp "$subcommands --ref"
+ ;;
+ esac
+ ;;
+ add,--reuse-message=*|append,--reuse-message=*|\
+ add,--reedit-message=*|append,--reedit-message=*)
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
+ ;;
+ add,--*|append,--*)
+ __gitcomp '--file= --message= --reedit-message=
+ --reuse-message='
+ ;;
+ copy,--*)
+ __gitcomp '--stdin'
+ ;;
+ prune,--*)
+ __gitcomp '--dry-run --verbose'
+ ;;
+ prune,*)
+ ;;
+ *)
+ case "$prev" in
+ -m|-F)
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+ ;;
+ esac
+}
+
_git_pull ()
{
__git_complete_strategy && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
+ --recurse-submodules=*)
+ __gitcomp "$__git_fetch_recurse_submodules" "" "${cur##--recurse-submodules=}"
+ return
+ ;;
--*)
__gitcomp "
--rebase --no-rebase
@@ -1290,23 +1617,55 @@ _git_pull ()
__git_complete_remote_or_refspec
}
+__git_push_recurse_submodules="check on-demand"
+
+__git_complete_force_with_lease ()
+{
+ local cur_=$1
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *:*)
+ __gitcomp_nl "$(__git_refs)" "" "${cur_#*:}"
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)" "" "$cur_"
+ ;;
+ esac
+}
+
_git_push ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
- case "${COMP_WORDS[COMP_CWORD-1]}" in
+ case "$prev" in
--repo)
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
return
+ ;;
+ --recurse-submodules)
+ __gitcomp "$__git_push_recurse_submodules"
+ return
+ ;;
esac
case "$cur" in
--repo=*)
- __gitcomp "$(__git_remotes)" "" "${cur##--repo=}"
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}"
+ return
+ ;;
+ --recurse-submodules=*)
+ __gitcomp "$__git_push_recurse_submodules" "" "${cur##--recurse-submodules=}"
+ return
+ ;;
+ --force-with-lease=*)
+ __git_complete_force_with_lease "${cur##--force-with-lease=}"
return
;;
--*)
__gitcomp "
--all --mirror --tags --dry-run --force --verbose
- --receive-pack= --repo=
+ --quiet --prune --delete --follow-tags
+ --receive-pack= --repo= --set-upstream
+ --force-with-lease --force-with-lease= --recurse-submodules=
"
return
;;
@@ -1316,18 +1675,42 @@ _git_push ()
_git_rebase ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}" dir="$(__gitdir)"
+ local dir="$(__gitdir)"
if [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then
__gitcomp "--continue --skip --abort"
return
fi
__git_complete_strategy && return
case "$cur" in
+ --whitespace=*)
+ __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+ return
+ ;;
--*)
- __gitcomp "--onto --merge --strategy --interactive"
+ __gitcomp "
+ --onto --merge --strategy --interactive
+ --preserve-merges --stat --no-stat
+ --committer-date-is-author-date --ignore-date
+ --ignore-whitespace --whitespace=
+ --autosquash --fork-point --no-fork-point
+ --autostash
+ "
+
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_reflog ()
+{
+ local subcommands="show delete expire"
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+ if [ -z "$subcommand" ]; then
+ __gitcomp "$subcommands"
+ else
+ __gitcomp_nl "$(__git_refs)"
+ fi
}
__git_send_email_confirm_options="always never auto cc compose"
@@ -1335,7 +1718,6 @@ __git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all"
_git_send_email ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--confirm=*)
__gitcomp "
@@ -1354,6 +1736,12 @@ _git_send_email ()
__gitcomp "ssl tls" "" "${cur##--smtp-encryption=}"
return
;;
+ --thread=*)
+ __gitcomp "
+ deep shallow
+ " "" "${cur##--thread=}"
+ return
+ ;;
--*)
__gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to
--compose --confirm= --dry-run --envelope-sender
@@ -1363,20 +1751,26 @@ _git_send_email ()
--signed-off-by-cc --smtp-pass --smtp-server
--smtp-server-port --smtp-encryption= --smtp-user
--subject --suppress-cc= --suppress-from --thread --to
- --validate --no-validate"
+ --validate --no-validate
+ $__git_format_patch_options"
return
;;
esac
- COMPREPLY=()
+ __git_complete_revlist
+}
+
+_git_stage ()
+{
+ _git_add
}
__git_config_get_set_variables ()
{
- local prevword word config_file= c=$COMP_CWORD
+ local prevword word config_file= c=$cword
while [ $c -gt 1 ]; do
- word="${COMP_WORDS[c]}"
+ word="${words[c]}"
case "$word" in
- --global|--system|--file=*)
+ --system|--global|--local|--file=*)
config_file="$word"
break
;;
@@ -1390,7 +1784,7 @@ __git_config_get_set_variables ()
done
git --git-dir="$(__gitdir)" config $config_file --list 2>/dev/null |
- while read line
+ while read -r line
do
case "$line" in
*.*=*)
@@ -1402,33 +1796,44 @@ __git_config_get_set_variables ()
_git_config ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
- local prv="${COMP_WORDS[COMP_CWORD-1]}"
- case "$prv" in
- branch.*.remote)
- __gitcomp "$(__git_remotes)"
+ case "$prev" in
+ branch.*.remote|branch.*.pushremote)
+ __gitcomp_nl "$(__git_remotes)"
return
;;
branch.*.merge)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
+ return
+ ;;
+ branch.*.rebase)
+ __gitcomp "false true"
+ return
+ ;;
+ remote.pushdefault)
+ __gitcomp_nl "$(__git_remotes)"
return
;;
remote.*.fetch)
- local remote="${prv#remote.}"
+ local remote="${prev#remote.}"
remote="${remote%.fetch}"
- __gitcomp "$(__git_refs_remotes "$remote")"
+ if [ -z "$cur" ]; then
+ __gitcomp_nl "refs/heads/" "" "" ""
+ return
+ fi
+ __gitcomp_nl "$(__git_refs_remotes "$remote")"
return
;;
remote.*.push)
- local remote="${prv#remote.}"
+ local remote="${prev#remote.}"
remote="${remote%.push}"
- __gitcomp "$(git --git-dir="$(__gitdir)" \
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" \
for-each-ref --format='%(refname):%(refname)' \
refs/heads)"
return
;;
pull.twohead|pull.octopus)
- __gitcomp "$(__git_merge_strategies)"
+ __git_compute_merge_strategies
+ __gitcomp "$__git_merge_strategies"
return
;;
color.branch|color.diff|color.interactive|\
@@ -1447,6 +1852,10 @@ _git_config ()
"
return
;;
+ diff.submodule)
+ __gitcomp "log short"
+ return
+ ;;
help.format)
__gitcomp "man info web html"
return
@@ -1467,19 +1876,22 @@ _git_config ()
__gitcomp "$__git_send_email_suppresscc_options"
return
;;
+ sendemail.transferencoding)
+ __gitcomp "7bit 8bit quoted-printable base64"
+ return
+ ;;
--get|--get-all|--unset|--unset-all)
- __gitcomp "$(__git_config_get_set_variables)"
+ __gitcomp_nl "$(__git_config_get_set_variables)"
return
;;
*.*)
- COMPREPLY=()
return
;;
esac
case "$cur" in
--*)
__gitcomp "
- --global --system --file=
+ --system --global --local --file=
--list --replace-all
--get --get-all --get-regexp
--add --unset --unset-all
@@ -1488,97 +1900,110 @@ _git_config ()
return
;;
branch.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
- __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "remote pushremote merge mergeoptions rebase" "$pfx" "$cur_"
return
;;
branch.*)
- local pfx="${cur%.*}."
- cur="${cur#*.}"
- __gitcomp "$(__git_heads)" "$pfx" "$cur" "."
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "."
+ __gitcomp_nl_append $'autosetupmerge\nautosetuprebase\n' "$pfx" "$cur_"
return
;;
guitool.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "
argprompt cmd confirm needsfile noconsole norescan
prompt revprompt revunmerged title
- " "$pfx" "$cur"
+ " "$pfx" "$cur_"
return
;;
difftool.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
- __gitcomp "cmd path" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_"
return
;;
man.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
- __gitcomp "cmd path" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_"
return
;;
mergetool.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
- __gitcomp "cmd path trustExitCode" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path trustExitCode" "$pfx" "$cur_"
return
;;
pager.*)
- local pfx="${cur%.*}."
- cur="${cur#*.}"
- __gitcomp "$(__git_all_commands)" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __git_compute_all_commands
+ __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_"
return
;;
remote.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "
url proxy fetch push mirror skipDefaultUpdate
receivepack uploadpack tagopt pushurl
- " "$pfx" "$cur"
+ " "$pfx" "$cur_"
return
;;
remote.*)
- local pfx="${cur%.*}."
- cur="${cur#*.}"
- __gitcomp "$(__git_remotes)" "$pfx" "$cur" "."
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
+ __gitcomp_nl_append "pushdefault" "$pfx" "$cur_"
return
;;
url.*.*)
- local pfx="${cur%.*}."
- cur="${cur##*.}"
- __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur"
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_"
return
;;
esac
__gitcomp "
- add.ignore-errors
+ add.ignoreErrors
+ advice.commitBeforeMerge
+ advice.detachedHead
+ advice.implicitIdentity
+ advice.pushNonFastForward
+ advice.resolveConflict
+ advice.statusHints
alias.
+ am.keepcr
apply.ignorewhitespace
apply.whitespace
branch.autosetupmerge
branch.autosetuprebase
+ browser.
clean.requireForce
color.branch
color.branch.current
color.branch.local
color.branch.plain
color.branch.remote
+ color.decorate.HEAD
+ color.decorate.branch
+ color.decorate.remoteBranch
+ color.decorate.stash
+ color.decorate.tag
color.diff
color.diff.commit
color.diff.frag
+ color.diff.func
color.diff.meta
color.diff.new
color.diff.old
color.diff.plain
color.diff.whitespace
color.grep
- color.grep.external
+ color.grep.context
+ color.grep.filename
+ color.grep.function
+ color.grep.linenumber
color.grep.match
+ color.grep.selected
+ color.grep.separator
color.interactive
+ color.interactive.error
color.interactive.header
color.interactive.help
color.interactive.prompt
@@ -1592,21 +2017,28 @@ _git_config ()
color.status.untracked
color.status.updated
color.ui
+ commit.status
commit.template
+ core.abbrev
+ core.askpass
+ core.attributesfile
core.autocrlf
core.bare
+ core.bigFileThreshold
core.compression
core.createObject
core.deltaBaseCacheLimit
core.editor
+ core.eol
core.excludesfile
core.fileMode
core.fsyncobjectfiles
core.gitProxy
- core.ignoreCygwinFSTricks
core.ignoreStat
+ core.ignorecase
core.logAllRefUpdates
core.loosecompression
+ core.notesRef
core.packedGitLimit
core.packedGitWindowSize
core.pager
@@ -1616,6 +2048,7 @@ _git_config ()
core.repositoryFormatVersion
core.safecrlf
core.sharedRepository
+ core.sparseCheckout
core.symlinks
core.trustctime
core.warnAmbiguousRefs
@@ -1623,25 +2056,34 @@ _git_config ()
core.worktree
diff.autorefreshindex
diff.external
+ diff.ignoreSubmodules
diff.mnemonicprefix
+ diff.noprefix
diff.renameLimit
- diff.renameLimit.
diff.renames
+ diff.statGraphWidth
+ diff.submodule
diff.suppressBlankEmpty
diff.tool
diff.wordRegex
+ diff.algorithm
difftool.
difftool.prompt
+ fetch.recurseSubmodules
fetch.unpackLimit
format.attach
format.cc
+ format.coverLetter
format.headers
format.numbered
format.pretty
+ format.signature
format.signoff
format.subjectprefix
format.suffix
format.thread
+ format.to
+ gc.
gc.aggressiveWindow
gc.auto
gc.autopacklimit
@@ -1679,15 +2121,20 @@ _git_config ()
http.lowSpeedLimit
http.lowSpeedTime
http.maxRequests
+ http.minSessions
http.noEPSV
+ http.postBuffer
http.proxy
http.sslCAInfo
http.sslCAPath
http.sslCert
+ http.sslCertPasswordProtected
http.sslKey
http.sslVerify
+ http.useragent
i18n.commitEncoding
i18n.logOutputEncoding
+ imap.authMethod
imap.folder
imap.host
imap.pass
@@ -1696,6 +2143,7 @@ _git_config ()
imap.sslverify
imap.tunnel
imap.user
+ init.templatedir
instaweb.browser
instaweb.httpd
instaweb.local
@@ -1703,19 +2151,29 @@ _git_config ()
instaweb.port
interactive.singlekey
log.date
+ log.decorate
log.showroot
mailmap.file
man.
man.viewer
+ merge.
merge.conflictstyle
merge.log
merge.renameLimit
+ merge.renormalize
merge.stat
merge.tool
merge.verbosity
mergetool.
mergetool.keepBackup
+ mergetool.keepTemporaries
mergetool.prompt
+ notes.displayRef
+ notes.rewrite.
+ notes.rewrite.amend
+ notes.rewrite.rebase
+ notes.rewriteMode
+ notes.rewriteRef
pack.compression
pack.deltaCacheLimit
pack.deltaCacheSize
@@ -1726,31 +2184,43 @@ _git_config ()
pack.window
pack.windowMemory
pager.
+ pretty.
pull.octopus
pull.twohead
push.default
+ rebase.autosquash
rebase.stat
+ receive.autogc
receive.denyCurrentBranch
+ receive.denyDeleteCurrent
receive.denyDeletes
receive.denyNonFastForwards
receive.fsckObjects
receive.unpackLimit
+ receive.updateserverinfo
+ remote.pushdefault
+ remotes.
repack.usedeltabaseoffset
rerere.autoupdate
rerere.enabled
+ sendemail.
sendemail.aliasesfile
- sendemail.aliasesfiletype
+ sendemail.aliasfiletype
sendemail.bcc
sendemail.cc
sendemail.cccmd
sendemail.chainreplyto
sendemail.confirm
sendemail.envelopesender
+ sendemail.from
+ sendemail.identity
sendemail.multiedit
sendemail.signedoffbycc
+ sendemail.smtpdomain
sendemail.smtpencryption
sendemail.smtppass
sendemail.smtpserver
+ sendemail.smtpserveroption
sendemail.smtpserverport
sendemail.smtpuser
sendemail.suppresscc
@@ -1761,6 +2231,8 @@ _git_config ()
showbranch.default
status.relativePaths
status.showUntrackedFiles
+ status.submodulesummary
+ submodule.
tar.umask
transfer.unpackLimit
url.
@@ -1774,7 +2246,7 @@ _git_config ()
_git_remote ()
{
- local subcommands="add rename rm show prune update set-head"
+ local subcommands="add rename remove set-head set-branches set-url show prune update"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
__gitcomp "$subcommands"
@@ -1782,8 +2254,11 @@ _git_remote ()
fi
case "$subcommand" in
- rename|rm|show|prune)
- __gitcomp "$(__git_remotes)"
+ rename|remove|set-url|show|prune)
+ __gitcomp_nl "$(__git_remotes)"
+ ;;
+ set-head|set-branches)
+ __git_complete_remote_or_refspec
;;
update)
local i c='' IFS=$'\n'
@@ -1794,61 +2269,55 @@ _git_remote ()
__gitcomp "$c"
;;
*)
- COMPREPLY=()
;;
esac
}
_git_replace ()
{
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_reset ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--merge --mixed --hard --soft --patch"
return
;;
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_revert ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--edit --mainline --no-edit --no-commit --signoff"
return
;;
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_rm ()
{
- __git_has_doubledash && return
-
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--cached --dry-run --ignore-unmatch --quiet"
return
;;
esac
- COMPREPLY=()
+
+ __git_complete_index_file "--cached"
}
_git_shortlog ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -1866,31 +2335,29 @@ _git_show ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
- --pretty=*)
- __gitcomp "$__git_log_pretty_formats
- " "" "${cur##--pretty=}"
+ --pretty=*|--format=*)
+ __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+ " "" "${cur#*=}"
return
;;
- --format=*)
- __gitcomp "$__git_log_pretty_formats
- " "" "${cur##--format=}"
+ --diff-algorithm=*)
+ __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
return
;;
--*)
__gitcomp "--pretty= --format= --abbrev-commit --oneline
+ --show-signature
$__git_diff_common_options
"
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
_git_show_branch ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "
@@ -1907,7 +2374,6 @@ _git_show_branch ()
_git_stash ()
{
- local cur="${COMP_WORDS[COMP_CWORD]}"
local save_opts='--keep-index --no-keep-index --quiet --patch'
local subcommands='save list show apply clear drop pop create branch'
local subcommand="$(__git_find_on_cmdline "$subcommands")"
@@ -1919,8 +2385,6 @@ _git_stash ()
*)
if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
__gitcomp "$subcommands"
- else
- COMPREPLY=()
fi
;;
esac
@@ -1933,14 +2397,12 @@ _git_stash ()
__gitcomp "--index --quiet"
;;
show,--*|drop,--*|branch,--*)
- COMPREPLY=()
;;
show,*|apply,*|drop,*|pop,*|branch,*)
- __gitcomp "$(git --git-dir="$(__gitdir)" stash list \
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \
| sed -n -e 's/:.*//p')"
;;
*)
- COMPREPLY=()
;;
esac
fi
@@ -1950,9 +2412,8 @@ _git_submodule ()
{
__git_has_doubledash && return
- local subcommands="add status init update summary foreach sync"
+ local subcommands="add status init deinit update summary foreach sync"
if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
__gitcomp "--quiet --cached"
@@ -1971,7 +2432,7 @@ _git_svn ()
init fetch clone rebase dcommit log find-rev
set-tree commit-diff info create-ignore propget
proplist show-ignore show-externals branch tag blame
- migrate
+ migrate mkdirs reset gc
"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
@@ -1983,7 +2444,7 @@ _git_svn ()
--no-metadata --use-svm-props --use-svnsync-props
--log-window-size= --no-checkout --quiet
--repack-flags --use-log-author --localtime
- --ignore-paths= $remote_opts
+ --ignore-paths= --include-paths= $remote_opts
"
local init_opts="
--template= --shared= --trunk= --tags=
@@ -1996,7 +2457,6 @@ _git_svn ()
--edit --rmdir --find-copies-harder --copy-similarity=
"
- local cur="${COMP_WORDS[COMP_CWORD]}"
case "$subcommand,$cur" in
fetch,--*)
__gitcomp "--revision= --fetch-all $fc_opts"
@@ -2011,14 +2471,14 @@ _git_svn ()
__gitcomp "
--merge --strategy= --verbose --dry-run
--fetch-all --no-rebase --commit-url
- --revision $cmt_opts $fc_opts
+ --revision --interactive $cmt_opts $fc_opts
"
;;
set-tree,--*)
__gitcomp "--stdin $cmt_opts $fc_opts"
;;
create-ignore,--*|propget,--*|proplist,--*|show-ignore,--*|\
- show-externals,--*)
+ show-externals,--*|mkdirs,--*)
__gitcomp "--revision="
;;
log,--*)
@@ -2055,8 +2515,10 @@ _git_svn ()
--no-auth-cache --username=
"
;;
+ reset,--*)
+ __gitcomp "--revision= --parent"
+ ;;
*)
- COMPREPLY=()
;;
esac
fi
@@ -2065,55 +2527,69 @@ _git_svn ()
_git_tag ()
{
local i c=1 f=0
- while [ $c -lt $COMP_CWORD ]; do
- i="${COMP_WORDS[c]}"
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
case "$i" in
-d|-v)
- __gitcomp "$(__git_tags)"
+ __gitcomp_nl "$(__git_tags)"
return
;;
-f)
f=1
;;
esac
- c=$((++c))
+ ((c++))
done
- case "${COMP_WORDS[COMP_CWORD-1]}" in
+ case "$prev" in
-m|-F)
- COMPREPLY=()
;;
-*|tag)
if [ $f = 1 ]; then
- __gitcomp "$(__git_tags)"
- else
- COMPREPLY=()
+ __gitcomp_nl "$(__git_tags)"
fi
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --list --delete --verify --annotate --message --file
+ --sign --cleanup --local-user --force --column --sort
+ --contains --points-at
+ "
;;
esac
}
-_git ()
+_git_whatchanged ()
+{
+ _git_log
+}
+
+__git_main ()
{
local i c=1 command __git_dir
- while [ $c -lt $COMP_CWORD ]; do
- i="${COMP_WORDS[c]}"
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
case "$i" in
--git-dir=*) __git_dir="${i#--git-dir=}" ;;
+ --git-dir) ((c++)) ; __git_dir="${words[c]}" ;;
--bare) __git_dir="." ;;
- --version|-p|--paginate) ;;
--help) command="help"; break ;;
+ -c|--work-tree|--namespace) ((c++)) ;;
+ -*) ;;
*) command="$i"; break ;;
esac
- c=$((++c))
+ ((c++))
done
if [ -z "$command" ]; then
- case "${COMP_WORDS[COMP_CWORD]}" in
+ case "$cur" in
--*) __gitcomp "
--paginate
--no-pager
@@ -2121,80 +2597,37 @@ _git ()
--bare
--version
--exec-path
+ --exec-path=
--html-path
+ --man-path
+ --info-path
--work-tree=
+ --namespace=
+ --no-replace-objects
--help
"
;;
- *) __gitcomp "$(__git_porcelain_commands) $(__git_aliases)" ;;
+ *) __git_compute_porcelain_commands
+ __gitcomp "$__git_porcelain_commands $(__git_aliases)" ;;
esac
return
fi
+ local completion_func="_git_${command//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func && return
+
local expansion=$(__git_aliased_command "$command")
- [ "$expansion" ] && command="$expansion"
-
- case "$command" in
- am) _git_am ;;
- add) _git_add ;;
- apply) _git_apply ;;
- archive) _git_archive ;;
- bisect) _git_bisect ;;
- bundle) _git_bundle ;;
- branch) _git_branch ;;
- checkout) _git_checkout ;;
- cherry) _git_cherry ;;
- cherry-pick) _git_cherry_pick ;;
- clean) _git_clean ;;
- clone) _git_clone ;;
- commit) _git_commit ;;
- config) _git_config ;;
- describe) _git_describe ;;
- diff) _git_diff ;;
- difftool) _git_difftool ;;
- fetch) _git_fetch ;;
- format-patch) _git_format_patch ;;
- fsck) _git_fsck ;;
- gc) _git_gc ;;
- grep) _git_grep ;;
- help) _git_help ;;
- init) _git_init ;;
- log) _git_log ;;
- ls-files) _git_ls_files ;;
- ls-remote) _git_ls_remote ;;
- ls-tree) _git_ls_tree ;;
- merge) _git_merge;;
- mergetool) _git_mergetool;;
- merge-base) _git_merge_base ;;
- mv) _git_mv ;;
- name-rev) _git_name_rev ;;
- pull) _git_pull ;;
- push) _git_push ;;
- rebase) _git_rebase ;;
- remote) _git_remote ;;
- replace) _git_replace ;;
- reset) _git_reset ;;
- revert) _git_revert ;;
- rm) _git_rm ;;
- send-email) _git_send_email ;;
- shortlog) _git_shortlog ;;
- show) _git_show ;;
- show-branch) _git_show_branch ;;
- stash) _git_stash ;;
- stage) _git_add ;;
- submodule) _git_submodule ;;
- svn) _git_svn ;;
- tag) _git_tag ;;
- whatchanged) _git_log ;;
- *) COMPREPLY=() ;;
- esac
+ if [ -n "$expansion" ]; then
+ words[1]=$expansion
+ completion_func="_git_${expansion//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func
+ fi
}
-_gitk ()
+__gitk_main ()
{
__git_has_doubledash && return
- local cur="${COMP_WORDS[COMP_CWORD]}"
local g="$(__gitdir)"
local merge=""
if [ -f "$g/MERGE_HEAD" ]; then
@@ -2213,16 +2646,107 @@ _gitk ()
__git_complete_revlist
}
-complete -o bashdefault -o default -o nospace -F _git git 2>/dev/null \
- || complete -o default -o nospace -F _git git
-complete -o bashdefault -o default -o nospace -F _gitk gitk 2>/dev/null \
- || complete -o default -o nospace -F _gitk gitk
+if [[ -n ${ZSH_VERSION-} ]]; then
+ echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2
+
+ autoload -U +X compinit && compinit
+
+ __gitcomp ()
+ {
+ emulate -L zsh
+
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *)
+ local c IFS=$' \t\n'
+ local -a array
+ for c in ${=1}; do
+ c="$c${4-}"
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ array[${#array[@]}+1]="$c"
+ done
+ compset -P '*[=:]'
+ compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+ ;;
+ esac
+ }
+
+ __gitcomp_nl ()
+ {
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+ }
+
+ __gitcomp_file ()
+ {
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
+ }
+
+ _git ()
+ {
+ local _ret=1 cur cword prev
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+ emulate ksh -c __${service}_main
+ let _ret && _default && _ret=0
+ return _ret
+ }
+
+ compdef _git git gitk
+ return
+fi
+
+__git_func_wrap ()
+{
+ local cur words cword prev
+ _get_comp_words_by_ref -n =: cur words cword prev
+ $1
+}
+
+# Set up completion for certain functions defined above by setting common
+# variables and workarounds.
+# This is NOT a public function; use at your own risk.
+__git_complete ()
+{
+ local wrapper="__git_wrap${2}"
+ eval "$wrapper () { __git_func_wrap $2 ; }"
+ complete -o bashdefault -o default -o nospace -F $wrapper $1 2>/dev/null \
+ || complete -o default -o nospace -F $wrapper $1
+}
+
+# wrapper for backwards compatibility
+_git ()
+{
+ __git_wrap__git_main
+}
+
+# wrapper for backwards compatibility
+_gitk ()
+{
+ __git_wrap__gitk_main
+}
+
+__git_complete git __git_main
+__git_complete gitk __gitk_main
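The same helper can be pointed at other command names. As a rough sketch (the alias name "g" and the file location are assumptions for illustration, not part of this patch), a user who aliases git in bash could wire the alias up in ~/.bashrc:

    # source the completion script, define the alias, then hook up completion
    source ~/.git-completion.bash
    alias g=git
    __git_complete g __git_main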
# The following are necessary only for Cygwin, and are only needed
# when the user has tab-completed the executable name and consequently
# included the '.exe' suffix.
#
if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then
-complete -o bashdefault -o default -o nospace -F _git git.exe 2>/dev/null \
- || complete -o default -o nospace -F _git git.exe
+__git_complete git.exe __git_main
fi
diff --git a/contrib/completion/git-completion.tcsh b/contrib/completion/git-completion.tcsh
new file mode 100644
index 0000000000..6104a42a23
--- /dev/null
+++ b/contrib/completion/git-completion.tcsh
@@ -0,0 +1,126 @@
+# tcsh completion support for core Git.
+#
+# Copyright (C) 2012 Marc Khouzam <marc.khouzam@gmail.com>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# When sourced, this script will generate a new script that uses
+# the git-completion.bash script provided by core Git. This new
+# script can be used by tcsh to perform git completion.
+# The current script also issues the necessary tcsh 'complete'
+# commands.
+#
+# To use this completion script:
+#
+# 0) You need tcsh 6.16.00 or newer.
+# 1) Copy both this file and the bash completion script to ${HOME}.
+# You _must_ use the name ${HOME}/.git-completion.bash for the
+# bash script.
+# (e.g. ~/.git-completion.tcsh and ~/.git-completion.bash).
+# 2) Add the following line to your .tcshrc/.cshrc:
+# source ~/.git-completion.tcsh
+# 3) For completion similar to bash, it is recommended to also
+# add the following line to your .tcshrc/.cshrc:
+# set autolist=ambiguous
+# It will tell tcsh to list the possible completion choices.
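Putting steps 1) through 3) together, the lines added to ~/.tcshrc would look roughly like the following sketch (paths as prescribed in step 1):

    # enable git completion and bash-like listing of choices
    source ~/.git-completion.tcsh
    set autolist=ambiguous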
+
+set __git_tcsh_completion_version = `\echo ${tcsh} | \sed 's/\./ /g'`
+if ( ${__git_tcsh_completion_version[1]} < 6 || \
+ ( ${__git_tcsh_completion_version[1]} == 6 && \
+ ${__git_tcsh_completion_version[2]} < 16 ) ) then
+ echo "git-completion.tcsh: Your version of tcsh is too old, you need version 6.16.00 or newer. Git completion will not work."
+ exit
+endif
+unset __git_tcsh_completion_version
+
+set __git_tcsh_completion_original_script = ${HOME}/.git-completion.bash
+set __git_tcsh_completion_script = ${HOME}/.git-completion.tcsh.bash
+
+# Check that the user put the script in the right place
+if ( ! -e ${__git_tcsh_completion_original_script} ) then
+ echo "git-completion.tcsh: Cannot find: ${__git_tcsh_completion_original_script}. Git completion will not work."
+ exit
+endif
+
+cat << EOF > ${__git_tcsh_completion_script}
+#!bash
+#
+# This script is GENERATED and will be overwritten automatically.
+# Do not modify it directly. Instead, modify git-completion.tcsh
+# and source it again.
+
+source ${__git_tcsh_completion_original_script}
+
+# Remove the colon as a completion separator because tcsh cannot handle it
+COMP_WORDBREAKS=\${COMP_WORDBREAKS//:}
+
+# For file completion, tcsh needs the '/' to be appended to directories.
+# By default, the bash script does not do that.
+# We can achieve this by using the compatibility
+# method provided by the git-completion.bash script.
+__git_index_file_list_filter ()
+{
+ __git_index_file_list_filter_compat
+}
+
+# Set COMP_WORDS in a way that can be handled by the bash script.
+COMP_WORDS=(\$2)
+
+# The cursor is at the end of parameter #2 (the command line).
+# We must check for a space as the last character, which will
+# tell us that the previous word is complete and the cursor
+# is on the next word.
+if [ "\${2: -1}" == " " ]; then
+ # The last character is a space, so our location is at the end
+ # of the command-line array
+ COMP_CWORD=\${#COMP_WORDS[@]}
+else
+ # The last character is not a space, so our location is on the
+ # last word of the command-line array, so we must decrement the
+ # count by 1
+ COMP_CWORD=\$((\${#COMP_WORDS[@]}-1))
+fi
+
+# Call _git() or _gitk() of the bash script, based on the first argument
+_\${1}
+
+IFS=\$'\n'
+if [ \${#COMPREPLY[*]} -eq 0 ]; then
+ # No completions suggested. In this case, we want tcsh to perform
+	# standard file completion. However, there does not seem to be a way
+ # to tell tcsh to do that. To help the user, we try to simulate
+ # file completion directly in this script.
+ #
+ # Known issues:
+ # - Possible completions are shown with their directory prefix.
+ # - Completions containing shell variables are not handled.
+ # - Completions with ~ as the first character are not handled.
+
+ # No file completion should be done unless we are completing beyond
+ # the git sub-command. An improvement on the bash completion :)
+ if [ \${COMP_CWORD} -gt 1 ]; then
+ TO_COMPLETE="\${COMP_WORDS[\${COMP_CWORD}]}"
+
+ # We don't support ~ expansion: too tricky.
+ if [ "\${TO_COMPLETE:0:1}" != "~" ]; then
+ # Use ls so as to add the '/' at the end of directories.
+ COMPREPLY=(\`ls -dp \${TO_COMPLETE}* 2> /dev/null\`)
+ fi
+ fi
+fi
+
+# tcsh does not automatically remove duplicates, so we do it ourselves
+echo "\${COMPREPLY[*]}" | sort | uniq
+
+# If there is a single completion and it is a directory, we output it
+# a second time to trick tcsh into not adding a space after it.
+if [ \${#COMPREPLY[*]} -eq 1 ] && [ "\${COMPREPLY[0]: -1}" == "/" ]; then
+ echo "\${COMPREPLY[*]}"
+fi
+
+EOF
+
+# Don't need this variable anymore, so don't pollute the user's environment
+unset __git_tcsh_completion_original_script
+
+complete git 'p,*,`bash ${__git_tcsh_completion_script} git "${COMMAND_LINE}"`,'
+complete gitk 'p,*,`bash ${__git_tcsh_completion_script} gitk "${COMMAND_LINE}"`,'
diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh
new file mode 100644
index 0000000000..e25541308a
--- /dev/null
+++ b/contrib/completion/git-completion.zsh
@@ -0,0 +1,225 @@
+#compdef git gitk
+
+# zsh completion wrapper for git
+#
+# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
+#
+# You need git's bash completion script installed somewhere; by default it
+# would be the location bash-completion uses.
+#
+# If your script is somewhere else, you can configure it in your ~/.zshrc:
+#
+# zstyle ':completion:*:*:git:*' script ~/.git-completion.zsh
+#
+# The recommended way to install this script is to copy it to '~/.zsh/_git', and
+# then add the following to your ~/.zshrc file:
+#
+# fpath=(~/.zsh $fpath)
+
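Putting the two installation options above into a concrete sketch, a ~/.zshrc might contain either of the following; the compinit invocation and the exact paths are assumptions for illustration:

    # option 1: tell the wrapper where the bash completion script lives
    zstyle ':completion:*:*:git:*' script ~/.git-completion.bash

    # option 2: install this file as ~/.zsh/_git and load it via compinit
    fpath=(~/.zsh $fpath)
    autoload -Uz compinit && compinit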
+complete ()
+{
+ # do nothing
+ return 0
+}
+
+zstyle -T ':completion:*:*:git:*' tag-order && \
+ zstyle ':completion:*:*:git:*' tag-order 'common-commands'
+
+zstyle -s ":completion:*:*:git:*" script script
+if [ -z "$script" ]; then
+ local -a locations
+ local e
+ locations=(
+ $(dirname ${funcsourcetrace[1]%:*})/git-completion.bash
+ '/etc/bash_completion.d/git' # fedora, old debian
+ '/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian
+ '/usr/share/bash-completion/git' # gentoo
+ )
+ for e in $locations; do
+ test -f $e && script="$e" && break
+ done
+fi
+ZSH_VERSION='' . "$script"
+
+__gitcomp ()
+{
+ emulate -L zsh
+
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *)
+ local c IFS=$' \t\n'
+ local -a array
+ for c in ${=1}; do
+ c="$c${4-}"
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ array+=("$c")
+ done
+ compset -P '*[=:]'
+ compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+ ;;
+ esac
+}
+
+__gitcomp_nl ()
+{
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+}
+
+__gitcomp_nl_append ()
+{
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+}
+
+__gitcomp_file ()
+{
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
+}
+
+__git_zsh_bash_func ()
+{
+ emulate -L ksh
+
+ local command=$1
+
+ local completion_func="_git_${command//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func && return
+
+ local expansion=$(__git_aliased_command "$command")
+ if [ -n "$expansion" ]; then
+ words[1]=$expansion
+ completion_func="_git_${expansion//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func
+ fi
+}
+
+__git_zsh_cmd_common ()
+{
+ local -a list
+ list=(
+ add:'add file contents to the index'
+ bisect:'find by binary search the change that introduced a bug'
+ branch:'list, create, or delete branches'
+ checkout:'checkout a branch or paths to the working tree'
+ clone:'clone a repository into a new directory'
+ commit:'record changes to the repository'
+ diff:'show changes between commits, commit and working tree, etc'
+ fetch:'download objects and refs from another repository'
+ grep:'print lines matching a pattern'
+ init:'create an empty Git repository or reinitialize an existing one'
+ log:'show commit logs'
+ merge:'join two or more development histories together'
+ mv:'move or rename a file, a directory, or a symlink'
+ pull:'fetch from and merge with another repository or a local branch'
+ push:'update remote refs along with associated objects'
+ rebase:'forward-port local commits to the updated upstream head'
+ reset:'reset current HEAD to the specified state'
+ rm:'remove files from the working tree and from the index'
+ show:'show various types of objects'
+ status:'show the working tree status'
+ tag:'create, list, delete or verify a tag object signed with GPG')
+ _describe -t common-commands 'common commands' list && _ret=0
+}
+
+__git_zsh_cmd_alias ()
+{
+ local -a list
+ list=(${${${(0)"$(git config -z --get-regexp '^alias\.')"}#alias.}%$'\n'*})
+ _describe -t alias-commands 'aliases' list $* && _ret=0
+}
+
+__git_zsh_cmd_all ()
+{
+ local -a list
+ emulate ksh -c __git_compute_all_commands
+ list=( ${=__git_all_commands} )
+ _describe -t all-commands 'all commands' list && _ret=0
+}
+
+__git_zsh_main ()
+{
+ local curcontext="$curcontext" state state_descr line
+ typeset -A opt_args
+ local -a orig_words
+
+ orig_words=( ${words[@]} )
+
+ _arguments -C \
+ '(-p --paginate --no-pager)'{-p,--paginate}'[pipe all output into ''less'']' \
+ '(-p --paginate)--no-pager[do not pipe git output into a pager]' \
+ '--git-dir=-[set the path to the repository]: :_directories' \
+ '--bare[treat the repository as a bare repository]' \
+ '(- :)--version[prints the git suite version]' \
+ '--exec-path=-[path to where your core git programs are installed]:: :_directories' \
+ '--html-path[print the path where git''s HTML documentation is installed]' \
+ '--info-path[print the path where the Info files are installed]' \
+ '--man-path[print the manpath (see `man(1)`) for the man pages]' \
+ '--work-tree=-[set the path to the working tree]: :_directories' \
+ '--namespace=-[set the git namespace]' \
+ '--no-replace-objects[do not use replacement refs to replace git objects]' \
+ '(- :)--help[prints the synopsis and a list of the most commonly used commands]: :->arg' \
+ '(-): :->command' \
+ '(-)*:: :->arg' && return
+
+ case $state in
+ (command)
+ _alternative \
+ 'alias-commands:alias:__git_zsh_cmd_alias' \
+ 'common-commands:common:__git_zsh_cmd_common' \
+ 'all-commands:all:__git_zsh_cmd_all' && _ret=0
+ ;;
+ (arg)
+ local command="${words[1]}" __git_dir
+
+ if (( $+opt_args[--bare] )); then
+ __git_dir='.'
+ else
+ __git_dir=${opt_args[--git-dir]}
+ fi
+
+ (( $+opt_args[--help] )) && command='help'
+
+ words=( ${orig_words[@]} )
+
+ __git_zsh_bash_func $command
+ ;;
+ esac
+}
+
+_git ()
+{
+ local _ret=1
+ local cur cword prev
+
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+
+ if (( $+functions[__${service}_zsh_main] )); then
+ __${service}_zsh_main
+ else
+ emulate ksh -c __${service}_main
+ fi
+
+ let _ret && _default && _ret=0
+ return _ret
+}
+
+_git
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
new file mode 100644
index 0000000000..214e859f99
--- /dev/null
+++ b/contrib/completion/git-prompt.sh
@@ -0,0 +1,528 @@
+# bash/zsh git prompt support
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# This script allows you to see repository status in your prompt.
+#
+# To enable:
+#
+# 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
+# 2) Add the following line to your .bashrc/.zshrc:
+# source ~/.git-prompt.sh
+# 3a) Change your PS1 to call __git_ps1 as
+# command-substitution:
+# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
+# ZSH: setopt PROMPT_SUBST ; PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+# the optional argument will be used as format string.
+# 3b) Alternatively, for a slightly faster prompt, __git_ps1 can
+# be used for PROMPT_COMMAND in Bash or for precmd() in Zsh
+# with two parameters, <pre> and <post>, which are strings
+# you would put in $PS1 before and after the status string
+# generated by the git-prompt machinery. e.g.
+# Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
+# will show username, at-sign, host, colon, cwd, then the
+# status string, followed by dollar and SP, as
+# your prompt.
+# ZSH: precmd () { __git_ps1 "%n" ":%~$ " "|%s" }
+# will show username, pipe, then the status string,
+# followed by colon, cwd, dollar and SP, as your prompt.
+# Optionally, you can supply a third argument with a printf
+# format string to fine-tune the output of the branch status.
+#
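Concretely, the two styles from 3a) and 3b) translate into bash configuration along these lines (a sketch reusing the example strings above):

    source ~/.git-prompt.sh
    # 3a) command substitution inside PS1:
    PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
    # 3b) or, slightly faster, let __git_ps1 set PS1 from PROMPT_COMMAND:
    PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'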
+# The repository status will be displayed only if you are currently in a
+# git repository. The %s token is the placeholder for the shown status.
+#
+# The prompt status always includes the current branch name.
+#
+# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
+# unstaged (*) and staged (+) changes will be shown next to the branch
+# name. You can configure this per-repository with the
+# bash.showDirtyState variable, which defaults to true once
+# GIT_PS1_SHOWDIRTYSTATE is enabled.
+#
+# You can also see whether something is currently stashed by setting
+# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
+# then a '$' will be shown next to the branch name.
+#
+# If you would like to see whether there are untracked files, then you can
+# set GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there are untracked
+# files, then a '%' will be shown next to the branch name. You can
+# configure this per-repository with the bash.showUntrackedFiles
+# variable, which defaults to true once GIT_PS1_SHOWUNTRACKEDFILES is
+# enabled.
+#
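As a sketch, enabling the three indicators described above, and opting one slow repository out of the dirty-state check, could look like this (which repository to exempt is only an example):

    # in ~/.bashrc, after sourcing git-prompt.sh
    GIT_PS1_SHOWDIRTYSTATE=1
    GIT_PS1_SHOWSTASHSTATE=1
    GIT_PS1_SHOWUNTRACKEDFILES=1

    # run inside a repository where the dirty check is too slow
    git config bash.showDirtyState false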
+# If you would like to see the difference between HEAD and its upstream,
+# set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">"
+# indicates you are ahead, "<>" indicates you have diverged and "="
+# indicates that there is no difference. You can further control
+# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated list
+# of values:
+#
+# verbose show number of commits ahead/behind (+/-) upstream
+# name if verbose, then also show the upstream abbrev name
+# legacy don't use the '--count' option available in recent
+# versions of git-rev-list
+# git always compare HEAD to @{upstream}
+# svn always compare HEAD to your SVN upstream
+#
+# By default, __git_ps1 will compare HEAD to your SVN upstream if it can
+# find one, or @{upstream} otherwise. Once you have set
+# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
+# setting the bash.showUpstream config variable.
+#
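For example, the listed values can be combined, or the setting can be overridden per repository (a sketch based on the options above):

    # show ahead/behind counts plus the upstream's abbreviated name
    GIT_PS1_SHOWUPSTREAM="verbose name"

    # run inside one repository to always compare against the SVN upstream
    git config bash.showUpstream "svn"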
+# If you would like to see more information about the identity of
+# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE
+# to one of these values:
+#
+# contains relative to newer annotated tag (v1.6.3.2~35)
+# branch relative to newer tag or branch (master~4)
+# describe relative to older annotated tag (v1.6.3.1-13-gdd42c2f)
+# default exactly matching tag
+#
+# If you would like a colored hint about the current dirty state, set
+# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
+# the colored output of "git status -sb" and are available only when
+# using __git_ps1 for PROMPT_COMMAND or precmd.
+#
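Since the colored hints only take effect in PROMPT_COMMAND/precmd mode, a sketch of enabling them would be:

    GIT_PS1_SHOWCOLORHINTS=1
    PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'    # bash
    # precmd () { __git_ps1 "%n" ":%~$ " "|%s" }     # zsh equivalent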
+# If you would like __git_ps1 to do nothing when the current
+# directory is set up to be ignored by git, then set
+# GIT_PS1_HIDE_IF_PWD_IGNORED to a nonempty value. Override this on the
+# repository level by setting bash.hideIfPwdIgnored to "false".
+
+# check whether printf supports -v
+__git_printf_supports_v=
+printf -v __git_printf_supports_v -- '%s' yes >/dev/null 2>&1
+
+# stores the divergence from upstream in $p
+# used by GIT_PS1_SHOWUPSTREAM
+__git_ps1_show_upstream ()
+{
+ local key value
+ local svn_remote svn_url_pattern count n
+ local upstream=git legacy="" verbose="" name=""
+
+ svn_remote=()
+ # get some config options from git-config
+ local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
+ while read -r key value; do
+ case "$key" in
+ bash.showupstream)
+ GIT_PS1_SHOWUPSTREAM="$value"
+ if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
+ p=""
+ return
+ fi
+ ;;
+ svn-remote.*.url)
+ svn_remote[$((${#svn_remote[@]} + 1))]="$value"
+ svn_url_pattern="$svn_url_pattern\\|$value"
+ upstream=svn+git # default upstream is SVN if available, else git
+ ;;
+ esac
+ done <<< "$output"
+
+ # parse configuration values
+ for option in ${GIT_PS1_SHOWUPSTREAM}; do
+ case "$option" in
+ git|svn) upstream="$option" ;;
+ verbose) verbose=1 ;;
+ legacy) legacy=1 ;;
+ name) name=1 ;;
+ esac
+ done
+
+ # Find our upstream
+ case "$upstream" in
+ git) upstream="@{upstream}" ;;
+ svn*)
+ # get the upstream from the "git-svn-id: ..." in a commit message
+ # (git-svn uses essentially the same procedure internally)
+ local -a svn_upstream
+ svn_upstream=($(git log --first-parent -1 \
+ --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
+ if [[ 0 -ne ${#svn_upstream[@]} ]]; then
+ svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]}
+ svn_upstream=${svn_upstream%@*}
+ local n_stop="${#svn_remote[@]}"
+ for ((n=1; n <= n_stop; n++)); do
+ svn_upstream=${svn_upstream#${svn_remote[$n]}}
+ done
+
+ if [[ -z "$svn_upstream" ]]; then
+ # default branch name for checkouts with no layout:
+ upstream=${GIT_SVN_ID:-git-svn}
+ else
+ upstream=${svn_upstream#/}
+ fi
+ elif [[ "svn+git" = "$upstream" ]]; then
+ upstream="@{upstream}"
+ fi
+ ;;
+ esac
+
+ # Find how many commits we are ahead/behind our upstream
+ if [[ -z "$legacy" ]]; then
+ count="$(git rev-list --count --left-right \
+ "$upstream"...HEAD 2>/dev/null)"
+ else
+ # produce equivalent output to --count for older versions of git
+ local commits
+ if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
+ then
+ local commit behind=0 ahead=0
+ for commit in $commits
+ do
+ case "$commit" in
+ "<"*) ((behind++)) ;;
+ *) ((ahead++)) ;;
+ esac
+ done
+ count="$behind $ahead"
+ else
+ count=""
+ fi
+ fi
+
+ # calculate the result
+ if [[ -z "$verbose" ]]; then
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p="=" ;;
+ "0 "*) # ahead of upstream
+ p=">" ;;
+ *" 0") # behind upstream
+ p="<" ;;
+ *) # diverged from upstream
+ p="<>" ;;
+ esac
+ else
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p=" u=" ;;
+ "0 "*) # ahead of upstream
+ p=" u+${count#0 }" ;;
+ *" 0") # behind upstream
+ p=" u-${count% 0}" ;;
+ *) # diverged from upstream
+ p=" u+${count#* }-${count% *}" ;;
+ esac
+ if [[ -n "$count" && -n "$name" ]]; then
+ __git_ps1_upstream_name=$(git rev-parse \
+ --abbrev-ref "$upstream" 2>/dev/null)
+ if [ $pcmode = yes ] && [ $ps1_expanded = yes ]; then
+ p="$p \${__git_ps1_upstream_name}"
+ else
+ p="$p ${__git_ps1_upstream_name}"
+ # not needed anymore; keep user's
+ # environment clean
+ unset __git_ps1_upstream_name
+ fi
+ fi
+ fi
+
+}
+
+# Helper function that is meant to be called from __git_ps1. It
+# injects color codes into the appropriate gitstring variables used
+# to build a gitstring.
+__git_ps1_colorize_gitstring ()
+{
+ if [[ -n ${ZSH_VERSION-} ]]; then
+ local c_red='%F{red}'
+ local c_green='%F{green}'
+ local c_lblue='%F{blue}'
+ local c_clear='%f'
+ else
+ # Using \[ and \] around colors is necessary to prevent
+ # issues with command line editing/browsing/completion!
+ local c_red='\[\e[31m\]'
+ local c_green='\[\e[32m\]'
+ local c_lblue='\[\e[1;34m\]'
+ local c_clear='\[\e[0m\]'
+ fi
+ local bad_color=$c_red
+ local ok_color=$c_green
+ local flags_color="$c_lblue"
+
+ local branch_color=""
+ if [ $detached = no ]; then
+ branch_color="$ok_color"
+ else
+ branch_color="$bad_color"
+ fi
+ c="$branch_color$c"
+
+ z="$c_clear$z"
+ if [ "$w" = "*" ]; then
+ w="$bad_color$w"
+ fi
+ if [ -n "$i" ]; then
+ i="$ok_color$i"
+ fi
+ if [ -n "$s" ]; then
+ s="$flags_color$s"
+ fi
+ if [ -n "$u" ]; then
+ u="$bad_color$u"
+ fi
+ r="$c_clear$r"
+}
+
+__git_eread ()
+{
+ local f="$1"
+ shift
+ test -r "$f" && read "$@" <"$f"
+}
+
+# __git_ps1 accepts 0 or 1 arguments (i.e., a format string)
+# when called from PS1 using command substitution;
+# in this mode it prints text to add to the bash PS1 prompt (including the branch name).
+#
+# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc);
+# in that case it _sets_ PS1. The arguments are parts of a PS1 string.
+# When two arguments are given, the first is prepended and the second appended
+# to the state string when assigned to PS1.
+# The optional third parameter will be used as a printf format string to further
+# customize the output of the git-status string.
+# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true
+__git_ps1 ()
+{
+ # preserve exit status
+ local exit=$?
+ local pcmode=no
+ local detached=no
+ local ps1pc_start='\u@\h:\w '
+ local ps1pc_end='\$ '
+ local printf_format=' (%s)'
+
+ case "$#" in
+ 2|3) pcmode=yes
+ ps1pc_start="$1"
+ ps1pc_end="$2"
+ printf_format="${3:-$printf_format}"
+ # set PS1 to a plain prompt so that we can
+ # simply return early if the prompt should not
+ # be decorated
+ PS1="$ps1pc_start$ps1pc_end"
+ ;;
+ 0|1) printf_format="${1:-$printf_format}"
+ ;;
+ *) return $exit
+ ;;
+ esac
+
+ # ps1_expanded: This variable is set to 'yes' if the shell
+ # subjects the value of PS1 to parameter expansion:
+ #
+ # * bash does unless the promptvars option is disabled
+ # * zsh does not unless the PROMPT_SUBST option is set
+ # * POSIX shells always do
+ #
+ # If the shell would expand the contents of PS1 when drawing
+ # the prompt, a raw ref name must not be included in PS1.
+ # This protects the user from arbitrary code execution via
+ # specially crafted ref names. For example, a ref named
+ # 'refs/heads/$(IFS=_;cmd=sudo_rm_-rf_/;$cmd)' might cause the
+ # shell to execute 'sudo rm -rf /' when the prompt is drawn.
+ #
+ # Instead, the ref name should be placed in a separate global
+ # variable (in the __git_ps1_* namespace to avoid colliding
+ # with the user's environment) and that variable should be
+ # referenced from PS1. For example:
+ #
+ # __git_ps1_foo=$(do_something_to_get_ref_name)
+ # PS1="...stuff...\${__git_ps1_foo}...stuff..."
+ #
+ # If the shell does not expand the contents of PS1, the raw
+ # ref name must be included in PS1.
+ #
+ # The value of this variable is only relevant when in pcmode.
+ #
+ # Assume that the shell follows the POSIX specification and
+ # expands PS1 unless determined otherwise. (This is more
+ # likely to be correct if the user has a non-bash, non-zsh
+ # shell and safer than the alternative if the assumption is
+ # incorrect.)
+ #
+ local ps1_expanded=yes
+ [ -z "$ZSH_VERSION" ] || [[ -o PROMPT_SUBST ]] || ps1_expanded=no
+ [ -z "$BASH_VERSION" ] || shopt -q promptvars || ps1_expanded=no
+
+ local repo_info rev_parse_exit_code
+ repo_info="$(git rev-parse --git-dir --is-inside-git-dir \
+ --is-bare-repository --is-inside-work-tree \
+ --short HEAD 2>/dev/null)"
+ rev_parse_exit_code="$?"
+
+ if [ -z "$repo_info" ]; then
+ return $exit
+ fi
+
+ local short_sha
+ if [ "$rev_parse_exit_code" = "0" ]; then
+ short_sha="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ fi
+ local inside_worktree="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local bare_repo="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local inside_gitdir="${repo_info##*$'\n'}"
+ local g="${repo_info%$'\n'*}"
+
+ if [ "true" = "$inside_worktree" ] &&
+ [ -n "${GIT_PS1_HIDE_IF_PWD_IGNORED-}" ] &&
+ [ "$(git config --bool bash.hideIfPwdIgnored)" != "false" ] &&
+ git check-ignore -q .
+ then
+ return $exit
+ fi
+
+ local r=""
+ local b=""
+ local step=""
+ local total=""
+ if [ -d "$g/rebase-merge" ]; then
+ __git_eread "$g/rebase-merge/head-name" b
+ __git_eread "$g/rebase-merge/msgnum" step
+ __git_eread "$g/rebase-merge/end" total
+ if [ -f "$g/rebase-merge/interactive" ]; then
+ r="|REBASE-i"
+ else
+ r="|REBASE-m"
+ fi
+ else
+ if [ -d "$g/rebase-apply" ]; then
+ __git_eread "$g/rebase-apply/next" step
+ __git_eread "$g/rebase-apply/last" total
+ if [ -f "$g/rebase-apply/rebasing" ]; then
+ __git_eread "$g/rebase-apply/head-name" b
+ r="|REBASE"
+ elif [ -f "$g/rebase-apply/applying" ]; then
+ r="|AM"
+ else
+ r="|AM/REBASE"
+ fi
+ elif [ -f "$g/MERGE_HEAD" ]; then
+ r="|MERGING"
+ elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
+ r="|CHERRY-PICKING"
+ elif [ -f "$g/REVERT_HEAD" ]; then
+ r="|REVERTING"
+ elif [ -f "$g/BISECT_LOG" ]; then
+ r="|BISECTING"
+ fi
+
+ if [ -n "$b" ]; then
+ :
+ elif [ -h "$g/HEAD" ]; then
+ # symlink symbolic ref
+ b="$(git symbolic-ref HEAD 2>/dev/null)"
+ else
+ local head=""
+ if ! __git_eread "$g/HEAD" head; then
+ return $exit
+ fi
+ # is it a symbolic ref?
+ b="${head#ref: }"
+ if [ "$head" = "$b" ]; then
+ detached=yes
+ b="$(
+ case "${GIT_PS1_DESCRIBE_STYLE-}" in
+ (contains)
+ git describe --contains HEAD ;;
+ (branch)
+ git describe --contains --all HEAD ;;
+ (describe)
+ git describe HEAD ;;
+ (* | default)
+ git describe --tags --exact-match HEAD ;;
+ esac 2>/dev/null)" ||
+
+ b="$short_sha..."
+ b="($b)"
+ fi
+ fi
+ fi
+
+ if [ -n "$step" ] && [ -n "$total" ]; then
+ r="$r $step/$total"
+ fi
+
+ local w=""
+ local i=""
+ local s=""
+ local u=""
+ local c=""
+ local p=""
+
+ if [ "true" = "$inside_gitdir" ]; then
+ if [ "true" = "$bare_repo" ]; then
+ c="BARE:"
+ else
+ b="GIT_DIR!"
+ fi
+ elif [ "true" = "$inside_worktree" ]; then
+ if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
+ [ "$(git config --bool bash.showDirtyState)" != "false" ]
+ then
+ git diff --no-ext-diff --quiet --exit-code || w="*"
+ if [ -n "$short_sha" ]; then
+ git diff-index --cached --quiet HEAD -- || i="+"
+ else
+ i="#"
+ fi
+ fi
+ if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ] &&
+ git rev-parse --verify --quiet refs/stash >/dev/null
+ then
+ s="$"
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
+ [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
+ git ls-files --others --exclude-standard --error-unmatch -- '*' >/dev/null 2>/dev/null
+ then
+ u="%${ZSH_VERSION+%}"
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+ __git_ps1_show_upstream
+ fi
+ fi
+
+ local z="${GIT_PS1_STATESEPARATOR-" "}"
+
+ # NO color option unless in PROMPT_COMMAND mode
+ if [ $pcmode = yes ] && [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
+ __git_ps1_colorize_gitstring
+ fi
+
+ b=${b##refs/heads/}
+ if [ $pcmode = yes ] && [ $ps1_expanded = yes ]; then
+ __git_ps1_branch_name=$b
+ b="\${__git_ps1_branch_name}"
+ fi
+
+ local f="$w$i$s$u"
+ local gitstring="$c$b${f:+$z$f}$r$p"
+
+ if [ $pcmode = yes ]; then
+ if [ "${__git_printf_supports_v-}" != yes ]; then
+ gitstring=$(printf -- "$printf_format" "$gitstring")
+ else
+ printf -v gitstring -- "$printf_format" "$gitstring"
+ fi
+ PS1="$ps1pc_start$gitstring$ps1pc_end"
+ else
+ printf -- "$printf_format" "$gitstring"
+ fi
+
+ return $exit
+}
diff --git a/contrib/contacts/.gitignore b/contrib/contacts/.gitignore
new file mode 100644
index 0000000000..f385ee643c
--- /dev/null
+++ b/contrib/contacts/.gitignore
@@ -0,0 +1,3 @@
+git-contacts.1
+git-contacts.html
+git-contacts.xml
diff --git a/contrib/contacts/Makefile b/contrib/contacts/Makefile
new file mode 100644
index 0000000000..a2990f0dcb
--- /dev/null
+++ b/contrib/contacts/Makefile
@@ -0,0 +1,71 @@
+# The default target of this Makefile is...
+all::
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+gitexecdir ?= $(prefix)/libexec/git-core
+mandir ?= $(prefix)/share/man
+man1dir ?= $(mandir)/man1
+htmldir ?= $(prefix)/share/doc/git-doc
+
+../../GIT-VERSION-FILE: FORCE
+ $(MAKE) -C ../../ GIT-VERSION-FILE
+
+-include ../../GIT-VERSION-FILE
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL ?= install
+RM ?= rm -f
+
+ASCIIDOC = asciidoc
+XMLTO = xmlto
+
+ifndef SHELL_PATH
+ SHELL_PATH = /bin/sh
+endif
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+ASCIIDOC_CONF = ../../Documentation/asciidoc.conf
+MANPAGE_XSL = ../../Documentation/manpage-normal.xsl
+
+GIT_CONTACTS := git-contacts
+
+GIT_CONTACTS_DOC := git-contacts.1
+GIT_CONTACTS_XML := git-contacts.xml
+GIT_CONTACTS_TXT := git-contacts.txt
+GIT_CONTACTS_HTML := git-contacts.html
+
+doc: $(GIT_CONTACTS_DOC) $(GIT_CONTACTS_HTML)
+
+install: $(GIT_CONTACTS)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(gitexecdir)
+ $(INSTALL) -m 755 $(GIT_CONTACTS) $(DESTDIR)$(gitexecdir)
+
+install-doc: install-man install-html
+
+install-man: $(GIT_CONTACTS_DOC)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
+ $(INSTALL) -m 644 $^ $(DESTDIR)$(man1dir)
+
+install-html: $(GIT_CONTACTS_HTML)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(htmldir)
+ $(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir)
+
+$(GIT_CONTACTS_DOC): $(GIT_CONTACTS_XML)
+ $(XMLTO) -m $(MANPAGE_XSL) man $^
+
+$(GIT_CONTACTS_XML): $(GIT_CONTACTS_TXT)
+ $(ASCIIDOC) -b docbook -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(GIT_VERSION) $^
+
+$(GIT_CONTACTS_HTML): $(GIT_CONTACTS_TXT)
+ $(ASCIIDOC) -b xhtml11 -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(GIT_VERSION) $^
+
+clean:
+ $(RM) $(GIT_CONTACTS)
+ $(RM) *.xml *.html *.1
+
+.PHONY: FORCE
diff --git a/contrib/contacts/git-contacts b/contrib/contacts/git-contacts
new file mode 100755
index 0000000000..dbe2abf277
--- /dev/null
+++ b/contrib/contacts/git-contacts
@@ -0,0 +1,203 @@
+#!/usr/bin/perl
+
+# List people who might be interested in a patch. Useful as the argument to
+# the git-send-email --cc-cmd option, and in other situations.
+#
+# Usage: git contacts <file | rev-list option> ...
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+my $since = '5-years-ago';
+my $min_percent = 10;
+my $labels_rx = qr/Signed-off-by|Reviewed-by|Acked-by|Cc/i;
+my %seen;
+
+sub format_contact {
+ my ($name, $email) = @_;
+ return "$name <$email>";
+}
+
+sub parse_commit {
+ my ($commit, $data) = @_;
+ my $contacts = $commit->{contacts};
+ my $inbody = 0;
+ for (split(/^/m, $data)) {
+ if (not $inbody) {
+ if (/^author ([^<>]+) <(\S+)> .+$/) {
+ $contacts->{format_contact($1, $2)} = 1;
+ } elsif (/^$/) {
+ $inbody = 1;
+ }
+ } elsif (/^$labels_rx:\s+([^<>]+)\s+<(\S+?)>$/o) {
+ $contacts->{format_contact($1, $2)} = 1;
+ }
+ }
+}
+
+sub import_commits {
+ my ($commits) = @_;
+ return unless %$commits;
+ my $pid = open2 my $reader, my $writer, qw(git cat-file --batch);
+ for my $id (keys(%$commits)) {
+ print $writer "$id\n";
+ my $line = <$reader>;
+ if ($line =~ /^([0-9a-f]{40}) commit (\d+)/) {
+ my ($cid, $len) = ($1, $2);
+ die "expected $id but got $cid\n" unless $id eq $cid;
+ my $data;
+ # cat-file emits newline after data, so read len+1
+ read $reader, $data, $len + 1;
+ parse_commit($commits->{$id}, $data);
+ }
+ }
+ close $reader;
+ close $writer;
+ waitpid($pid, 0);
+ die "git-cat-file error: $?\n" if $?;
+}
+
+sub get_blame {
+ my ($commits, $source, $from, $ranges) = @_;
+ return unless @$ranges;
+ open my $f, '-|',
+ qw(git blame --porcelain -C),
+ map({"-L$_->[0],+$_->[1]"} @$ranges),
+ '--since', $since, "$from^", '--', $source or die;
+ while (<$f>) {
+ if (/^([0-9a-f]{40}) \d+ \d+ \d+$/) {
+ my $id = $1;
+ $commits->{$id} = { id => $id, contacts => {} }
+ unless $seen{$id};
+ $seen{$id} = 1;
+ }
+ }
+ close $f;
+}
+
+sub blame_sources {
+ my ($sources, $commits) = @_;
+ for my $s (keys %$sources) {
+ for my $id (keys %{$sources->{$s}}) {
+ get_blame($commits, $s, $id, $sources->{$s}{$id});
+ }
+ }
+}
+
+sub scan_patches {
+ my ($sources, $id, $f) = @_;
+ my $source;
+ while (<$f>) {
+ if (/^From ([0-9a-f]{40}) Mon Sep 17 00:00:00 2001$/) {
+ $id = $1;
+ $seen{$id} = 1;
+ }
+ next unless $id;
+ if (m{^--- (?:a/(.+)|/dev/null)$}) {
+ $source = $1;
+ } elsif (/^@@ -(\d+)(?:,(\d+))?/ && $source) {
+ my $len = defined($2) ? $2 : 1;
+ push @{$sources->{$source}{$id}}, [$1, $len] if $len;
+ }
+ }
+}
+
+sub scan_patch_file {
+ my ($commits, $file) = @_;
+ open my $f, '<', $file or die "read failure: $file: $!\n";
+ scan_patches($commits, undef, $f);
+ close $f;
+}
+
+sub parse_rev_args {
+ my @args = @_;
+ open my $f, '-|',
+ qw(git rev-parse --revs-only --default HEAD --symbolic), @args
+ or die;
+ my @revs;
+ while (<$f>) {
+ chomp;
+ push @revs, $_;
+ }
+ close $f;
+ return @revs if scalar(@revs) != 1;
+ return "^$revs[0]", 'HEAD' unless $revs[0] =~ /^-/;
+ return $revs[0], 'HEAD';
+}
+
+sub scan_rev_args {
+ my ($commits, $args) = @_;
+ my @revs = parse_rev_args(@$args);
+ open my $f, '-|', qw(git rev-list --reverse), @revs or die;
+ while (<$f>) {
+ chomp;
+ my $id = $_;
+ $seen{$id} = 1;
+ open my $g, '-|', qw(git show -C --oneline), $id or die;
+ scan_patches($commits, $id, $g);
+ close $g;
+ }
+ close $f;
+}
+
+sub mailmap_contacts {
+ my ($contacts) = @_;
+ my %mapped;
+ my $pid = open2 my $reader, my $writer, qw(git check-mailmap --stdin);
+ for my $contact (keys(%$contacts)) {
+ print $writer "$contact\n";
+ my $canonical = <$reader>;
+ chomp $canonical;
+ $mapped{$canonical} += $contacts->{$contact};
+ }
+ close $reader;
+ close $writer;
+ waitpid($pid, 0);
+ die "git-check-mailmap error: $?\n" if $?;
+ return \%mapped;
+}
+
+if (!@ARGV) {
+ die "No input revisions or patch files\n";
+}
+
+my (@files, @rev_args);
+for (@ARGV) {
+ if (-e) {
+ push @files, $_;
+ } else {
+ push @rev_args, $_;
+ }
+}
+
+my %sources;
+for (@files) {
+ scan_patch_file(\%sources, $_);
+}
+if (@rev_args) {
+ scan_rev_args(\%sources, \@rev_args)
+}
+
+my $toplevel = `git rev-parse --show-toplevel`;
+chomp $toplevel;
+chdir($toplevel) or die "chdir failure: $toplevel: $!\n";
+
+my %commits;
+blame_sources(\%sources, \%commits);
+import_commits(\%commits);
+
+my $contacts = {};
+for my $commit (values %commits) {
+ for my $contact (keys %{$commit->{contacts}}) {
+ $contacts->{$contact}++;
+ }
+}
+$contacts = mailmap_contacts($contacts);
+
+my $ncommits = scalar(keys %commits);
+for my $contact (keys %$contacts) {
+ my $percent = $contacts->{$contact} * 100 / $ncommits;
+ next if $percent < $min_percent;
+ print "$contact\n";
+}
diff --git a/contrib/contacts/git-contacts.txt b/contrib/contacts/git-contacts.txt
new file mode 100644
index 0000000000..dd914d1261
--- /dev/null
+++ b/contrib/contacts/git-contacts.txt
@@ -0,0 +1,94 @@
+git-contacts(1)
+===============
+
+NAME
+----
+git-contacts - List people who might be interested in a set of changes
+
+
+SYNOPSIS
+--------
+[verse]
+'git contacts' (<patch>|<range>|<rev>)...
+
+
+DESCRIPTION
+-----------
+
+Given a set of changes, specified as patch files or revisions, determine people
+who might be interested in those changes. This is done by consulting the
+history of each patch or revision hunk to find people mentioned by commits
+which touched the lines of files under consideration.
+
+Input consists of one or more patch files or revision arguments. A revision
+argument can be a range, or a single `<rev>`, which is interpreted as
+`<rev>..HEAD`; thus the same revision arguments are accepted as for
+linkgit:git-format-patch[1]. Patch files and revision arguments can be combined
+in the same invocation.
+
+This command can be useful for determining the list of people with whom to
+discuss proposed changes, or for finding the list of recipients to Cc: when
+submitting a patch series via `git send-email`. For the latter case, `git
+contacts` can be used as the argument to `git send-email`'s `--cc-cmd` option.
+
+
+DISCUSSION
+----------
+
+`git blame` is invoked for each hunk in a patch file or revision. For each
+commit mentioned by `git blame`, the commit message is consulted for people who
+authored, reviewed, signed, acknowledged, or were Cc:'d. Once the list of
+participants is known, each person's relevance is computed by considering how
+many commits mentioned that person compared with the total number of commits
+under consideration. The final output consists only of participants who exceed
+a minimum threshold of participation.
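+
+As a rough sketch, each such invocation looks like the following, with one
+`-L` option per hunk range; the placeholders stand in for values taken from
+the hunk and for the hard-coded limits described under LIMITATIONS below:
+
+------------
+$ git blame --porcelain -C -L<start>,+<count> --since <date-limit> <commit>^ -- <file>
+------------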
+
+
+OUTPUT
+------
+
+For each person of interest, a single line is output, terminated by a newline.
+If the person's name is known, ``Name $$<user@host>$$'' is printed; otherwise
+only ``$$<user@host>$$'' is printed.
+
+
+EXAMPLES
+--------
+
+* Consult patch files:
++
+------------
+$ git contacts feature/*.patch
+------------
+
+* Revision range:
++
+------------
+$ git contacts R1..R2
+------------
+
+* From a single revision to `HEAD`:
++
+------------
+$ git contacts origin
+------------
+
+* Helper for `git send-email`:
++
+------------
+$ git send-email --cc-cmd='git contacts' feature/*.patch
+------------
+
+
+LIMITATIONS
+-----------
+
+Several conditions controlling a person's significance are currently
+hard-coded, such as minimum participation level (10%), blame date-limiting (5
+years), and `-C` level for detecting moved and copied lines (a single `-C`). In
+the future, these conditions may become configurable.
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
diff --git a/contrib/continuous/cidaemon b/contrib/continuous/cidaemon
deleted file mode 100644
index 4009a151de..0000000000
--- a/contrib/continuous/cidaemon
+++ /dev/null
@@ -1,503 +0,0 @@
-#!/usr/bin/perl
-#
-# A daemon that waits for update events sent by its companion
-# post-receive-cinotify hook, checks out a new copy of source,
-# compiles it, and emails the guilty parties if the compile
-# (and optionally test suite) fails.
-#
-# To use this daemon, configure it and run it. It will disconnect
-# from your terminal and fork into the background. The daemon must
-# have local filesystem access to the source repositories, as it
-# uses objects/info/alternates to avoid copying objects.
-#
-# Add its companion post-receive-cinotify hook as the post-receive
-# hook to each repository that the daemon should monitor. Yes, a
-# single daemon can monitor more than one repository.
-#
-# To use multiple daemons on the same system, give them each a
-# unique queue file and tmpdir.
-#
-# Global Config
-# -------------
-# Reads from a Git style configuration file. This will be
-# ~/.gitconfig by default but can be overridden by setting
-# the GIT_CONFIG_FILE environment variable before starting.
-#
-# cidaemon.smtpHost
-# Hostname of the SMTP server the daemon will send email
-# through. Defaults to 'localhost'.
-#
-# cidaemon.smtpUser
-# Username to authenticate to the SMTP server as. This
-# variable is optional; if it is not supplied then no
-# authentication will be performed.
-#
-# cidaemon.smtpPassword
-# Password to authenticate to the SMTP server as. This
-# variable is optional. If not supplied but smtpUser was,
-# the daemon prompts for the password before forking into
-# the background.
-#
-# cidaemon.smtpAuth
-# Type of authentication to perform with the SMTP server.
-# If set to 'login' and smtpUser was defined, this will
-# use the AUTH LOGIN command, which is suitable for use
-# with at least one version of Microsoft Exchange Server.
-# If not set the daemon will use whatever auth methods
-# are supported by your version of Net::SMTP.
-#
-# cidaemon.email
-# Email address that daemon generated emails will be sent
-# from. This should be a useful email address within your
-# organization. Required.
-#
-# cidaemon.name
-# Human friendly name that the daemon will send emails as.
-# Defaults to 'cidaemon'.
-#
-# cidaemon.scanDelay
-# Number of seconds to sleep between polls of the queue file.
-# Defaults to 60.
-#
-# cidaemon.recentCache
-# Number of recent commit SHA-1s per repository to cache and
-# skip building if they appear again. This is useful to avoid
-# rebuilding the same commit multiple times just because it was
-# pushed into more than one branch. Defaults to 100.
-#
-# cidaemon.tmpdir
-# Scratch directory to create the builds within. The daemon
-# makes a new subdirectory for each build, then deletes it when
-# the build has finished. The pid file is also placed here.
-# Defaults to '/tmp'.
-#
-# cidaemon.queue
-# Path to the queue file that the post-receive-cinotify hook
-# appends events to. This file is polled by the daemon. It
-# must not be on an NFS mount (uses flock). Required.
-#
-# cidaemon.nocc
-# Perl regex patterns to match against author and committer
-# lines. If a pattern matches, that author or committer will
-# not be notified of a build failure.
-#
-# Per Repository Config
-# ----------------------
-# Read from the source repository's config file.
-#
-# builder.command
-# Shell command to execute the build. This command must
-# return 0 on "success" and non-zero on failure. If you
-# also want to run a test suite, make sure your command
-# does that too. Required.
-#
-# builder.queue
-# Queue file to notify the cidaemon through. Should match
-# cidaemon.queue. If not set the hook will not notify the
-# cidaemon.
-#
-# builder.skip
-# Perl regex patterns of refs that should not be sent to
-# cidaemon. Updates of these refs will be ignored.
-#
-# builder.newBranchBase
-# Glob patterns of refs that should be used to form the
-# 'old' revions of a newly created ref. This should set
-# to be globs that match your 'mainline' branches. This
-# way a build failure of a brand new topic branch does not
-# attempt to email everyone since the beginning of time;
-# instead it only emails those authors of commits not in
-# these 'mainline' branches.
-
-local $ENV{PATH} = join ':', qw(
- /opt/git/bin
- /usr/bin
- /bin
- );
-
-use strict;
-use warnings;
-use FindBin qw($RealBin);
-use File::Spec;
-use lib File::Spec->catfile($RealBin, '..', 'perl5');
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-use POSIX qw(strftime);
-use Getopt::Long qw(:config no_auto_abbrev auto_help);
-
-sub git_config ($;$)
-{
- my $var = shift;
- my $required = shift || 0;
- local *GIT;
- open GIT, '-|','git','config','--get',$var;
- my $r = <GIT>;
- chop $r if $r;
- close GIT;
- die "error: $var not set.\n" if ($required && !$r);
- return $r;
-}
-
-package EXCHANGE_NET_SMTP;
-
-# Microsoft Exchange Server requires an 'AUTH LOGIN'
-# style of authentication. This is different from
-# the default supported by Net::SMTP so we subclass
-# and override the auth method to support that.
-
-use Net::SMTP;
-use Net::Cmd;
-use MIME::Base64 qw(encode_base64);
-our @ISA = qw(Net::SMTP);
-our $auth_type = ::git_config 'cidaemon.smtpAuth';
-
-sub new
-{
- my $self = shift;
- my $type = ref($self) || $self;
- $type->SUPER::new(@_);
-}
-
-sub auth
-{
- my $self = shift;
- return $self->SUPER::auth(@_) unless $auth_type eq 'login';
-
- my $user = encode_base64 shift, '';
- my $pass = encode_base64 shift, '';
- return 0 unless CMD_MORE == $self->command("AUTH LOGIN")->response;
- return 0 unless CMD_MORE == $self->command($user)->response;
- CMD_OK == $self->command($pass)->response;
-}
-
-package main;
-
-my ($debug_flag, %recent);
-
-my $ex_host = git_config('cidaemon.smtpHost') || 'localhost';
-my $ex_user = git_config('cidaemon.smtpUser');
-my $ex_pass = git_config('cidaemon.smtpPassword');
-
-my $ex_from_addr = git_config('cidaemon.email', 1);
-my $ex_from_name = git_config('cidaemon.name') || 'cidaemon';
-
-my $scan_delay = git_config('cidaemon.scanDelay') || 60;
-my $recent_size = git_config('cidaemon.recentCache') || 100;
-my $tmpdir = git_config('cidaemon.tmpdir') || '/tmp';
-my $queue_name = git_config('cidaemon.queue', 1);
-my $queue_lock = "$queue_name.lock";
-
-my @nocc_list;
-open GIT,'git config --get-all cidaemon.nocc|';
-while (<GIT>) {
- chop;
- push @nocc_list, $_;
-}
-close GIT;
-
-sub nocc_author ($)
-{
- local $_ = shift;
- foreach my $pat (@nocc_list) {
- return 1 if /$pat/;
- }
- 0;
-}
-
-sub input_echo ($)
-{
- my $prompt = shift;
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- chop $input;
- return $input;
-}
-
-sub input_noecho ($)
-{
- my $prompt = shift;
-
- my $end = sub {system('stty','echo');print "\n";exit};
- local $SIG{TERM} = $end;
- local $SIG{INT} = $end;
- system('stty','-echo');
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- system('stty','echo');
- print "\n";
- chop $input;
- return $input;
-}
-
-sub rfc2822_date ()
-{
- strftime("%a, %d %b %Y %H:%M:%S %Z", localtime);
-}
-
-sub send_email ($$$)
-{
- my ($subj, $body, $to) = @_;
- my $now = rfc2822_date;
- my $to_str = '';
- my @rcpt_to;
- foreach (@$to) {
- my $s = $_;
- $s =~ s/^/"/;
- $s =~ s/(\s+<)/"$1/;
- $to_str .= ', ' if $to_str;
- $to_str .= $s;
- push @rcpt_to, $1 if $s =~ /<(.*)>/;
- }
- die "Nobody to send to.\n" unless @rcpt_to;
- my $msg = <<EOF;
-From: "$ex_from_name" <$ex_from_addr>
-To: $to_str
-Date: $now
-Subject: $subj
-
-$body
-EOF
-
- my $smtp = EXCHANGE_NET_SMTP->new(Host => $ex_host)
- or die "Cannot connect to $ex_host: $!\n";
- if ($ex_user && $ex_pass) {
- $smtp->auth($ex_user,$ex_pass)
- or die "$ex_host rejected $ex_user\n";
- }
- $smtp->mail($ex_from_addr)
- or die "$ex_host rejected $ex_from_addr\n";
- scalar($smtp->recipient(@rcpt_to, { SkipBad => 1 }))
- or die "$ex_host did not accept any addresses.\n";
- $smtp->data($msg)
- or die "$ex_host rejected message data\n";
- $smtp->quit;
-}
-
-sub pop_queue ()
-{
- open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
- flock LOCK, LOCK_EX;
-
- my $queue = -f $queue_name ? retrieve $queue_name : [];
- my $ent = shift @$queue;
- nstore $queue, $queue_name;
-
- flock LOCK, LOCK_UN;
- close LOCK;
- $ent;
-}
-
-sub git_exec (@)
-{
- system('git',@_) == 0 or die "Cannot git " . join(' ', @_) . "\n";
-}
-
-sub git_val (@)
-{
- open(C, '-|','git',@_);
- my $r = <C>;
- chop $r if $r;
- close C;
- $r;
-}
-
-sub do_build ($$)
-{
- my ($git_dir, $new) = @_;
-
- my $tmp = File::Spec->catfile($tmpdir, "builder$$");
- system('rm','-rf',$tmp) == 0 or die "Cannot clear $tmp\n";
- die "Cannot clear $tmp.\n" if -e $tmp;
-
- my $result = 1;
- eval {
- my $command;
- {
- local $ENV{GIT_DIR} = $git_dir;
- $command = git_val 'config','builder.command';
- }
- die "No builder.command for $git_dir.\n" unless $command;
-
- git_exec 'clone','-n','-l','-s',$git_dir,$tmp;
- chmod 0700, $tmp or die "Cannot lock $tmp\n";
- chdir $tmp or die "Cannot enter $tmp\n";
-
- git_exec 'update-ref','HEAD',$new;
- git_exec 'read-tree','-m','-u','HEAD','HEAD';
- system $command;
- if ($? == -1) {
- print STDERR "failed to execute '$command': $!\n";
- $result = 1;
- } elsif ($? & 127) {
- my $sig = $? & 127;
- print STDERR "'$command' died from signal $sig\n";
- $result = 1;
- } else {
- my $r = $? >> 8;
- print STDERR "'$command' exited with $r\n" if $r;
- $result = $r;
- }
- };
- if ($@) {
- $result = 2;
- print STDERR "$@\n";
- }
-
- chdir '/';
- system('rm','-rf',$tmp);
- rmdir $tmp;
- $result;
-}
-
-sub build_failed ($$$$$)
-{
- my ($git_dir, $ref, $old, $new, $msg) = @_;
-
- $git_dir =~ m,/([^/]+)$,;
- my $repo_name = $1;
- $ref =~ s,^refs/(heads|tags)/,,;
-
- my %authors;
- my $shortlog;
- my $revstr;
- {
- local $ENV{GIT_DIR} = $git_dir;
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- open LOG,'-|','git','rev-list','--pretty=raw',@revs;
- while (<LOG>) {
- if (s/^(author|committer) //) {
- chomp;
- s/>.*$/>/;
- $authors{$_} = 1 unless nocc_author $_;
- }
- }
- close LOG;
- open LOG,'-|','git','shortlog',@revs;
- $shortlog .= $_ while <LOG>;
- close LOG;
- $revstr = join(' ', @revs);
- }
-
- my @to = sort keys %authors;
- unless (@to) {
- print STDERR "error: No authors in $revstr\n";
- return;
- }
-
- my $subject = "[$repo_name] $ref : Build Failed";
- my $body = <<EOF;
-Project: $git_dir
-Branch: $ref
-Commits: $revstr
-
-$shortlog
-Build Output:
---------------------------------------------------------------
-$msg
-EOF
- send_email($subject, $body, \@to);
-}
-
-sub run_build ($$$$)
-{
- my ($git_dir, $ref, $old, $new) = @_;
-
- if ($debug_flag) {
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- print "BUILDING $git_dir\n";
- print " BRANCH: $ref\n";
- print " COMMITS: ", join(' ', @revs), "\n";
- }
-
- local(*R, *W);
- pipe R, W or die "cannot pipe builder: $!";
-
- my $builder = fork();
- if (!defined $builder) {
- die "cannot fork builder: $!";
- } elsif (0 == $builder) {
- close R;
- close STDIN;open(STDIN, '/dev/null');
- open(STDOUT, '>&W');
- open(STDERR, '>&W');
- exit do_build $git_dir, $new;
- } else {
- close W;
- my $out = '';
- $out .= $_ while <R>;
- close R;
- waitpid $builder, 0;
- build_failed $git_dir, $ref, $old, $new, $out if $?;
- }
-
- print "DONE\n\n" if $debug_flag;
-}
-
-sub daemon_loop ()
-{
- my $run = 1;
- my $stop_sub = sub {$run = 0};
- $SIG{HUP} = $stop_sub;
- $SIG{INT} = $stop_sub;
- $SIG{TERM} = $stop_sub;
-
- mkdir $tmpdir, 0755;
- my $pidfile = File::Spec->catfile($tmpdir, "cidaemon.pid");
- open(O, ">$pidfile"); print O "$$\n"; close O;
-
- while ($run) {
- my $ent = pop_queue;
- if ($ent) {
- my ($git_dir, $ref, $old, $new) = @$ent;
-
- $ent = $recent{$git_dir};
- $recent{$git_dir} = $ent = [[], {}] unless $ent;
- my ($rec_arr, $rec_hash) = @$ent;
- next if $rec_hash->{$new}++;
- while (@$rec_arr >= $recent_size) {
- my $to_kill = shift @$rec_arr;
- delete $rec_hash->{$to_kill};
- }
- push @$rec_arr, $new;
-
- run_build $git_dir, $ref, $old, $new;
- } else {
- sleep $scan_delay;
- }
- }
-
- unlink $pidfile;
-}
-
-$debug_flag = 0;
-GetOptions(
- 'debug|d' => \$debug_flag,
- 'smtp-user=s' => \$ex_user,
-) or die "usage: $0 [--debug] [--smtp-user=user]\n";
-
-$ex_pass = input_noecho("$ex_user SMTP password: ")
- if ($ex_user && !$ex_pass);
-
-if ($debug_flag) {
- daemon_loop;
- exit 0;
-}
-
-my $daemon = fork();
-if (!defined $daemon) {
- die "cannot fork daemon: $!";
-} elsif (0 == $daemon) {
- close STDIN;open(STDIN, '/dev/null');
- close STDOUT;open(STDOUT, '>/dev/null');
- close STDERR;open(STDERR, '>/dev/null');
- daemon_loop;
- exit 0;
-} else {
- print "Daemon $daemon running in the background.\n";
-}
diff --git a/contrib/continuous/post-receive-cinotify b/contrib/continuous/post-receive-cinotify
deleted file mode 100644
index b8f5a609af..0000000000
--- a/contrib/continuous/post-receive-cinotify
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/perl
-#
-# A hook that notifies its companion cidaemon through a simple
-# queue file that a ref has been updated via a push (actually
-# by a receive-pack running on the server).
-#
-# See cidaemon for per-repository configuration details.
-#
-# To use this hook, add it as the post-receive hook, make it
-# executable, and set its configuration options.
-#
-
-local $ENV{PATH} = '/opt/git/bin';
-
-use strict;
-use warnings;
-use File::Spec;
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-
-my $git_dir = File::Spec->rel2abs($ENV{GIT_DIR});
-my $queue_name = `git config --get builder.queue`;chop $queue_name;
-$queue_name =~ m,^([^\s]+)$,; $queue_name = $1; # untaint
-unless ($queue_name) {
- 1 while <STDIN>;
- print STDERR "\nerror: builder.queue not set. Not enqueing.\n\n";
- exit;
-}
-my $queue_lock = "$queue_name.lock";
-
-my @skip;
-open S, "git config --get-all builder.skip|";
-while (<S>) {
- chop;
- push @skip, $_;
-}
-close S;
-
-my @new_branch_base;
-open S, "git config --get-all builder.newBranchBase|";
-while (<S>) {
- chop;
- push @new_branch_base, $_;
-}
-close S;
-
-sub skip ($)
-{
- local $_ = shift;
- foreach my $p (@skip) {
- return 1 if /^$p/;
- }
- 0;
-}
-
-open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
-flock LOCK, LOCK_EX;
-
-my $queue = -f $queue_name ? retrieve $queue_name : [];
-my %existing;
-foreach my $r (@$queue) {
- my ($gd, $ref) = @$r;
- $existing{$gd}{$ref} = $r;
-}
-
-my @new_branch_commits;
-my $loaded_new_branch_commits = 0;
-
-while (<STDIN>) {
- chop;
- my ($old, $new, $ref) = split / /, $_, 3;
-
- next if $old eq $new;
- next if $new =~ /^0{40}$/;
- next if skip $ref;
-
- my $r = $existing{$git_dir}{$ref};
- if ($r) {
- $r->[3] = $new;
- } else {
- if ($old =~ /^0{40}$/) {
- if (!$loaded_new_branch_commits && @new_branch_base) {
- open M,'-|','git','show-ref',@new_branch_base;
- while (<M>) {
- ($_) = split / /, $_;
- push @new_branch_commits, $_;
- }
- close M;
- $loaded_new_branch_commits = 1;
- }
- $old = [@new_branch_commits];
- } else {
- $old = [$old];
- }
-
- $r = [$git_dir, $ref, $old, $new];
- $existing{$git_dir}{$ref} = $r;
- push @$queue, $r;
- }
-}
-nstore $queue, $queue_name;
-
-flock LOCK, LOCK_UN;
-close LOCK;
diff --git a/contrib/convert-grafts-to-replace-refs.sh b/contrib/convert-grafts-to-replace-refs.sh
new file mode 100755
index 0000000000..0cbc917b8c
--- /dev/null
+++ b/contrib/convert-grafts-to-replace-refs.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+# You should execute this script in the repository where you
+# want to convert grafts to replace refs.
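+#
+# Each non-comment line of the grafts file has the form
+# "<commit> [<parent>...]"; for every such line the script runs, roughly
+# speaking (the names below are placeholders, not real object IDs):
+#
+#   git replace --graft <commit> <parent>...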
+
+GRAFTS_FILE="${GIT_DIR:-.git}/info/grafts"
+
+. $(git --exec-path)/git-sh-setup
+
+test -f "$GRAFTS_FILE" || die "Could not find graft file: '$GRAFTS_FILE'"
+
+grep '^[^# ]' "$GRAFTS_FILE" |
+while read definition
+do
+ if test -n "$definition"
+ then
+ echo "Converting: $definition"
+ git replace --graft $definition ||
+ die "Conversion failed for: $definition"
+ fi
+done
+
+mv "$GRAFTS_FILE" "$GRAFTS_FILE.bak" ||
+ die "Could not rename '$GRAFTS_FILE' to '$GRAFTS_FILE.bak'"
+
+echo "Success!"
+echo "All the grafts in '$GRAFTS_FILE' have been converted to replace refs!"
+echo "The grafts file '$GRAFTS_FILE' has been renamed: '$GRAFTS_FILE.bak'"
diff --git a/contrib/convert-objects/git-convert-objects.txt b/contrib/convert-objects/git-convert-objects.txt
index 9718abf86d..f871880cfb 100644
--- a/contrib/convert-objects/git-convert-objects.txt
+++ b/contrib/convert-objects/git-convert-objects.txt
@@ -8,6 +8,7 @@ git-convert-objects - Converts old-style git repository
SYNOPSIS
--------
+[verse]
'git-convert-objects'
DESCRIPTION
@@ -25,4 +26,4 @@ Documentation by David Greaves, Junio C Hamano and the git-list <git@vger.kernel
GIT
---
-Part of the gitlink:git[7] suite
+Part of the linkgit:git[7] suite
diff --git a/contrib/credential/gnome-keyring/.gitignore b/contrib/credential/gnome-keyring/.gitignore
new file mode 100644
index 0000000000..88d8fcdbce
--- /dev/null
+++ b/contrib/credential/gnome-keyring/.gitignore
@@ -0,0 +1 @@
+git-credential-gnome-keyring
diff --git a/contrib/credential/gnome-keyring/Makefile b/contrib/credential/gnome-keyring/Makefile
new file mode 100644
index 0000000000..c3c7c98aa1
--- /dev/null
+++ b/contrib/credential/gnome-keyring/Makefile
@@ -0,0 +1,24 @@
+MAIN:=git-credential-gnome-keyring
+all:: $(MAIN)
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+INCS:=$(shell pkg-config --cflags gnome-keyring-1 glib-2.0)
+LIBS:=$(shell pkg-config --libs gnome-keyring-1 glib-2.0)
+
+SRCS:=$(MAIN).c
+OBJS:=$(SRCS:.c=.o)
+
+%.o: %.c
+ $(CC) $(CFLAGS) $(CPPFLAGS) $(INCS) -o $@ -c $<
+
+$(MAIN): $(OBJS)
+ $(CC) -o $@ $(LDFLAGS) $^ $(LIBS)
+
+clean:
+ @$(RM) $(MAIN) $(OBJS)
diff --git a/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
new file mode 100644
index 0000000000..2a317fca44
--- /dev/null
+++ b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
@@ -0,0 +1,471 @@
+/*
+ * Copyright (C) 2011 John Szakmeister <john@szakmeister.net>
+ * 2012 Philipp A. Hartmann <pah@qo.cx>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+/*
+ * Credits:
+ * - GNOME Keyring API handling originally written by John Szakmeister
+ * - ported to credential helper API by Philipp A. Hartmann
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <glib.h>
+#include <gnome-keyring.h>
+
+#ifdef GNOME_KEYRING_DEFAULT
+
+ /* Modern gnome-keyring */
+
+#include <gnome-keyring-memory.h>
+
+#else
+
+ /*
+ * Support ancient gnome-keyring, circa RHEL 5.X.
+ * GNOME_KEYRING_DEFAULT seems to have been introduced with Gnome 2.22,
+ * and the other features roughly around Gnome 2.20, 6 months before.
+ * Ubuntu 8.04 used Gnome 2.22 (I think). Not sure any distro used 2.20.
+ * So the existence/non-existence of GNOME_KEYRING_DEFAULT seems like
+ * a decent thing to use as an indicator.
+ */
+
+#define GNOME_KEYRING_DEFAULT NULL
+
+/*
+ * ancient gnome-keyring returns DENIED when an entry is not found.
+ * Setting NO_MATCH to DENIED will prevent us from reporting DENIED
+ * errors during get and erase operations, but we will still report
+ * DENIED errors during a store.
+ */
+#define GNOME_KEYRING_RESULT_NO_MATCH GNOME_KEYRING_RESULT_DENIED
+
+#define gnome_keyring_memory_alloc g_malloc
+#define gnome_keyring_memory_free gnome_keyring_free_password
+#define gnome_keyring_memory_strdup g_strdup
+
+static const char *gnome_keyring_result_to_message(GnomeKeyringResult result)
+{
+ switch (result) {
+ case GNOME_KEYRING_RESULT_OK:
+ return "OK";
+ case GNOME_KEYRING_RESULT_DENIED:
+ return "Denied";
+ case GNOME_KEYRING_RESULT_NO_KEYRING_DAEMON:
+ return "No Keyring Daemon";
+ case GNOME_KEYRING_RESULT_ALREADY_UNLOCKED:
+ return "Already UnLocked";
+ case GNOME_KEYRING_RESULT_NO_SUCH_KEYRING:
+ return "No Such Keyring";
+ case GNOME_KEYRING_RESULT_BAD_ARGUMENTS:
+ return "Bad Arguments";
+ case GNOME_KEYRING_RESULT_IO_ERROR:
+ return "IO Error";
+ case GNOME_KEYRING_RESULT_CANCELLED:
+ return "Cancelled";
+ case GNOME_KEYRING_RESULT_ALREADY_EXISTS:
+ return "Already Exists";
+ default:
+ return "Unknown Error";
+ }
+}
+
+/*
+ * Support really ancient gnome-keyring, circa RHEL 4.X.
+ * Just a guess for the Glib version; Glib 2.8 was roughly Gnome 2.12,
+ * which was released with gnome-keyring 0.4.3 (or thereabouts).
+ */
+#if GLIB_MAJOR_VERSION == 2 && GLIB_MINOR_VERSION < 8
+
+static void gnome_keyring_done_cb(GnomeKeyringResult result, gpointer user_data)
+{
+ gpointer *data = (gpointer *)user_data;
+ int *done = (int *)data[0];
+ GnomeKeyringResult *r = (GnomeKeyringResult *)data[1];
+
+ *r = result;
+ *done = 1;
+}
+
+static void wait_for_request_completion(int *done)
+{
+ GMainContext *mc = g_main_context_default();
+ while (!*done)
+ g_main_context_iteration(mc, TRUE);
+}
+
+static GnomeKeyringResult gnome_keyring_item_delete_sync(const char *keyring, guint32 id)
+{
+ int done = 0;
+ GnomeKeyringResult result;
+ gpointer data[] = { &done, &result };
+
+ gnome_keyring_item_delete(keyring, id, gnome_keyring_done_cb, data,
+ NULL);
+
+ wait_for_request_completion(&done);
+
+ return result;
+}
+
+#endif
+#endif
+
+/*
+ * This credential struct and API is simplified from git's credential.{h,c}
+ */
+struct credential {
+ char *protocol;
+ char *host;
+ unsigned short port;
+ char *path;
+ char *username;
+ char *password;
+};
+
+#define CREDENTIAL_INIT { NULL, NULL, 0, NULL, NULL, NULL }
+
+typedef int (*credential_op_cb)(struct credential *);
+
+struct credential_operation {
+ char *name;
+ credential_op_cb op;
+};
+
+#define CREDENTIAL_OP_END { NULL, NULL }
+
+/* ----------------- GNOME Keyring functions ----------------- */
+
+/* create a special keyring option string, if path is given */
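+/*
+ * For example (illustrative values only): host "example.com", port 8080 and
+ * path "repo.git" give the object string "example.com:8080/repo.git"; without
+ * a port it is simply "example.com/repo.git".
+ */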
+static char *keyring_object(struct credential *c)
+{
+ if (!c->path)
+ return NULL;
+
+ if (c->port)
+ return g_strdup_printf("%s:%hd/%s", c->host, c->port, c->path);
+
+ return g_strdup_printf("%s/%s", c->host, c->path);
+}
+
+static int keyring_get(struct credential *c)
+{
+ char *object = NULL;
+ GList *entries;
+ GnomeKeyringNetworkPasswordData *password_data;
+ GnomeKeyringResult result;
+
+ if (!c->protocol || !(c->host || c->path))
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ result = gnome_keyring_find_network_password_sync(
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ &entries);
+
+ g_free(object);
+
+ if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+ return EXIT_SUCCESS;
+
+ if (result == GNOME_KEYRING_RESULT_CANCELLED)
+ return EXIT_SUCCESS;
+
+ if (result != GNOME_KEYRING_RESULT_OK) {
+ g_critical("%s", gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ /* pick the first one from the list */
+ password_data = (GnomeKeyringNetworkPasswordData *)entries->data;
+
+ gnome_keyring_memory_free(c->password);
+ c->password = gnome_keyring_memory_strdup(password_data->password);
+
+ if (!c->username)
+ c->username = g_strdup(password_data->user);
+
+ gnome_keyring_network_password_list_free(entries);
+
+ return EXIT_SUCCESS;
+}
+
+
+static int keyring_store(struct credential *c)
+{
+ guint32 item_id;
+ char *object = NULL;
+ GnomeKeyringResult result;
+
+ /*
+ * Sanity check that what we are storing is actually sensible.
+ * In particular, we can't make a URL without a protocol field.
+ * Without either a host or pathname (depending on the scheme),
+ * we have no primary key. And without a username and password,
+ * we are not actually storing a credential.
+ */
+ if (!c->protocol || !(c->host || c->path) ||
+ !c->username || !c->password)
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ result = gnome_keyring_set_network_password_sync(
+ GNOME_KEYRING_DEFAULT,
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ c->password,
+ &item_id);
+
+ g_free(object);
+
+ if (result != GNOME_KEYRING_RESULT_OK &&
+ result != GNOME_KEYRING_RESULT_CANCELLED) {
+ g_critical("%s", gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ return EXIT_SUCCESS;
+}
+
+static int keyring_erase(struct credential *c)
+{
+ char *object = NULL;
+ GList *entries;
+ GnomeKeyringNetworkPasswordData *password_data;
+ GnomeKeyringResult result;
+
+ /*
+ * Sanity check that we actually have something to match
+ * against. The input we get is a restrictive pattern,
+ * so technically a blank credential means "erase everything".
+ * But it is too easy to accidentally send this, since it is equivalent
+ * to empty input. So explicitly disallow it, and require that the
+ * pattern have some actual content to match.
+ */
+ if (!c->protocol && !c->host && !c->path && !c->username)
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ result = gnome_keyring_find_network_password_sync(
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ &entries);
+
+ g_free(object);
+
+ if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+ return EXIT_SUCCESS;
+
+ if (result == GNOME_KEYRING_RESULT_CANCELLED)
+ return EXIT_SUCCESS;
+
+ if (result != GNOME_KEYRING_RESULT_OK) {
+ g_critical("%s", gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ /* pick the first one from the list (delete all matches?) */
+ password_data = (GnomeKeyringNetworkPasswordData *)entries->data;
+
+ result = gnome_keyring_item_delete_sync(
+ password_data->keyring, password_data->item_id);
+
+ gnome_keyring_network_password_list_free(entries);
+
+ if (result != GNOME_KEYRING_RESULT_OK) {
+ g_critical("%s", gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ return EXIT_SUCCESS;
+}
+
+/*
+ * Table with helper operation callbacks, used by generic
+ * credential helper main function.
+ */
+static struct credential_operation const credential_helper_ops[] = {
+ { "get", keyring_get },
+ { "store", keyring_store },
+ { "erase", keyring_erase },
+ CREDENTIAL_OP_END
+};
+
+/* ------------------ credential functions ------------------ */
+
+static void credential_init(struct credential *c)
+{
+ memset(c, 0, sizeof(*c));
+}
+
+static void credential_clear(struct credential *c)
+{
+ g_free(c->protocol);
+ g_free(c->host);
+ g_free(c->path);
+ g_free(c->username);
+ gnome_keyring_memory_free(c->password);
+
+ credential_init(c);
+}
+
+static int credential_read(struct credential *c)
+{
+ char *buf;
+ size_t line_len;
+ char *key;
+ char *value;
+
+ key = buf = gnome_keyring_memory_alloc(1024);
+
+ while (fgets(buf, 1024, stdin)) {
+ line_len = strlen(buf);
+
+ if (line_len && buf[line_len-1] == '\n')
+ buf[--line_len] = '\0';
+
+ if (!line_len)
+ break;
+
+ value = strchr(buf, '=');
+ if (!value) {
+ g_warning("invalid credential line: %s", key);
+ gnome_keyring_memory_free(buf);
+ return -1;
+ }
+ *value++ = '\0';
+
+ if (!strcmp(key, "protocol")) {
+ g_free(c->protocol);
+ c->protocol = g_strdup(value);
+ } else if (!strcmp(key, "host")) {
+ g_free(c->host);
+ c->host = g_strdup(value);
+ value = strrchr(c->host, ':');
+ if (value) {
+ *value++ = '\0';
+ c->port = atoi(value);
+ }
+ } else if (!strcmp(key, "path")) {
+ g_free(c->path);
+ c->path = g_strdup(value);
+ } else if (!strcmp(key, "username")) {
+ g_free(c->username);
+ c->username = g_strdup(value);
+ } else if (!strcmp(key, "password")) {
+ gnome_keyring_memory_free(c->password);
+ c->password = gnome_keyring_memory_strdup(value);
+ while (*value)
+ *value++ = '\0';
+ }
+ /*
+ * Ignore other lines; we don't know what they mean, but
+ * this future-proofs us when later versions of git do
+ * learn new lines, and the helpers are updated to match.
+ */
+ }
+
+ gnome_keyring_memory_free(buf);
+
+ return 0;
+}
+
+static void credential_write_item(FILE *fp, const char *key, const char *value)
+{
+ if (!value)
+ return;
+ fprintf(fp, "%s=%s\n", key, value);
+}
+
+static void credential_write(const struct credential *c)
+{
+ /* only write username/password, if set */
+ credential_write_item(stdout, "username", c->username);
+ credential_write_item(stdout, "password", c->password);
+}
+
+static void usage(const char *name)
+{
+ struct credential_operation const *try_op = credential_helper_ops;
+ const char *basename = strrchr(name, '/');
+
+ basename = (basename) ? basename + 1 : name;
+ fprintf(stderr, "usage: %s <", basename);
+ while (try_op->name) {
+ fprintf(stderr, "%s", (try_op++)->name);
+ if (try_op->name)
+ fprintf(stderr, "%s", "|");
+ }
+ fprintf(stderr, "%s", ">\n");
+}
+
+int main(int argc, char *argv[])
+{
+ int ret = EXIT_SUCCESS;
+
+ struct credential_operation const *try_op = credential_helper_ops;
+ struct credential cred = CREDENTIAL_INIT;
+
+ if (!argv[1]) {
+ usage(argv[0]);
+ exit(EXIT_FAILURE);
+ }
+
+ g_set_application_name("Git Credential Helper");
+
+ /* lookup operation callback */
+ while (try_op->name && strcmp(argv[1], try_op->name))
+ try_op++;
+
+ /* unsupported operation given -- ignore silently */
+ if (!try_op->name || !try_op->op)
+ goto out;
+
+ ret = credential_read(&cred);
+ if (ret)
+ goto out;
+
+ /* perform credential operation */
+ ret = (*try_op->op)(&cred);
+
+ credential_write(&cred);
+
+out:
+ credential_clear(&cred);
+ return ret;
+}
diff --git a/contrib/credential/netrc/Makefile b/contrib/credential/netrc/Makefile
new file mode 100644
index 0000000000..51b76138a5
--- /dev/null
+++ b/contrib/credential/netrc/Makefile
@@ -0,0 +1,5 @@
+test:
+ ./test.pl
+
+testverbose:
+ ./test.pl -d -v
diff --git a/contrib/credential/netrc/git-credential-netrc b/contrib/credential/netrc/git-credential-netrc
new file mode 100755
index 0000000000..1571a7b269
--- /dev/null
+++ b/contrib/credential/netrc/git-credential-netrc
@@ -0,0 +1,423 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use File::Basename;
+
+my $VERSION = "0.1";
+
+my %options = (
+ help => 0,
+ debug => 0,
+ verbose => 0,
+ insecure => 0,
+ file => [],
+
+ # identical token maps, e.g. host -> host, will be inserted later
+ tmap => {
+ port => 'protocol',
+ machine => 'host',
+ path => 'path',
+ login => 'username',
+ user => 'username',
+ password => 'password',
+ }
+ );
+
+# Map each credential protocol token to itself on the netrc side.
+foreach (values %{$options{tmap}}) {
+ $options{tmap}->{$_} = $_;
+}
+
+# Now, $options{tmap} has a mapping from the netrc format to the Git credential
+# helper protocol.
+
+# Next, we build the reverse token map.
+
+# When $rmap{foo} contains 'bar', that means that what the Git credential helper
+# protocol calls 'bar' is found as 'foo' in the netrc/authinfo file. Keys in
+# %rmap are what we expect to read from the netrc/authinfo file.
+
+my %rmap;
+foreach my $k (keys %{$options{tmap}}) {
+ push @{$rmap{$options{tmap}->{$k}}}, $k;
+}
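+# For example, with the default token map above this yields entries such as
+# $rmap{username} = [qw(login user username)] and $rmap{host} = [qw(machine host)]
+# (the order of elements within each list is not significant).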
+
+Getopt::Long::Configure("bundling");
+
+# TODO: maybe allow the token map $options{tmap} to be configurable.
+GetOptions(\%options,
+ "help|h",
+ "debug|d",
+ "insecure|k",
+ "verbose|v",
+ "file|f=s@",
+ );
+
+if ($options{help}) {
+ my $shortname = basename($0);
+ $shortname =~ s/git-credential-//;
+
+ print <<EOHIPPUS;
+
+$0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] [-v] [-k] get
+
+Version $VERSION by tzz\@lifelogs.com. License: BSD.
+
+Options:
+
+ -f|--file AUTHFILE : specify netrc-style files. Files with the .gpg extension
+ will be decrypted by GPG before parsing. Multiple -f
+ arguments are OK. They are processed in order, and the
+ first matching entry found is returned via the credential
+ helper protocol (see below).
+
+ When no -f option is given, .authinfo.gpg, .netrc.gpg,
+ .authinfo, and .netrc files in your home directory are used
+ in this order.
+
+ -k|--insecure : ignore bad file ownership or permissions
+
+ -d|--debug : turn on debugging (developer info)
+
+ -v|--verbose : be more verbose (show files and information found)
+
+To enable this credential helper:
+
+ git config credential.helper '$shortname -f AUTHFILE1 -f AUTHFILE2'
+
+(Note that Git will prepend "git-credential-" to the helper name and look for it
+in the path.)
+
+...and if you want lots of debugging info:
+
+ git config credential.helper '$shortname -f AUTHFILE -d'
+
+...or to see the files opened and data found:
+
+ git config credential.helper '$shortname -f AUTHFILE -v'
+
+Only "get" mode is supported by this credential helper. It opens every AUTHFILE
+and looks for the first entry that matches the requested search criteria:
+
+ 'port|protocol':
+ The protocol that will be used (e.g., https). (protocol=X)
+
+ 'machine|host':
+ The remote hostname for a network credential. (host=X)
+
+ 'path':
+ The path with which the credential will be used. (path=X)
+
+ 'login|user|username':
+    The credential's username, if we already have one. (username=X)
+
+Thus, when we get this query on STDIN:
+
+host=github.com
+protocol=https
+username=tzz
+
+this credential helper will look for the first entry in every AUTHFILE that
+matches
+
+machine github.com port https login tzz
+
+OR
+
+machine github.com protocol https login tzz
+
+...and so on for the other acceptable tokens listed above. Any unknown tokens
+are simply ignored.
+
+Then, the helper will print out whatever tokens it got from the entry, including
+"password" tokens, mapping back to Git's helper protocol; e.g. "port" is mapped
+back to "protocol". Any redundant entry tokens (part of the original query) are
+skipped.
+
+Again, note that only the first matching entry from all the AUTHFILEs, processed
+in the sequence given on the command line, is used.
+
+Netrc/authinfo tokens can be quoted as 'STRING' or "STRING".
+
+No caching is performed by this credential helper.
+
+EOHIPPUS
+
+ exit 0;
+}
+
+my $mode = shift @ARGV;
+
+# Credentials must get a parameter, so die if it's missing.
+die "Syntax: $0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] get" unless defined $mode;
+
+# Only support 'get' mode; exit silently on any other (unsupported) mode.
+exit 0 unless $mode eq 'get';
+
+my $files = $options{file};
+
+# if no files were given, use a predefined list.
+# note that .gpg files come first
+unless (scalar @$files) {
+ my @candidates = qw[
+ ~/.authinfo.gpg
+ ~/.netrc.gpg
+ ~/.authinfo
+ ~/.netrc
+ ];
+
+ $files = $options{file} = [ map { glob $_ } @candidates ];
+}
+
+my $query = read_credential_data_from_stdin();
+
+FILE:
+foreach my $file (@$files) {
+ my $gpgmode = $file =~ m/\.gpg$/;
+ unless (-r $file) {
+ log_verbose("Unable to read $file; skipping it");
+ next FILE;
+ }
+
+ # the following check is copied from Net::Netrc, for non-GPG files
+ # OS/2 and Win32 do not handle stat in a way compatible with this check :-(
+ unless ($gpgmode || $options{insecure} ||
+ $^O eq 'os2'
+ || $^O eq 'MSWin32'
+ || $^O eq 'MacOS'
+ || $^O =~ /^cygwin/) {
+ my @stat = stat($file);
+
+ if (@stat) {
+ if ($stat[2] & 077) {
+ log_verbose("Insecure $file (mode=%04o); skipping it",
+ $stat[2] & 07777);
+ next FILE;
+ }
+
+ if ($stat[4] != $<) {
+ log_verbose("Not owner of $file; skipping it");
+ next FILE;
+ }
+ }
+ }
+
+ my @entries = load_netrc($file, $gpgmode);
+
+ unless (scalar @entries) {
+ if ($!) {
+ log_verbose("Unable to open $file: $!");
+ } else {
+ log_verbose("No netrc entries found in $file");
+ }
+
+ next FILE;
+ }
+
+ my $entry = find_netrc_entry($query, @entries);
+ if ($entry) {
+ print_credential_data($entry, $query);
+ # we're done!
+ last FILE;
+ }
+}
+
+exit 0;
+
+sub load_netrc {
+ my $file = shift @_;
+ my $gpgmode = shift @_;
+
+ my $io;
+ if ($gpgmode) {
+ my @cmd = (qw(gpg --decrypt), $file);
+ log_verbose("Using GPG to open $file: [@cmd]");
+ open $io, "-|", @cmd;
+ } else {
+ log_verbose("Opening $file...");
+ open $io, '<', $file;
+ }
+
+ # nothing to do if the open failed (we log the error later)
+ return unless $io;
+
+ # Net::Netrc does this, but the functionality is merged with the file
+ # detection logic, so we have to extract just the part we need
+ my @netrc_entries = net_netrc_loader($io);
+
+ # these entries will use the credential helper protocol token names
+ my @entries;
+
+ foreach my $nentry (@netrc_entries) {
+ my %entry;
+ my $num_port;
+
+ if (!defined $nentry->{machine}) {
+ next;
+ }
+ if (defined $nentry->{port} && $nentry->{port} =~ m/^\d+$/) {
+ $num_port = $nentry->{port};
+ delete $nentry->{port};
+ }
+
+ # create the new entry for the credential helper protocol
+ $entry{$options{tmap}->{$_}} = $nentry->{$_} foreach keys %$nentry;
+
+ # for "host X port Y" where Y is an integer (captured by
+ # $num_port above), set the host to "X:Y"
+ if (defined $entry{host} && defined $num_port) {
+ $entry{host} = join(':', $entry{host}, $num_port);
+ }
+
+ push @entries, \%entry;
+ }
+
+ return @entries;
+}
+
+sub net_netrc_loader {
+ my $fh = shift @_;
+ my @entries;
+ my ($mach, $macdef, $tok, @tok);
+
+ LINE:
+ while (<$fh>) {
+ undef $macdef if /\A\n\Z/;
+
+ if ($macdef) {
+ next LINE;
+ }
+
+ s/^\s*//;
+ chomp;
+
+ while (length && s/^("((?:[^"]+|\\.)*)"|((?:[^\\\s]+|\\.)*))\s*//) {
+ (my $tok = $+) =~ s/\\(.)/$1/g;
+ push(@tok, $tok);
+ }
+
+ TOKEN:
+ while (@tok) {
+ if ($tok[0] eq "default") {
+ shift(@tok);
+ $mach = { machine => undef };
+ next TOKEN;
+ }
+
+ $tok = shift(@tok);
+
+ if ($tok eq "machine") {
+ my $host = shift @tok;
+ $mach = { machine => $host };
+ push @entries, $mach;
+ } elsif (exists $options{tmap}->{$tok}) {
+ unless ($mach) {
+ log_debug("Skipping token $tok because no machine was given");
+ next TOKEN;
+ }
+
+ my $value = shift @tok;
+ unless (defined $value) {
+ log_debug("Token $tok had no value, skipping it.");
+ next TOKEN;
+ }
+
+ # Following line added by rmerrell to remove '/' escape char in .netrc
+ $value =~ s/\/\\/\\/g;
+ $mach->{$tok} = $value;
+ } elsif ($tok eq "macdef") { # we ignore macros
+ next TOKEN unless $mach;
+ my $value = shift @tok;
+ $macdef = 1;
+ }
+ }
+ }
+
+ return @entries;
+}
+
+sub read_credential_data_from_stdin {
+ # the query: start with every token with no value
+ my %q = map { $_ => undef } values(%{$options{tmap}});
+
+ while (<STDIN>) {
+ next unless m/^([^=]+)=(.+)/;
+
+ my ($token, $value) = ($1, $2);
+ die "Unknown search token $token" unless exists $q{$token};
+ $q{$token} = $value;
+ log_debug("We were given search token $token and value $value");
+ }
+
+ foreach (sort keys %q) {
+ log_debug("Searching for %s = %s", $_, $q{$_} || '(any value)');
+ }
+
+ return \%q;
+}
+
+# takes the search tokens and then a list of entries
+# each entry is a hash reference
+sub find_netrc_entry {
+ my $query = shift @_;
+
+ ENTRY:
+ foreach my $entry (@_)
+ {
+ my $entry_text = join ', ', map { "$_=$entry->{$_}" } keys %$entry;
+ foreach my $check (sort keys %$query) {
+ if (!defined $entry->{$check}) {
+ log_debug("OK: entry has no $check token, so any value satisfies check $check");
+ } elsif (defined $query->{$check}) {
+ log_debug("compare %s [%s] to [%s] (entry: %s)",
+ $check,
+ $entry->{$check},
+ $query->{$check},
+ $entry_text);
+ unless ($query->{$check} eq $entry->{$check}) {
+ next ENTRY;
+ }
+ } else {
+ log_debug("OK: any value satisfies check $check");
+ }
+ }
+
+ return $entry;
+ }
+
+ # nothing was found
+ return;
+}
+
+sub print_credential_data {
+ my $entry = shift @_;
+ my $query = shift @_;
+
+ log_debug("entry has passed all the search checks");
+ TOKEN:
+ foreach my $git_token (sort keys %$entry) {
+ log_debug("looking for useful token $git_token");
+ # don't print unknown (to the credential helper protocol) tokens
+ next TOKEN unless exists $query->{$git_token};
+
+ # don't print things asked in the query (the entry matches them)
+ next TOKEN if defined $query->{$git_token};
+
+ log_debug("FOUND: $git_token=$entry->{$git_token}");
+ printf "%s=%s\n", $git_token, $entry->{$git_token};
+ }
+}
+sub log_verbose {
+ return unless $options{verbose};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
+
+sub log_debug {
+ return unless $options{debug};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
diff --git a/contrib/credential/netrc/test.netrc b/contrib/credential/netrc/test.netrc
new file mode 100644
index 0000000000..ba119a937f
--- /dev/null
+++ b/contrib/credential/netrc/test.netrc
@@ -0,0 +1,13 @@
+machine imap login tzz@lifelogs.com port imaps password letmeknow
+machine imap login bob port imaps password bobwillknow
+
+# comment test
+
+machine imap2 login tzz port 1099 password tzzknow
+machine imap2 login bob password bobwillknow
+
+# another comment
+
+machine github.com
+ multilinetoken anothervalue
+ login carol password carolknows
diff --git a/contrib/credential/netrc/test.pl b/contrib/credential/netrc/test.pl
new file mode 100755
index 0000000000..169b6463c3
--- /dev/null
+++ b/contrib/credential/netrc/test.pl
@@ -0,0 +1,106 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+use Test;
+use IPC::Open2;
+
+BEGIN { plan tests => 15 }
+
+my @global_credential_args = @ARGV;
+my $netrc = './test.netrc';
+print "# Testing insecure file, nothing should be found\n";
+chmod 0644, $netrc;
+my $cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from insecure file");
+
+print "# Testing missing file, nothing should be found\n";
+chmod 0644, $netrc;
+$cred = run_credential(['-f', '///nosuchfile///', 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from missing file");
+
+chmod 0600, $netrc;
+
+print "# Testing with invalid data\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ "bad data");
+ok(scalar keys %$cred, 4, "Got first found keys with bad data");
+
+print "# Testing netrc file for a missing corovamilkbar entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'corovamilkbar' });
+
+ok(scalar keys %$cred, 0, "Got no corovamilkbar keys");
+
+print "# Testing netrc file for a github.com entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 2, "Got 2 Github keys");
+
+ok($cred->{password}, 'carolknows', "Got correct Github password");
+ok($cred->{username}, 'carol', "Got correct Github username");
+
+print "# Testing netrc file for a username-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap', username => 'bob' });
+
+ok(scalar keys %$cred, 2, "Got 2 username-specific keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct user-specific password");
+ok($cred->{protocol}, 'imaps', "Got correct user-specific protocol");
+
+print "# Testing netrc file for a host:port-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2:1099' });
+
+ok(scalar keys %$cred, 2, "Got 2 host:port-specific keys");
+
+ok($cred->{password}, 'tzzknow', "Got correct host:port-specific password");
+ok($cred->{username}, 'tzz', "Got correct host:port-specific username");
+
+print "# Testing netrc file that 'host:port kills host' entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2' });
+
+ok(scalar keys %$cred, 2, "Got 2 'host:port kills host' keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct 'host:port kills host' password");
+ok($cred->{username}, 'bob', "Got correct 'host:port kills host' username");
+
+sub run_credential
+{
+ my $args = shift @_;
+ my $data = shift @_;
+ my $pid = open2(my $chld_out, my $chld_in,
+ './git-credential-netrc', @global_credential_args,
+ @$args);
+
+ die "Couldn't open pipe to netrc credential helper: $!" unless $pid;
+
+ if (ref $data eq 'HASH')
+ {
+ print $chld_in "$_=$data->{$_}\n" foreach sort keys %$data;
+ }
+ else
+ {
+ print $chld_in "$data\n";
+ }
+
+ close $chld_in;
+ my %ret;
+
+ while (<$chld_out>)
+ {
+ chomp;
+ next unless m/^([^=]+)=(.+)/;
+
+ $ret{$1} = $2;
+ }
+
+ return \%ret;
+}
diff --git a/contrib/credential/osxkeychain/.gitignore b/contrib/credential/osxkeychain/.gitignore
new file mode 100644
index 0000000000..6c5b7026c5
--- /dev/null
+++ b/contrib/credential/osxkeychain/.gitignore
@@ -0,0 +1 @@
+git-credential-osxkeychain
diff --git a/contrib/credential/osxkeychain/Makefile b/contrib/credential/osxkeychain/Makefile
new file mode 100644
index 0000000000..4b3a08a2ba
--- /dev/null
+++ b/contrib/credential/osxkeychain/Makefile
@@ -0,0 +1,17 @@
+all:: git-credential-osxkeychain
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+git-credential-osxkeychain: git-credential-osxkeychain.o
+ $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) -Wl,-framework -Wl,Security
+
+git-credential-osxkeychain.o: git-credential-osxkeychain.c
+ $(CC) -c $(CFLAGS) $<
+
+clean:
+ $(RM) git-credential-osxkeychain git-credential-osxkeychain.o
diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
new file mode 100644
index 0000000000..bcd3f575a3
--- /dev/null
+++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
@@ -0,0 +1,183 @@
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <Security/Security.h>
+
+static SecProtocolType protocol;
+static char *host;
+static char *path;
+static char *username;
+static char *password;
+static UInt16 port;
+
+static void die(const char *err, ...)
+{
+ char msg[4096];
+ va_list params;
+ va_start(params, err);
+ vsnprintf(msg, sizeof(msg), err, params);
+ fprintf(stderr, "%s\n", msg);
+ va_end(params);
+ exit(1);
+}
+
+static void *xstrdup(const char *s1)
+{
+ void *ret = strdup(s1);
+ if (!ret)
+ die("Out of memory");
+ return ret;
+}
+
+#define KEYCHAIN_ITEM(x) (x ? strlen(x) : 0), x
+#define KEYCHAIN_ARGS \
+ NULL, /* default keychain */ \
+ KEYCHAIN_ITEM(host), \
+ 0, NULL, /* account domain */ \
+ KEYCHAIN_ITEM(username), \
+ KEYCHAIN_ITEM(path), \
+ port, \
+ protocol, \
+ kSecAuthenticationTypeDefault
+
+static void write_item(const char *what, const char *buf, int len)
+{
+ printf("%s=", what);
+ fwrite(buf, 1, len, stdout);
+ putchar('\n');
+}
+
+static void find_username_in_item(SecKeychainItemRef item)
+{
+ SecKeychainAttributeList list;
+ SecKeychainAttribute attr;
+
+ list.count = 1;
+ list.attr = &attr;
+ attr.tag = kSecAccountItemAttr;
+
+ if (SecKeychainItemCopyContent(item, NULL, &list, NULL, NULL))
+ return;
+
+ write_item("username", attr.data, attr.length);
+ SecKeychainItemFreeContent(&list, NULL);
+}
+
+static void find_internet_password(void)
+{
+ void *buf;
+ UInt32 len;
+ SecKeychainItemRef item;
+
+ if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, &len, &buf, &item))
+ return;
+
+ write_item("password", buf, len);
+ if (!username)
+ find_username_in_item(item);
+
+ SecKeychainItemFreeContent(NULL, buf);
+}
+
+static void delete_internet_password(void)
+{
+ SecKeychainItemRef item;
+
+ /*
+ * Require at least a protocol and host for removal, which is what git
+ * will give us; if you want to do something more fancy, use the
+ * Keychain manager.
+ */
+ if (!protocol || !host)
+ return;
+
+ if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, 0, NULL, &item))
+ return;
+
+ SecKeychainItemDelete(item);
+}
+
+static void add_internet_password(void)
+{
+ /* Only store complete credentials */
+ if (!protocol || !host || !username || !password)
+ return;
+
+ if (SecKeychainAddInternetPassword(
+ KEYCHAIN_ARGS,
+ KEYCHAIN_ITEM(password),
+ NULL))
+ return;
+}
+
+static void read_credential(void)
+{
+ char buf[1024];
+
+ while (fgets(buf, sizeof(buf), stdin)) {
+ char *v;
+
+ if (!strcmp(buf, "\n"))
+ break;
+ buf[strlen(buf)-1] = '\0';
+
+ v = strchr(buf, '=');
+ if (!v)
+ die("bad input: %s", buf);
+ *v++ = '\0';
+
+ if (!strcmp(buf, "protocol")) {
+ if (!strcmp(v, "imap"))
+ protocol = kSecProtocolTypeIMAP;
+ else if (!strcmp(v, "imaps"))
+ protocol = kSecProtocolTypeIMAPS;
+ else if (!strcmp(v, "ftp"))
+ protocol = kSecProtocolTypeFTP;
+ else if (!strcmp(v, "ftps"))
+ protocol = kSecProtocolTypeFTPS;
+ else if (!strcmp(v, "https"))
+ protocol = kSecProtocolTypeHTTPS;
+ else if (!strcmp(v, "http"))
+ protocol = kSecProtocolTypeHTTP;
+ else if (!strcmp(v, "smtp"))
+ protocol = kSecProtocolTypeSMTP;
+ else /* we don't yet handle other protocols */
+ exit(0);
+ }
+ else if (!strcmp(buf, "host")) {
+ char *colon = strchr(v, ':');
+ if (colon) {
+ *colon++ = '\0';
+ port = atoi(colon);
+ }
+ host = xstrdup(v);
+ }
+ else if (!strcmp(buf, "path"))
+ path = xstrdup(v);
+ else if (!strcmp(buf, "username"))
+ username = xstrdup(v);
+ else if (!strcmp(buf, "password"))
+ password = xstrdup(v);
+ }
+}
+
+int main(int argc, const char **argv)
+{
+ const char *usage =
+ "usage: git credential-osxkeychain <get|store|erase>";
+
+ if (!argv[1])
+ die(usage);
+
+ read_credential();
+
+ if (!strcmp(argv[1], "get"))
+ find_internet_password();
+ else if (!strcmp(argv[1], "store"))
+ add_internet_password();
+ else if (!strcmp(argv[1], "erase"))
+ delete_internet_password();
+ /* otherwise, ignore unknown action */
+
+ return 0;
+}
diff --git a/contrib/credential/wincred/Makefile b/contrib/credential/wincred/Makefile
new file mode 100644
index 0000000000..6e992c0866
--- /dev/null
+++ b/contrib/credential/wincred/Makefile
@@ -0,0 +1,22 @@
+all: git-credential-wincred.exe
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+CC ?= gcc
+RM ?= rm -f
+CFLAGS ?= -O2 -Wall
+
+prefix ?= /usr/local
+libexecdir ?= $(prefix)/libexec/git-core
+
+INSTALL ?= install
+
+git-credential-wincred.exe : git-credential-wincred.c
+ $(LINK.c) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+install: git-credential-wincred.exe
+ $(INSTALL) -m 755 $^ $(libexecdir)
+
+clean:
+ $(RM) git-credential-wincred.exe
diff --git a/contrib/credential/wincred/git-credential-wincred.c b/contrib/credential/wincred/git-credential-wincred.c
new file mode 100644
index 0000000000..a1d38f035b
--- /dev/null
+++ b/contrib/credential/wincred/git-credential-wincred.c
@@ -0,0 +1,301 @@
+/*
+ * A git credential helper that interfaces with Windows' Credential Manager
+ *
+ */
+#include <windows.h>
+#include <stdio.h>
+#include <io.h>
+#include <fcntl.h>
+
+/* common helpers */
+
+#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0]))
+
+static void die(const char *err, ...)
+{
+ char msg[4096];
+ va_list params;
+ va_start(params, err);
+ vsnprintf(msg, sizeof(msg), err, params);
+ fprintf(stderr, "%s\n", msg);
+ va_end(params);
+ exit(1);
+}
+
+static void *xmalloc(size_t size)
+{
+ void *ret = malloc(size);
+ if (!ret && !size)
+ ret = malloc(1);
+ if (!ret)
+ die("Out of memory");
+ return ret;
+}
+
+/* MinGW doesn't have wincred.h, so we need to define stuff */
+
+typedef struct _CREDENTIAL_ATTRIBUTEW {
+ LPWSTR Keyword;
+ DWORD Flags;
+ DWORD ValueSize;
+ LPBYTE Value;
+} CREDENTIAL_ATTRIBUTEW, *PCREDENTIAL_ATTRIBUTEW;
+
+typedef struct _CREDENTIALW {
+ DWORD Flags;
+ DWORD Type;
+ LPWSTR TargetName;
+ LPWSTR Comment;
+ FILETIME LastWritten;
+ DWORD CredentialBlobSize;
+ LPBYTE CredentialBlob;
+ DWORD Persist;
+ DWORD AttributeCount;
+ PCREDENTIAL_ATTRIBUTEW Attributes;
+ LPWSTR TargetAlias;
+ LPWSTR UserName;
+} CREDENTIALW, *PCREDENTIALW;
+
+#define CRED_TYPE_GENERIC 1
+#define CRED_PERSIST_LOCAL_MACHINE 2
+#define CRED_MAX_ATTRIBUTES 64
+
+typedef BOOL (WINAPI *CredWriteWT)(PCREDENTIALW, DWORD);
+typedef BOOL (WINAPI *CredEnumerateWT)(LPCWSTR, DWORD, DWORD *,
+ PCREDENTIALW **);
+typedef VOID (WINAPI *CredFreeT)(PVOID);
+typedef BOOL (WINAPI *CredDeleteWT)(LPCWSTR, DWORD, DWORD);
+
+static HMODULE advapi;
+static CredWriteWT CredWriteW;
+static CredEnumerateWT CredEnumerateW;
+static CredFreeT CredFree;
+static CredDeleteWT CredDeleteW;
+
+static void load_cred_funcs(void)
+{
+ /* load DLLs */
+ advapi = LoadLibrary("advapi32.dll");
+ if (!advapi)
+ die("failed to load advapi32.dll");
+
+ /* get function pointers */
+ CredWriteW = (CredWriteWT)GetProcAddress(advapi, "CredWriteW");
+ CredEnumerateW = (CredEnumerateWT)GetProcAddress(advapi,
+ "CredEnumerateW");
+ CredFree = (CredFreeT)GetProcAddress(advapi, "CredFree");
+ CredDeleteW = (CredDeleteWT)GetProcAddress(advapi, "CredDeleteW");
+ if (!CredWriteW || !CredEnumerateW || !CredFree || !CredDeleteW)
+ die("failed to load functions");
+}
+
+static WCHAR *wusername, *password, *protocol, *host, *path, target[1024];
+
+static void write_item(const char *what, LPCWSTR wbuf, int wlen)
+{
+ char *buf;
+ int len = WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, NULL, 0, NULL,
+ FALSE);
+ buf = xmalloc(len);
+
+ if (!WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, buf, len, NULL, FALSE))
+ die("WideCharToMultiByte failed!");
+
+ printf("%s=", what);
+ fwrite(buf, 1, len, stdout);
+ putchar('\n');
+ free(buf);
+}
+
+/*
+ * Match an (optional) expected string and a delimiter in the target string,
+ * consuming the matched text by updating the target pointer.
+ */
+static int match_part(LPCWSTR *ptarget, LPCWSTR want, LPCWSTR delim)
+{
+ LPCWSTR delim_pos, start = *ptarget;
+ int len;
+
+ /* find start of delimiter (or end-of-string if delim is empty) */
+ if (*delim)
+ delim_pos = wcsstr(start, delim);
+ else
+ delim_pos = start + wcslen(start);
+
+ /*
+ * match text up to delimiter, or end of string (e.g. the '/' after
+ * host is optional if not followed by a path)
+ */
+ if (delim_pos)
+ len = delim_pos - start;
+ else
+ len = wcslen(start);
+
+ /* update ptarget if we either found a delimiter or need a match */
+ if (delim_pos || want)
+ *ptarget = delim_pos ? delim_pos + wcslen(delim) : start + len;
+
+ return !want || (!wcsncmp(want, start, len) && !want[len]);
+}
+
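+/*
+ * For example (a purely illustrative credential), a stored target name such as
+ *
+ *   git:https://alice@example.com/repo.git
+ *
+ * is matched part by part: the literal "git" up to ":", the protocol up to
+ * "://", the username up to "@", the host up to "/", and the path up to the
+ * end of the string.
+ */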
+static int match_cred(const CREDENTIALW *cred)
+{
+ LPCWSTR target = cred->TargetName;
+ if (wusername && wcscmp(wusername, cred->UserName))
+ return 0;
+
+ return match_part(&target, L"git", L":") &&
+ match_part(&target, protocol, L"://") &&
+ match_part(&target, wusername, L"@") &&
+ match_part(&target, host, L"/") &&
+ match_part(&target, path, L"");
+}
+
+static void get_credential(void)
+{
+ CREDENTIALW **creds;
+ DWORD num_creds;
+ int i;
+
+ if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+ return;
+
+ /* search for the first credential that matches the request */
+ for (i = 0; i < num_creds; ++i)
+ if (match_cred(creds[i])) {
+ write_item("username", creds[i]->UserName,
+ wcslen(creds[i]->UserName));
+ write_item("password",
+ (LPCWSTR)creds[i]->CredentialBlob,
+ creds[i]->CredentialBlobSize / sizeof(WCHAR));
+ break;
+ }
+
+ CredFree(creds);
+}
+
+static void store_credential(void)
+{
+ CREDENTIALW cred;
+
+ if (!wusername || !password)
+ return;
+
+ cred.Flags = 0;
+ cred.Type = CRED_TYPE_GENERIC;
+ cred.TargetName = target;
+ cred.Comment = L"saved by git-credential-wincred";
+ cred.CredentialBlobSize = (wcslen(password)) * sizeof(WCHAR);
+ cred.CredentialBlob = (LPVOID)password;
+ cred.Persist = CRED_PERSIST_LOCAL_MACHINE;
+ cred.AttributeCount = 0;
+ cred.Attributes = NULL;
+ cred.TargetAlias = NULL;
+ cred.UserName = wusername;
+
+ if (!CredWriteW(&cred, 0))
+ die("CredWrite failed");
+}
+
+static void erase_credential(void)
+{
+ CREDENTIALW **creds;
+ DWORD num_creds;
+ int i;
+
+ if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+ return;
+
+ for (i = 0; i < num_creds; ++i) {
+ if (match_cred(creds[i]))
+ CredDeleteW(creds[i]->TargetName, creds[i]->Type, 0);
+ }
+
+ CredFree(creds);
+}
+
+static WCHAR *utf8_to_utf16_dup(const char *str)
+{
+ int wlen = MultiByteToWideChar(CP_UTF8, 0, str, -1, NULL, 0);
+ WCHAR *wstr = xmalloc(sizeof(WCHAR) * wlen);
+ MultiByteToWideChar(CP_UTF8, 0, str, -1, wstr, wlen);
+ return wstr;
+}
+
+static void read_credential(void)
+{
+ char buf[1024];
+
+ while (fgets(buf, sizeof(buf), stdin)) {
+ char *v;
+ int len = strlen(buf);
+ /* strip trailing CR / LF */
+ while (len && strchr("\r\n", buf[len - 1]))
+ buf[--len] = 0;
+
+ if (!*buf)
+ break;
+
+ v = strchr(buf, '=');
+ if (!v)
+ die("bad input: %s", buf);
+ *v++ = '\0';
+
+ if (!strcmp(buf, "protocol"))
+ protocol = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "host"))
+ host = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "path"))
+ path = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "username")) {
+ wusername = utf8_to_utf16_dup(v);
+ } else if (!strcmp(buf, "password"))
+ password = utf8_to_utf16_dup(v);
+ else
+ die("unrecognized input");
+ }
+}
+
+int main(int argc, char *argv[])
+{
+ const char *usage =
+ "usage: git credential-wincred <get|store|erase>\n";
+
+ if (!argv[1])
+ die(usage);
+
+ /* git uses binary pipes to avoid CRLF issues */
+ _setmode(_fileno(stdin), _O_BINARY);
+ _setmode(_fileno(stdout), _O_BINARY);
+
+ read_credential();
+
+ load_cred_funcs();
+
+ if (!protocol || !(host || path))
+ return 0;
+
+ /* prepare 'target', the unique key for the credential */
+ wcscpy(target, L"git:");
+ wcsncat(target, protocol, ARRAY_SIZE(target));
+ wcsncat(target, L"://", ARRAY_SIZE(target));
+ if (wusername) {
+ wcsncat(target, wusername, ARRAY_SIZE(target));
+ wcsncat(target, L"@", ARRAY_SIZE(target));
+ }
+ if (host)
+ wcsncat(target, host, ARRAY_SIZE(target));
+ if (path) {
+ wcsncat(target, L"/", ARRAY_SIZE(target));
+ wcsncat(target, path, ARRAY_SIZE(target));
+ }
+
+ if (!strcmp(argv[1], "get"))
+ get_credential();
+ else if (!strcmp(argv[1], "store"))
+ store_credential();
+ else if (!strcmp(argv[1], "erase"))
+ erase_credential();
+ /* otherwise, ignore unknown action */
+ return 0;
+}
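
The helper above speaks git's credential protocol on stdin/stdout: newline-terminated key=value attributes ending with a blank line. A minimal sketch of driving it by hand on Windows follows; the hostname, username and password are placeholders, not anything from the patch itself.

---------------------------------------------
# Store a credential; the helper builds the key
# "git:https://alice@example.com" from these attributes.
printf 'protocol=https\nhost=example.com\nusername=alice\npassword=s3cret\n\n' |
git credential-wincred store

# Look it up again; output is "username=..." and "password=..." lines.
printf 'protocol=https\nhost=example.com\n\n' |
git credential-wincred get

# Remove it.
printf 'protocol=https\nhost=example.com\nusername=alice\n\n' |
git credential-wincred erase
---------------------------------------------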
diff --git a/contrib/diff-highlight/README b/contrib/diff-highlight/README
new file mode 100644
index 0000000000..836b97a730
--- /dev/null
+++ b/contrib/diff-highlight/README
@@ -0,0 +1,193 @@
+diff-highlight
+==============
+
+Line oriented diffs are great for reviewing code, because for most
+hunks, you want to see the old and the new segments of code next to each
+other. Sometimes, though, when an old line and a new line are very
+similar, it's hard to immediately see the difference.
+
+You can use "--color-words" to highlight only the changed portions of
+lines. However, this can often be hard to read for code, as it loses
+the line structure, and you end up with oddly formatted bits.
+
+Instead, this script post-processes the line-oriented diff, finds pairs
+of lines, and highlights the differing segments. It's currently very
+simple and stupid about doing these tasks. In particular:
+
+ 1. It will only highlight hunks in which the number of removed and
+ added lines is the same, and it will pair lines within the hunk by
+ position (so the first removed line is compared to the first added
+ line, and so forth). This is simple and tends to work well in
+ practice. More complex changes don't highlight well, so we tend to
+ exclude them due to the "same number of removed and added lines"
+ restriction. Or even if we do try to highlight them, they end up
+ not highlighting because of our "don't highlight if the whole line
+ would be highlighted" rule.
+
+ 2. It will find the common prefix and suffix of two lines, and
+ consider everything in the middle to be "different". It could
+ instead do a real diff of the characters between the two lines and
+ find common subsequences. However, the point of the highlight is to
+ call attention to a certain area. Even if some small subset of the
+ highlighted area actually didn't change, that's OK. In practice it
+ ends up being more readable to just have a single blob on the line
+ showing the interesting bit.
+
+The goal of the script is therefore not to be exact about highlighting
+changes, but to call attention to areas of interest without being
+visually distracting. Non-diff lines and existing diff coloration is
+preserved; the intent is that the output should look exactly the same as
+the input, except for the occasional highlight.
+
+Use
+---
+
+You can try out the diff-highlight program with:
+
+---------------------------------------------
+git log -p --color | /path/to/diff-highlight
+---------------------------------------------
+
+If you want to use it all the time, drop it in your $PATH and put the
+following in your git configuration:
+
+---------------------------------------------
+[pager]
+ log = diff-highlight | less
+ show = diff-highlight | less
+ diff = diff-highlight | less
+---------------------------------------------
+
+
+Color Config
+------------
+
+You can configure the highlight colors and attributes using git's
+config. The colors for "old" and "new" lines can be specified
+independently. There are two "modes" of configuration:
+
+ 1. You can specify a "highlight" color and a matching "reset" color.
+ This will retain any existing colors in the diff, and apply the
+ "highlight" and "reset" colors before and after the highlighted
+ portion.
+
+ 2. You can specify a "normal" color and a "highlight" color. In this
+ case, existing colors are dropped from that line. The non-highlighted
+ bits of the line get the "normal" color, and the highlights get the
+ "highlight" color.
+
+If no "new" colors are specified, they default to the "old" colors. If
+no "old" colors are specified, the default is to reverse the foreground
+and background for highlighted portions.
+
+Examples:
+
+---------------------------------------------
+# Underline highlighted portions
+[color "diff-highlight"]
+oldHighlight = ul
+oldReset = noul
+---------------------------------------------
+
+---------------------------------------------
+# Varying background intensities
+[color "diff-highlight"]
+oldNormal = "black #f8cbcb"
+oldHighlight = "black #ffaaaa"
+newNormal = "black #cbeecb"
+newHighlight = "black #aaffaa"
+---------------------------------------------
+
+
+Bugs
+----
+
+Because diff-highlight relies on heuristics to guess which parts of
+changes are important, there are some cases where the highlighting is
+more distracting than useful. Fortunately, these cases are rare in
+practice, and when they do occur, the worst case is simply a little
+extra highlighting. This section documents some cases known to be
+sub-optimal, in case somebody feels like working on improving the
+heuristics.
+
+1. Two changes on the same line get highlighted in a blob. For example,
+ highlighting:
+
+----------------------------------------------
+-foo(buf, size);
++foo(obj->buf, obj->size);
+----------------------------------------------
+
+ yields (where the inside of "+{}" would be highlighted):
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->buf, obj->}size);
+----------------------------------------------
+
+ whereas a more semantically meaningful output would be:
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->}buf, +{obj->}size);
+----------------------------------------------
+
+ Note that doing this right would probably involve a set of
+ content-specific boundary patterns, similar to word-diff. Otherwise
+ you get junk like:
+
+-----------------------------------------------------
+-this line has some -{i}nt-{ere}sti-{ng} text on it
++this line has some +{fa}nt+{a}sti+{c} text on it
+-----------------------------------------------------
+
+ which is less readable than the current output.
+
+2. The multi-line matching assumes that lines in the pre- and post-image
+ match by position. This is often the case, but can be fooled when a
+ line is removed from the top and a new one added at the bottom (or
+ vice versa). Unless the lines in the middle are also changed, diffs
+ will show this as two hunks, and it will not get highlighted at all
+ (which is good). But if the lines in the middle are changed, the
+ highlighting can be misleading. Here's a pathological case:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two 2
++three 3
++four 4
++five 5
+-----------------------------------------------------
+
+ which gets highlighted as:
+
+-----------------------------------------------------
+-one
+-t-{wo}
+-three
+-f-{our}
++two 2
++t+{hree 3}
++four 4
++f+{ive 5}
+-----------------------------------------------------
+
+ because it matches "two" to "three 3", and so forth. It would be
+ nicer as:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two +{2}
++three +{3}
++four +{4}
++five 5
+-----------------------------------------------------
+
+ which would probably involve pre-matching the lines into pairs
+ according to some heuristic.
diff --git a/contrib/diff-highlight/diff-highlight b/contrib/diff-highlight/diff-highlight
new file mode 100755
index 0000000000..08c88bbc87
--- /dev/null
+++ b/contrib/diff-highlight/diff-highlight
@@ -0,0 +1,213 @@
+#!/usr/bin/perl
+
+use warnings FATAL => 'all';
+use strict;
+
+# Highlight by reversing foreground and background. You could do
+# other things like bold or underline if you prefer.
+my @OLD_HIGHLIGHT = (
+ color_config('color.diff-highlight.oldnormal'),
+ color_config('color.diff-highlight.oldhighlight', "\x1b[7m"),
+ color_config('color.diff-highlight.oldreset', "\x1b[27m")
+);
+my @NEW_HIGHLIGHT = (
+ color_config('color.diff-highlight.newnormal', $OLD_HIGHLIGHT[0]),
+ color_config('color.diff-highlight.newhighlight', $OLD_HIGHLIGHT[1]),
+ color_config('color.diff-highlight.newreset', $OLD_HIGHLIGHT[2])
+);
+
+my $RESET = "\x1b[m";
+my $COLOR = qr/\x1b\[[0-9;]*m/;
+my $BORING = qr/$COLOR|\s/;
+
+my @removed;
+my @added;
+my $in_hunk;
+
+# Some scripts may not realize that SIGPIPE is being ignored when launching the
+# pager -- for instance, scripts written in Python.
+$SIG{PIPE} = 'DEFAULT';
+
+while (<>) {
+ if (!$in_hunk) {
+ print;
+ $in_hunk = /^$COLOR*\@/;
+ }
+ elsif (/^$COLOR*-/) {
+ push @removed, $_;
+ }
+ elsif (/^$COLOR*\+/) {
+ push @added, $_;
+ }
+ else {
+ show_hunk(\@removed, \@added);
+ @removed = ();
+ @added = ();
+
+ print;
+ $in_hunk = /^$COLOR*[\@ ]/;
+ }
+
+ # Most of the time there is enough output to keep things streaming,
+ # but for something like "git log -Sfoo", you can get one early
+ # commit and then many seconds of nothing. We want to show
+ # that one commit as soon as possible.
+ #
+ # Since we can receive arbitrary input, there's no optimal
+ # place to flush. Flushing on a blank line is a heuristic that
+ # happens to match git-log output.
+ if (!length) {
+ local $| = 1;
+ }
+}
+
+# Flush any queued hunk (this can happen when there is no trailing context in
+# the final diff of the input).
+show_hunk(\@removed, \@added);
+
+exit 0;
+
+# Ideally we would feed the default as a human-readable color to
+# git-config as the fallback value. But diff-highlight does
+# not otherwise depend on git at all, and there are reports
+# of it being used in other settings. Let's handle our own
+# fallback, which means we will work even if git can't be run.
+sub color_config {
+ my ($key, $default) = @_;
+ my $s = `git config --get-color $key 2>/dev/null`;
+ return length($s) ? $s : $default;
+}
+
+sub show_hunk {
+ my ($a, $b) = @_;
+
+ # If one side is empty, then there is nothing to compare or highlight.
+ if (!@$a || !@$b) {
+ print @$a, @$b;
+ return;
+ }
+
+ # If we have mismatched numbers of lines on each side, we could try to
+ # be clever and match up similar lines. But for now we are simple and
+ # stupid, and only handle multi-line hunks that remove and add the same
+ # number of lines.
+ if (@$a != @$b) {
+ print @$a, @$b;
+ return;
+ }
+
+ my @queue;
+ for (my $i = 0; $i < @$a; $i++) {
+ my ($rm, $add) = highlight_pair($a->[$i], $b->[$i]);
+ print $rm;
+ push @queue, $add;
+ }
+ print @queue;
+}
+
+sub highlight_pair {
+ my @a = split_line(shift);
+ my @b = split_line(shift);
+
+ # Find common prefix, taking care to skip any ansi
+ # color codes.
+ my $seen_plusminus;
+ my ($pa, $pb) = (0, 0);
+ while ($pa < @a && $pb < @b) {
+ if ($a[$pa] =~ /$COLOR/) {
+ $pa++;
+ }
+ elsif ($b[$pb] =~ /$COLOR/) {
+ $pb++;
+ }
+ elsif ($a[$pa] eq $b[$pb]) {
+ $pa++;
+ $pb++;
+ }
+ elsif (!$seen_plusminus && $a[$pa] eq '-' && $b[$pb] eq '+') {
+ $seen_plusminus = 1;
+ $pa++;
+ $pb++;
+ }
+ else {
+ last;
+ }
+ }
+
+ # Find common suffix, ignoring colors.
+ my ($sa, $sb) = ($#a, $#b);
+ while ($sa >= $pa && $sb >= $pb) {
+ if ($a[$sa] =~ /$COLOR/) {
+ $sa--;
+ }
+ elsif ($b[$sb] =~ /$COLOR/) {
+ $sb--;
+ }
+ elsif ($a[$sa] eq $b[$sb]) {
+ $sa--;
+ $sb--;
+ }
+ else {
+ last;
+ }
+ }
+
+ if (is_pair_interesting(\@a, $pa, $sa, \@b, $pb, $sb)) {
+ return highlight_line(\@a, $pa, $sa, \@OLD_HIGHLIGHT),
+ highlight_line(\@b, $pb, $sb, \@NEW_HIGHLIGHT);
+ }
+ else {
+ return join('', @a),
+ join('', @b);
+ }
+}
+
+sub split_line {
+ local $_ = shift;
+ return map { /$COLOR/ ? $_ : (split //) }
+ split /($COLOR*)/;
+}
+
+sub highlight_line {
+ my ($line, $prefix, $suffix, $theme) = @_;
+
+ my $start = join('', @{$line}[0..($prefix-1)]);
+ my $mid = join('', @{$line}[$prefix..$suffix]);
+ my $end = join('', @{$line}[($suffix+1)..$#$line]);
+
+ # If we have a "normal" color specified, then take over the whole line.
+ # Otherwise, we try to just manipulate the highlighted bits.
+ if (defined $theme->[0]) {
+ s/$COLOR//g for ($start, $mid, $end);
+ chomp $end;
+ return join('',
+ $theme->[0], $start, $RESET,
+ $theme->[1], $mid, $RESET,
+ $theme->[0], $end, $RESET,
+ "\n"
+ );
+ } else {
+ return join('',
+ $start,
+ $theme->[1], $mid, $theme->[2],
+ $end
+ );
+ }
+}
+
+# Pairs are interesting to highlight only if we are going to end up
+# highlighting a subset (i.e., not the whole line). Otherwise, the highlighting
+# is just useless noise. We can detect this by finding either a matching prefix
+# or suffix (disregarding boring bits like whitespace and colorization).
+sub is_pair_interesting {
+ my ($a, $pa, $sa, $b, $pb, $sb) = @_;
+ my $prefix_a = join('', @$a[0..($pa-1)]);
+ my $prefix_b = join('', @$b[0..($pb-1)]);
+ my $suffix_a = join('', @$a[($sa+1)..$#$a]);
+ my $suffix_b = join('', @$b[($sb+1)..$#$b]);
+
+ return $prefix_a !~ /^$COLOR*-$BORING*$/ ||
+ $prefix_b !~ /^$COLOR*\+$BORING*$/ ||
+ $suffix_a !~ /^$BORING*$/ ||
+ $suffix_b !~ /^$BORING*$/;
+}
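
As a quick sanity check you can feed the script a hand-written hunk and watch which spans it highlights. This is only a sketch and assumes diff-highlight is on your $PATH (otherwise give its full path, as in the README's Use section).

---------------------------------------------
# One removed and one added line, so show_hunk pairs them by position
# and highlight_pair marks the differing middle segment in reverse video.
printf '%s\n' \
	'@@ -1 +1 @@' \
	'-foo(buf, size);' \
	'+foo(obj->buf, obj->size);' |
diff-highlight
---------------------------------------------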
diff --git a/contrib/emacs/git-blame.el b/contrib/emacs/git-blame.el
index 4fa70c5ad4..e671f6c1c6 100644
--- a/contrib/emacs/git-blame.el
+++ b/contrib/emacs/git-blame.el
@@ -79,6 +79,58 @@
;;; Code:
(eval-when-compile (require 'cl)) ; to use `push', `pop'
+(require 'format-spec)
+
+(defface git-blame-prefix-face
+ '((((background dark)) (:foreground "gray"
+ :background "black"))
+ (((background light)) (:foreground "gray"
+ :background "white"))
+ (t (:weight bold)))
+ "The face used for the hash prefix."
+ :group 'git-blame)
+
+(defgroup git-blame nil
+ "A minor mode showing Git blame information."
+ :group 'git
+ :link '(function-link git-blame-mode))
+
+
+(defcustom git-blame-use-colors t
+ "Use colors to indicate commits in `git-blame-mode'."
+ :type 'boolean
+ :group 'git-blame)
+
+(defcustom git-blame-prefix-format
+ "%h %20A:"
+ "The format of the prefix added to each line in `git-blame'
+mode. The format is passed to `format-spec' with the following format keys:
+
+ %h - the abbreviated hash
+ %H - the full hash
+ %a - the author name
+ %A - the author email
+ %c - the committer name
+ %C - the committer email
+ %s - the commit summary
+"
+ :group 'git-blame)
+
+(defcustom git-blame-mouseover-format
+ "%h %a %A: %s"
+ "The format of the description shown when pointing at a line in
+`git-blame' mode. The format string is passed to `format-spec'
+with the following format keys:
+
+ %h - the abbreviated hash
+ %H - the full hash
+ %a - the author name
+ %A - the author email
+ %c - the committer name
+ %C - the committer email
+ %s - the commit summary
+"
+ :group 'git-blame)
(defun git-blame-color-scale (&rest elements)
@@ -252,7 +304,7 @@ See also function `git-blame-mode'."
(defun git-blame-cleanup ()
"Remove all blame properties"
- (mapcar 'delete-overlay git-blame-overlays)
+ (mapc 'delete-overlay git-blame-overlays)
(setq git-blame-overlays nil)
(remove-git-blame-text-properties (point-min) (point-max)))
@@ -285,16 +337,16 @@ See also function `git-blame-mode'."
(defvar in-blame-filter nil)
(defun git-blame-filter (proc str)
- (save-excursion
- (set-buffer (process-buffer proc))
- (goto-char (process-mark proc))
- (insert-before-markers str)
- (goto-char 0)
- (unless in-blame-filter
- (let ((more t)
- (in-blame-filter t))
- (while more
- (setq more (git-blame-parse)))))))
+ (with-current-buffer (process-buffer proc)
+ (save-excursion
+ (goto-char (process-mark proc))
+ (insert-before-markers str)
+ (goto-char (point-min))
+ (unless in-blame-filter
+ (let ((more t)
+ (in-blame-filter t))
+ (while more
+ (setq more (git-blame-parse))))))))
(defun git-blame-parse ()
(cond ((looking-at "\\([0-9a-f]\\{40\\}\\) \\([0-9]+\\) \\([0-9]+\\) \\([0-9]+\\)\n")
@@ -302,72 +354,70 @@ See also function `git-blame-mode'."
(src-line (string-to-number (match-string 2)))
(res-line (string-to-number (match-string 3)))
(num-lines (string-to-number (match-string 4))))
- (setq git-blame-current
- (if (string= hash "0000000000000000000000000000000000000000")
- nil
- (git-blame-new-commit
- hash src-line res-line num-lines))))
- (delete-region (point) (match-end 0))
- t)
- ((looking-at "filename \\(.+\\)\n")
- (let ((filename (match-string 1)))
- (git-blame-add-info "filename" filename))
- (delete-region (point) (match-end 0))
+ (delete-region (point) (match-end 0))
+ (setq git-blame-current (list (git-blame-new-commit hash)
+ src-line res-line num-lines)))
t)
((looking-at "\\([a-z-]+\\) \\(.+\\)\n")
(let ((key (match-string 1))
(value (match-string 2)))
- (git-blame-add-info key value))
- (delete-region (point) (match-end 0))
- t)
- ((looking-at "boundary\n")
- (setq git-blame-current nil)
- (delete-region (point) (match-end 0))
+ (delete-region (point) (match-end 0))
+ (git-blame-add-info (car git-blame-current) key value)
+ (when (string= key "filename")
+ (git-blame-create-overlay (car git-blame-current)
+ (caddr git-blame-current)
+ (cadddr git-blame-current))
+ (setq git-blame-current nil)))
t)
(t
nil)))
-(defun git-blame-new-commit (hash src-line res-line num-lines)
- (save-excursion
- (set-buffer git-blame-file)
- (let ((info (gethash hash git-blame-cache))
- (inhibit-point-motion-hooks t)
- (inhibit-modification-hooks t))
- (when (not info)
- ;; Assign a random color to each new commit info
- ;; Take care not to select the same color multiple times
- (let ((color (if git-blame-colors
- (git-blame-random-pop git-blame-colors)
- git-blame-ancient-color)))
- (setq info (list hash src-line res-line num-lines
- (git-describe-commit hash)
- (cons 'color color))))
- (puthash hash info git-blame-cache))
- (goto-line res-line)
- (while (> num-lines 0)
- (if (get-text-property (point) 'git-blame)
- (forward-line)
- (let* ((start (point))
- (end (progn (forward-line 1) (point)))
- (ovl (make-overlay start end)))
- (push ovl git-blame-overlays)
- (overlay-put ovl 'git-blame info)
- (overlay-put ovl 'help-echo hash)
- (overlay-put ovl 'face (list :background
- (cdr (assq 'color (nthcdr 5 info)))))
- ;; the point-entered property doesn't seem to work in overlays
- ;;(overlay-put ovl 'point-entered
- ;; `(lambda (x y) (git-blame-identify ,hash)))
- (let ((modified (buffer-modified-p)))
- (put-text-property (if (= start 1) start (1- start)) (1- end)
- 'point-entered
- `(lambda (x y) (git-blame-identify ,hash)))
- (set-buffer-modified-p modified))))
- (setq num-lines (1- num-lines))))))
-
-(defun git-blame-add-info (key value)
- (if git-blame-current
- (nconc git-blame-current (list (cons (intern key) value)))))
+(defun git-blame-new-commit (hash)
+ (with-current-buffer git-blame-file
+ (or (gethash hash git-blame-cache)
+ ;; Assign a random color to each new commit info
+ ;; Take care not to select the same color multiple times
+ (let* ((color (if git-blame-colors
+ (git-blame-random-pop git-blame-colors)
+ git-blame-ancient-color))
+ (info `(,hash (color . ,color))))
+ (puthash hash info git-blame-cache)
+ info))))
+
+(defun git-blame-create-overlay (info start-line num-lines)
+ (with-current-buffer git-blame-file
+ (save-excursion
+ (let ((inhibit-point-motion-hooks t)
+ (inhibit-modification-hooks t))
+ (goto-char (point-min))
+ (forward-line (1- start-line))
+ (let* ((start (point))
+ (end (progn (forward-line num-lines) (point)))
+ (ovl (make-overlay start end))
+ (hash (car info))
+ (spec `((?h . ,(substring hash 0 6))
+ (?H . ,hash)
+ (?a . ,(git-blame-get-info info 'author))
+ (?A . ,(git-blame-get-info info 'author-mail))
+ (?c . ,(git-blame-get-info info 'committer))
+ (?C . ,(git-blame-get-info info 'committer-mail))
+ (?s . ,(git-blame-get-info info 'summary)))))
+ (push ovl git-blame-overlays)
+ (overlay-put ovl 'git-blame info)
+ (overlay-put ovl 'help-echo
+ (format-spec git-blame-mouseover-format spec))
+ (if git-blame-use-colors
+ (overlay-put ovl 'face (list :background
+ (cdr (assq 'color (cdr info))))))
+ (overlay-put ovl 'line-prefix
+ (propertize (format-spec git-blame-prefix-format spec)
+ 'face 'git-blame-prefix-face)))))))
+
+(defun git-blame-add-info (info key value)
+ (nconc info (list (cons (intern key) value))))
+
+(defun git-blame-get-info (info key)
+ (cdr (assq key (cdr info))))
(defun git-blame-current-commit ()
(let ((info (get-char-property (point) 'git-blame)))
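
The parser in git-blame-parse consumes the `git blame --incremental` stream: a "<hash> <src-line> <res-line> <num-lines>" header followed by "key value" lines, with the "filename" line closing each group. You can inspect that stream from a shell; this is just a sketch, and any tracked file will do in place of the one named here.

---------------------------------------------
# Show the first incremental blame group for a file; the Elisp filter
# above matches exactly these header and "key value" lines.
git blame --incremental -- contrib/emacs/git-blame.el | head -n 15
---------------------------------------------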
diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el
index 214930a021..5ffc506f6d 100644
--- a/contrib/emacs/git.el
+++ b/contrib/emacs/git.el
@@ -1310,6 +1310,13 @@ The FILES list must be sorted."
(when sign-off (git-append-sign-off committer-name committer-email)))
buffer))
+(define-derived-mode git-log-edit-mode log-edit-mode "Git-Log-Edit"
+ "Major mode for editing git log messages.
+
+Set up git-specific `font-lock-keywords' for `log-edit-mode'."
+ (set (make-local-variable 'font-lock-defaults)
+ '(git-log-edit-font-lock-keywords t t)))
+
(defun git-commit-file ()
"Commit the marked file(s), asking for a commit message."
(interactive)
@@ -1335,9 +1342,9 @@ The FILES list must be sorted."
(git-setup-log-buffer buffer (git-get-merge-heads) author-name author-email subject date))
(if (boundp 'log-edit-diff-function)
(log-edit 'git-do-commit nil '((log-edit-listfun . git-log-edit-files)
- (log-edit-diff-function . git-log-edit-diff)) buffer)
- (log-edit 'git-do-commit nil 'git-log-edit-files buffer))
- (setq font-lock-keywords (font-lock-compile-keywords git-log-edit-font-lock-keywords))
+ (log-edit-diff-function . git-log-edit-diff)) buffer 'git-log-edit-mode)
+ (log-edit 'git-do-commit nil 'git-log-edit-files buffer
+ 'git-log-edit-mode))
(setq paragraph-separate (concat (regexp-quote git-log-msg-separator) "$\\|Author: \\|Date: \\|Merge: \\|Signed-off-by: \\|\f\\|[ ]*$"))
(setq buffer-file-coding-system coding-system)
(re-search-forward (regexp-quote (concat git-log-msg-separator "\n")) nil t))))
@@ -1664,7 +1671,7 @@ Commands:
"Entry point into git-status mode."
(interactive "DSelect directory: ")
(setq dir (git-get-top-dir dir))
- (if (file-directory-p (concat (file-name-as-directory dir) ".git"))
+ (if (file-exists-p (concat (file-name-as-directory dir) ".git"))
(let ((buffer (or (and git-reuse-status-buffer (git-find-status-buffer dir))
(create-file-buffer (expand-file-name "*git-status*" dir)))))
(switch-to-buffer buffer)
diff --git a/contrib/examples/builtin-fetch--tool.c b/contrib/examples/builtin-fetch--tool.c
new file mode 100644
index 0000000000..ee1916641e
--- /dev/null
+++ b/contrib/examples/builtin-fetch--tool.c
@@ -0,0 +1,575 @@
+#include "builtin.h"
+#include "cache.h"
+#include "refs.h"
+#include "commit.h"
+#include "sigchain.h"
+
+static char *get_stdin(void)
+{
+ struct strbuf buf = STRBUF_INIT;
+ if (strbuf_read(&buf, 0, 1024) < 0) {
+ die_errno("error reading standard input");
+ }
+ return strbuf_detach(&buf, NULL);
+}
+
+static void show_new(enum object_type type, unsigned char *sha1_new)
+{
+ fprintf(stderr, " %s: %s\n", typename(type),
+ find_unique_abbrev(sha1_new, DEFAULT_ABBREV));
+}
+
+static int update_ref_env(const char *action,
+ const char *refname,
+ unsigned char *sha1,
+ unsigned char *oldval)
+{
+ char msg[1024];
+ const char *rla = getenv("GIT_REFLOG_ACTION");
+
+ if (!rla)
+ rla = "(reflog update)";
+ if (snprintf(msg, sizeof(msg), "%s: %s", rla, action) >= sizeof(msg))
+ warning("reflog message too long: %.*s...", 50, msg);
+ return update_ref(msg, refname, sha1, oldval, 0,
+ UPDATE_REFS_QUIET_ON_ERR);
+}
+
+static int update_local_ref(const char *name,
+ const char *new_head,
+ const char *note,
+ int verbose, int force)
+{
+ unsigned char sha1_old[20], sha1_new[20];
+ char oldh[41], newh[41];
+ struct commit *current, *updated;
+ enum object_type type;
+
+ if (get_sha1_hex(new_head, sha1_new))
+ die("malformed object name %s", new_head);
+
+ type = sha1_object_info(sha1_new, NULL);
+ if (type < 0)
+ die("object %s not found", new_head);
+
+ if (!*name) {
+ /* Not storing */
+ if (verbose) {
+ fprintf(stderr, "* fetched %s\n", note);
+ show_new(type, sha1_new);
+ }
+ return 0;
+ }
+
+ if (get_sha1(name, sha1_old)) {
+ const char *msg;
+ just_store:
+ /* new ref */
+ if (!strncmp(name, "refs/tags/", 10))
+ msg = "storing tag";
+ else
+ msg = "storing head";
+ fprintf(stderr, "* %s: storing %s\n",
+ name, note);
+ show_new(type, sha1_new);
+ return update_ref_env(msg, name, sha1_new, NULL);
+ }
+
+ if (!hashcmp(sha1_old, sha1_new)) {
+ if (verbose) {
+ fprintf(stderr, "* %s: same as %s\n", name, note);
+ show_new(type, sha1_new);
+ }
+ return 0;
+ }
+
+ if (!strncmp(name, "refs/tags/", 10)) {
+ fprintf(stderr, "* %s: updating with %s\n", name, note);
+ show_new(type, sha1_new);
+ return update_ref_env("updating tag", name, sha1_new, NULL);
+ }
+
+ current = lookup_commit_reference(sha1_old);
+ updated = lookup_commit_reference(sha1_new);
+ if (!current || !updated)
+ goto just_store;
+
+ strcpy(oldh, find_unique_abbrev(current->object.sha1, DEFAULT_ABBREV));
+ strcpy(newh, find_unique_abbrev(sha1_new, DEFAULT_ABBREV));
+
+ if (in_merge_bases(current, updated)) {
+ fprintf(stderr, "* %s: fast-forward to %s\n",
+ name, note);
+ fprintf(stderr, " old..new: %s..%s\n", oldh, newh);
+ return update_ref_env("fast-forward", name, sha1_new, sha1_old);
+ }
+ if (!force) {
+ fprintf(stderr,
+ "* %s: not updating to non-fast-forward %s\n",
+ name, note);
+ fprintf(stderr,
+ " old...new: %s...%s\n", oldh, newh);
+ return 1;
+ }
+ fprintf(stderr,
+ "* %s: forcing update to non-fast-forward %s\n",
+ name, note);
+ fprintf(stderr, " old...new: %s...%s\n", oldh, newh);
+ return update_ref_env("forced-update", name, sha1_new, sha1_old);
+}
+
+static int append_fetch_head(FILE *fp,
+ const char *head, const char *remote,
+ const char *remote_name, const char *remote_nick,
+ const char *local_name, int not_for_merge,
+ int verbose, int force)
+{
+ struct commit *commit;
+ int remote_len, i, note_len;
+ unsigned char sha1[20];
+ char note[1024];
+ const char *what, *kind;
+
+ if (get_sha1(head, sha1))
+ return error("Not a valid object name: %s", head);
+ commit = lookup_commit_reference_gently(sha1, 1);
+ if (!commit)
+ not_for_merge = 1;
+
+ if (!strcmp(remote_name, "HEAD")) {
+ kind = "";
+ what = "";
+ }
+ else if (!strncmp(remote_name, "refs/heads/", 11)) {
+ kind = "branch";
+ what = remote_name + 11;
+ }
+ else if (!strncmp(remote_name, "refs/tags/", 10)) {
+ kind = "tag";
+ what = remote_name + 10;
+ }
+ else if (!strncmp(remote_name, "refs/remotes/", 13)) {
+ kind = "remote-tracking branch";
+ what = remote_name + 13;
+ }
+ else {
+ kind = "";
+ what = remote_name;
+ }
+
+ remote_len = strlen(remote);
+ for (i = remote_len - 1; 0 <= i && remote[i] == '/'; i--)
+ ;
+ remote_len = i + 1;
+ if (4 < i && !strncmp(".git", remote + i - 3, 4))
+ remote_len = i - 3;
+
+ note_len = 0;
+ if (*what) {
+ if (*kind)
+ note_len += sprintf(note + note_len, "%s ", kind);
+ note_len += sprintf(note + note_len, "'%s' of ", what);
+ }
+ note_len += sprintf(note + note_len, "%.*s", remote_len, remote);
+ fprintf(fp, "%s\t%s\t%s\n",
+ sha1_to_hex(commit ? commit->object.sha1 : sha1),
+ not_for_merge ? "not-for-merge" : "",
+ note);
+ return update_local_ref(local_name, head, note, verbose, force);
+}
+
+static char *keep;
+static void remove_keep(void)
+{
+ if (keep && *keep)
+ unlink(keep);
+}
+
+static void remove_keep_on_signal(int signo)
+{
+ remove_keep();
+ sigchain_pop(signo);
+ raise(signo);
+}
+
+static char *find_local_name(const char *remote_name, const char *refs,
+ int *force_p, int *not_for_merge_p)
+{
+ const char *ref = refs;
+ int len = strlen(remote_name);
+
+ while (ref) {
+ const char *next;
+ int single_force, not_for_merge;
+
+ while (*ref == '\n')
+ ref++;
+ if (!*ref)
+ break;
+ next = strchr(ref, '\n');
+
+ single_force = not_for_merge = 0;
+ if (*ref == '+') {
+ single_force = 1;
+ ref++;
+ }
+ if (*ref == '.') {
+ not_for_merge = 1;
+ ref++;
+ if (*ref == '+') {
+ single_force = 1;
+ ref++;
+ }
+ }
+ if (!strncmp(remote_name, ref, len) && ref[len] == ':') {
+ const char *local_part = ref + len + 1;
+ int retlen;
+
+ if (!next)
+ retlen = strlen(local_part);
+ else
+ retlen = next - local_part;
+ *force_p = single_force;
+ *not_for_merge_p = not_for_merge;
+ return xmemdupz(local_part, retlen);
+ }
+ ref = next;
+ }
+ return NULL;
+}
+
+static int fetch_native_store(FILE *fp,
+ const char *remote,
+ const char *remote_nick,
+ const char *refs,
+ int verbose, int force)
+{
+ char buffer[1024];
+ int err = 0;
+
+ sigchain_push_common(remove_keep_on_signal);
+ atexit(remove_keep);
+
+ while (fgets(buffer, sizeof(buffer), stdin)) {
+ int len;
+ char *cp;
+ char *local_name;
+ int single_force, not_for_merge;
+
+ for (cp = buffer; *cp && !isspace(*cp); cp++)
+ ;
+ if (*cp)
+ *cp++ = 0;
+ len = strlen(cp);
+ if (len && cp[len-1] == '\n')
+ cp[--len] = 0;
+ if (!strcmp(buffer, "failed"))
+ die("Fetch failure: %s", remote);
+ if (!strcmp(buffer, "pack"))
+ continue;
+ if (!strcmp(buffer, "keep")) {
+ char *od = get_object_directory();
+ int len = strlen(od) + strlen(cp) + 50;
+ keep = xmalloc(len);
+ sprintf(keep, "%s/pack/pack-%s.keep", od, cp);
+ continue;
+ }
+
+ local_name = find_local_name(cp, refs,
+ &single_force, &not_for_merge);
+ if (!local_name)
+ continue;
+ err |= append_fetch_head(fp,
+ buffer, remote, cp, remote_nick,
+ local_name, not_for_merge,
+ verbose, force || single_force);
+ }
+ return err;
+}
+
+static int parse_reflist(const char *reflist)
+{
+ const char *ref;
+
+ printf("refs='");
+ for (ref = reflist; ref; ) {
+ const char *next;
+ while (*ref && isspace(*ref))
+ ref++;
+ if (!*ref)
+ break;
+ for (next = ref; *next && !isspace(*next); next++)
+ ;
+ printf("\n%.*s", (int)(next - ref), ref);
+ ref = next;
+ }
+ printf("'\n");
+
+ printf("rref='");
+ for (ref = reflist; ref; ) {
+ const char *next, *colon;
+ while (*ref && isspace(*ref))
+ ref++;
+ if (!*ref)
+ break;
+ for (next = ref; *next && !isspace(*next); next++)
+ ;
+ if (*ref == '.')
+ ref++;
+ if (*ref == '+')
+ ref++;
+ colon = strchr(ref, ':');
+ putchar('\n');
+ printf("%.*s", (int)((colon ? colon : next) - ref), ref);
+ ref = next;
+ }
+ printf("'\n");
+ return 0;
+}
+
+static int expand_refs_wildcard(const char *ls_remote_result, int numrefs,
+ const char **refs)
+{
+ int i, matchlen, replacelen;
+ int found_one = 0;
+ const char *remote = *refs++;
+ numrefs--;
+
+ if (numrefs == 0) {
+ fprintf(stderr, "Nothing specified for fetching with remote.%s.fetch\n",
+ remote);
+ printf("empty\n");
+ }
+
+ for (i = 0; i < numrefs; i++) {
+ const char *ref = refs[i];
+ const char *lref = ref;
+ const char *colon;
+ const char *tail;
+ const char *ls;
+ const char *next;
+
+ if (*lref == '+')
+ lref++;
+ colon = strchr(lref, ':');
+ tail = lref + strlen(lref);
+ if (!(colon &&
+ 2 < colon - lref &&
+ colon[-1] == '*' &&
+ colon[-2] == '/' &&
+ 2 < tail - (colon + 1) &&
+ tail[-1] == '*' &&
+ tail[-2] == '/')) {
+ /* not a glob */
+ if (!found_one++)
+ printf("explicit\n");
+ printf("%s\n", ref);
+ continue;
+ }
+
+ /* glob */
+ if (!found_one++)
+ printf("glob\n");
+
+ /* lref to colon-2 is remote hierarchy name;
+ * colon+1 to tail-2 is local.
+ */
+ matchlen = (colon-1) - lref;
+ replacelen = (tail-1) - (colon+1);
+ for (ls = ls_remote_result; ls; ls = next) {
+ const char *eol;
+ unsigned char sha1[20];
+ int namelen;
+
+ while (*ls && isspace(*ls))
+ ls++;
+ next = strchr(ls, '\n');
+ eol = !next ? (ls + strlen(ls)) : next;
+ if (!memcmp("^{}", eol-3, 3))
+ continue;
+ if (eol - ls < 40)
+ continue;
+ if (get_sha1_hex(ls, sha1))
+ continue;
+ ls += 40;
+ while (ls < eol && isspace(*ls))
+ ls++;
+ /* ls to next (or eol) is the name.
+ * is it identical to lref to colon-2?
+ */
+ if ((eol - ls) <= matchlen ||
+ strncmp(ls, lref, matchlen))
+ continue;
+
+ /* Yes, it is a match */
+ namelen = eol - ls;
+ if (lref != ref)
+ putchar('+');
+ printf("%.*s:%.*s%.*s\n",
+ namelen, ls,
+ replacelen, colon + 1,
+ namelen - matchlen, ls + matchlen);
+ }
+ }
+ return 0;
+}
+
+static int pick_rref(int sha1_only, const char *rref, const char *ls_remote_result)
+{
+ int err = 0;
+ int lrr_count = lrr_count, i, pass;
+ const char *cp;
+ struct lrr {
+ const char *line;
+ const char *name;
+ int namelen;
+ int shown;
+ } *lrr_list = lrr_list;
+
+ for (pass = 0; pass < 2; pass++) {
+ /* pass 0 counts and allocates, pass 1 fills... */
+ cp = ls_remote_result;
+ i = 0;
+ while (1) {
+ const char *np;
+ while (*cp && isspace(*cp))
+ cp++;
+ if (!*cp)
+ break;
+ np = strchrnul(cp, '\n');
+ if (pass) {
+ lrr_list[i].line = cp;
+ lrr_list[i].name = cp + 41;
+ lrr_list[i].namelen = np - (cp + 41);
+ }
+ i++;
+ cp = np;
+ }
+ if (!pass) {
+ lrr_count = i;
+ lrr_list = xcalloc(lrr_count, sizeof(*lrr_list));
+ }
+ }
+
+ while (1) {
+ const char *next;
+ int rreflen;
+ int i;
+
+ while (*rref && isspace(*rref))
+ rref++;
+ if (!*rref)
+ break;
+ next = strchrnul(rref, '\n');
+ rreflen = next - rref;
+
+ for (i = 0; i < lrr_count; i++) {
+ struct lrr *lrr = &(lrr_list[i]);
+
+ if (rreflen == lrr->namelen &&
+ !memcmp(lrr->name, rref, rreflen)) {
+ if (!lrr->shown)
+ printf("%.*s\n",
+ sha1_only ? 40 : lrr->namelen + 41,
+ lrr->line);
+ lrr->shown = 1;
+ break;
+ }
+ }
+ if (lrr_count <= i) {
+ error("pick-rref: %.*s not found", rreflen, rref);
+ err = 1;
+ }
+ rref = next;
+ }
+ free(lrr_list);
+ return err;
+}
+
+int cmd_fetch__tool(int argc, const char **argv, const char *prefix)
+{
+ int verbose = 0;
+ int force = 0;
+ int sopt = 0;
+
+ while (1 < argc) {
+ const char *arg = argv[1];
+ if (!strcmp("-v", arg))
+ verbose = 1;
+ else if (!strcmp("-f", arg))
+ force = 1;
+ else if (!strcmp("-s", arg))
+ sopt = 1;
+ else
+ break;
+ argc--;
+ argv++;
+ }
+
+ if (argc <= 1)
+ return error("Missing subcommand");
+
+ if (!strcmp("append-fetch-head", argv[1])) {
+ int result;
+ FILE *fp;
+ char *filename;
+
+ if (argc != 8)
+ return error("append-fetch-head takes 6 args");
+ filename = git_path("FETCH_HEAD");
+ fp = fopen(filename, "a");
+ if (!fp)
+ return error("cannot open %s: %s", filename, strerror(errno));
+ result = append_fetch_head(fp, argv[2], argv[3],
+ argv[4], argv[5],
+ argv[6], !!argv[7][0],
+ verbose, force);
+ fclose(fp);
+ return result;
+ }
+ if (!strcmp("native-store", argv[1])) {
+ int result;
+ FILE *fp;
+ char *filename;
+
+ if (argc != 5)
+ return error("fetch-native-store takes 3 args");
+ filename = git_path("FETCH_HEAD");
+ fp = fopen(filename, "a");
+ if (!fp)
+ return error("cannot open %s: %s", filename, strerror(errno));
+ result = fetch_native_store(fp, argv[2], argv[3], argv[4],
+ verbose, force);
+ fclose(fp);
+ return result;
+ }
+ if (!strcmp("parse-reflist", argv[1])) {
+ const char *reflist;
+ if (argc != 3)
+ return error("parse-reflist takes 1 arg");
+ reflist = argv[2];
+ if (!strcmp(reflist, "-"))
+ reflist = get_stdin();
+ return parse_reflist(reflist);
+ }
+ if (!strcmp("pick-rref", argv[1])) {
+ const char *ls_remote_result;
+ if (argc != 4)
+ return error("pick-rref takes 2 args");
+ ls_remote_result = argv[3];
+ if (!strcmp(ls_remote_result, "-"))
+ ls_remote_result = get_stdin();
+ return pick_rref(sopt, argv[2], ls_remote_result);
+ }
+ if (!strcmp("expand-refs-wildcard", argv[1])) {
+ const char *reflist;
+ if (argc < 4)
+ return error("expand-refs-wildcard takes at least 2 args");
+ reflist = argv[2];
+ if (!strcmp(reflist, "-"))
+ reflist = get_stdin();
+ return expand_refs_wildcard(reflist, argc - 3, argv + 3);
+ }
+
+ return error("Unknown subcommand: %s", argv[1]);
+}
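
fetch--tool was plumbing for the old git-fetch.sh porcelain rather than something to run by hand, but its subcommands can be exercised directly on a git build that still carries the builtin. A rough sketch; the remote name and refspecs are placeholders.

---------------------------------------------
# Split a refspec list into the refs='...' and rref='...' assignments
# that the old git-fetch.sh used to eval.
git fetch--tool parse-reflist '+refs/heads/master:refs/remotes/origin/master'

# Expand a wildcard refspec against ls-remote output read from stdin.
git ls-remote origin |
git fetch--tool expand-refs-wildcard - origin '+refs/heads/*:refs/remotes/origin/*'
---------------------------------------------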
diff --git a/contrib/examples/git-checkout.sh b/contrib/examples/git-checkout.sh
index 1a7689a48f..683cae7c3f 100755
--- a/contrib/examples/git-checkout.sh
+++ b/contrib/examples/git-checkout.sh
@@ -168,7 +168,7 @@ cd_to_toplevel
# branch. However, if "git checkout HEAD" detaches the HEAD
# from the current branch, even though that may be logically
# correct, it feels somewhat funny. More importantly, we do not
-# want "git checkout" nor "git checkout -f" to detach HEAD.
+# want "git checkout" or "git checkout -f" to detach HEAD.
detached=
detach_warn=
@@ -222,7 +222,7 @@ else
# Match the index to the working tree, and do a three-way.
git diff-files --name-only | git update-index --remove --stdin &&
- work=`git write-tree` &&
+ work=$(git write-tree) &&
git read-tree $v --reset -u $new || exit
eval GITHEAD_$new='${new_name:-${branch:-$new}}' &&
@@ -233,7 +233,7 @@ else
# Do not register the cleanly merged paths in the index yet.
# this is not a real merge before committing, but just carrying
# the working tree changes along.
- unmerged=`git ls-files -u`
+ unmerged=$(git ls-files -u)
git read-tree $v --reset $new
case "$unmerged" in
'') ;;
@@ -269,7 +269,7 @@ if [ "$?" -eq 0 ]; then
fi
if test -n "$branch"
then
- old_branch_name=`expr "z$oldbranch" : 'zrefs/heads/\(.*\)'`
+ old_branch_name=$(expr "z$oldbranch" : 'zrefs/heads/\(.*\)')
GIT_DIR="$GIT_DIR" git symbolic-ref -m "checkout: moving from ${old_branch_name:-$old} to $branch" HEAD "refs/heads/$branch"
if test -n "$quiet"
then
@@ -282,7 +282,7 @@ if [ "$?" -eq 0 ]; then
fi
elif test -n "$detached"
then
- old_branch_name=`expr "z$oldbranch" : 'zrefs/heads/\(.*\)'`
+ old_branch_name=$(expr "z$oldbranch" : 'zrefs/heads/\(.*\)')
git update-ref --no-deref -m "checkout: moving from ${old_branch_name:-$old} to $arg" HEAD "$detached" ||
die "Cannot detach HEAD"
if test -n "$detach_warn"
diff --git a/contrib/examples/git-clone.sh b/contrib/examples/git-clone.sh
index 547228e13c..08cf246bbb 100755
--- a/contrib/examples/git-clone.sh
+++ b/contrib/examples/git-clone.sh
@@ -40,7 +40,7 @@ eval "$(echo "$OPTIONS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)
get_repo_base() {
(
- cd "`/bin/pwd`" &&
+ cd "$(/bin/pwd)" &&
cd "$1" || cd "$1.git" &&
{
cd .git
@@ -50,7 +50,7 @@ get_repo_base() {
}
if [ -n "$GIT_SSL_NO_VERIFY" -o \
- "`git config --bool http.sslVerify`" = false ]; then
+ "$(git config --bool http.sslVerify)" = false ]; then
curl_extra_args="-k"
fi
@@ -70,7 +70,7 @@ clone_dumb_http () {
clone_tmp="$GIT_DIR/clone-tmp" &&
mkdir -p "$clone_tmp" || exit 1
if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
- "`git config --bool http.noEPSV`" = true ]; then
+ "$(git config --bool http.noEPSV)" = true ]; then
curl_extra_args="${curl_extra_args} --disable-epsv"
fi
http_fetch "$1/info/refs" "$clone_tmp/refs" ||
@@ -79,7 +79,7 @@ Perhaps git-update-server-info needs to be run there?"
test "z$quiet" = z && v=-v || v=
while read sha1 refname
do
- name=`expr "z$refname" : 'zrefs/\(.*\)'` &&
+ name=$(expr "z$refname" : 'zrefs/\(.*\)') &&
case "$name" in
*^*) continue;;
esac
@@ -88,7 +88,7 @@ Perhaps git-update-server-info needs to be run there?"
*) continue ;;
esac
if test -n "$use_separate_remote" &&
- branch_name=`expr "z$name" : 'zheads/\(.*\)'`
+ branch_name=$(expr "z$name" : 'zheads/\(.*\)')
then
tname="remotes/$origin/$branch_name"
else
@@ -100,7 +100,7 @@ Perhaps git-update-server-info needs to be run there?"
http_fetch "$1/HEAD" "$GIT_DIR/REMOTE_HEAD" ||
rm -f "$GIT_DIR/REMOTE_HEAD"
if test -f "$GIT_DIR/REMOTE_HEAD"; then
- head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ head_sha1=$(cat "$GIT_DIR/REMOTE_HEAD")
case "$head_sha1" in
'ref: refs/'*)
;;
@@ -444,15 +444,15 @@ then
# a non-bare repository is always in separate-remote layout
remote_top="refs/remotes/$origin"
head_sha1=
- test ! -r "$GIT_DIR/REMOTE_HEAD" || head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ test ! -r "$GIT_DIR/REMOTE_HEAD" || head_sha1=$(cat "$GIT_DIR/REMOTE_HEAD")
case "$head_sha1" in
'ref: refs/'*)
# Uh-oh, the remote told us (http transport done against
# new style repository with a symref HEAD).
# Ideally we should skip the guesswork but for now
# opt for minimum change.
- head_sha1=`expr "z$head_sha1" : 'zref: refs/heads/\(.*\)'`
- head_sha1=`cat "$GIT_DIR/$remote_top/$head_sha1"`
+ head_sha1=$(expr "z$head_sha1" : 'zref: refs/heads/\(.*\)')
+ head_sha1=$(cat "$GIT_DIR/$remote_top/$head_sha1")
;;
esac
@@ -467,7 +467,7 @@ then
while read name
do
test t = $done && continue
- branch_tip=`cat "$GIT_DIR/$remote_top/$name"`
+ branch_tip=$(cat "$GIT_DIR/$remote_top/$name")
if test "$head_sha1" = "$branch_tip"
then
echo "$name"
@@ -516,7 +516,7 @@ then
case "$no_checkout" in
'')
- test "z$quiet" = z -a "z$no_progress" = z && v=-v || v=
+ test "z$quiet" = z && test "z$no_progress" = z && v=-v || v=
git read-tree -m -u $v HEAD HEAD
esac
fi
diff --git a/contrib/examples/git-commit.sh b/contrib/examples/git-commit.sh
index 5c72f655c7..934505bab9 100755
--- a/contrib/examples/git-commit.sh
+++ b/contrib/examples/git-commit.sh
@@ -51,7 +51,7 @@ run_status () {
export GIT_INDEX_FILE
fi
- if test "$status_only" = "t" -o "$use_status_color" = "t"; then
+ if test "$status_only" = "t" || test "$use_status_color" = "t"; then
color=
else
color=--nocolor
@@ -91,7 +91,7 @@ signoff=
force_author=
only_include_assumed=
untracked_files=
-templatefile="`git config commit.template`"
+templatefile="$(git config commit.template)"
while test $# != 0
do
case "$1" in
@@ -280,7 +280,7 @@ case "$#,$also,$only,$amend" in
0,,,*)
;;
*,,,*)
- only_include_assumed="# Explicit paths specified without -i nor -o; assuming --only paths..."
+ only_include_assumed="# Explicit paths specified without -i or -o; assuming --only paths..."
also=
;;
esac
@@ -296,7 +296,7 @@ t,,,[1-9]*)
die "No paths with -i does not make sense." ;;
esac
-if test ! -z "$templatefile" -a -z "$log_given"
+if test ! -z "$templatefile" && test -z "$log_given"
then
if test ! -f "$templatefile"
then
@@ -350,7 +350,7 @@ t,)
TMP_INDEX="$GIT_DIR/tmp-index$$"
W=
test -z "$initial_commit" && W=--with-tree=HEAD
- commit_only=`git ls-files --error-unmatch $W -- "$@"` || exit
+ commit_only=$(git ls-files --error-unmatch $W -- "$@") || exit
# Build a temporary index and update the real index
# the same way.
@@ -475,8 +475,8 @@ then
fi
if test '' != "$force_author"
then
- GIT_AUTHOR_NAME=`expr "z$force_author" : 'z\(.*[^ ]\) *<.*'` &&
- GIT_AUTHOR_EMAIL=`expr "z$force_author" : '.*\(<.*\)'` &&
+ GIT_AUTHOR_NAME=$(expr "z$force_author" : 'z\(.*[^ ]\) *<.*') &&
+ GIT_AUTHOR_EMAIL=$(expr "z$force_author" : '.*\(<.*\)') &&
test '' != "$GIT_AUTHOR_NAME" &&
test '' != "$GIT_AUTHOR_EMAIL" ||
die "malformed --author parameter"
@@ -489,7 +489,7 @@ then
rloga='commit'
if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
rloga='commit (merge)'
- PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
+ PARENTS="-p HEAD "$(sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD")
elif test -n "$amend"; then
rloga='commit (amend)'
PARENTS=$(git cat-file commit HEAD |
@@ -631,7 +631,7 @@ then
if test -z "$quiet"
then
commit=`git diff-tree --always --shortstat --pretty="format:%h: %s"\
- --summary --root HEAD --`
+ --abbrev --summary --root HEAD --`
echo "Created${initial_commit:+ initial} commit $commit"
fi
fi
diff --git a/contrib/examples/git-fetch.sh b/contrib/examples/git-fetch.sh
index e44af2c86d..554070909c 100755
--- a/contrib/examples/git-fetch.sh
+++ b/contrib/examples/git-fetch.sh
@@ -67,7 +67,7 @@ do
keep='-k -k'
;;
--depth=*)
- shallow_depth="--depth=`expr "z$1" : 'z-[^=]*=\(.*\)'`"
+ shallow_depth="--depth=$(expr "z$1" : 'z-[^=]*=\(.*\)')"
;;
--depth)
shift
@@ -127,10 +127,12 @@ then
orig_head=$(git rev-parse --verify HEAD 2>/dev/null)
fi
-# Allow --notags from remote.$1.tagopt
+# Allow --tags/--notags from remote.$1.tagopt
case "$tags$no_tags" in
'')
case "$(git config --get "remote.$1.tagopt")" in
+ --tags)
+ tags=t ;;
--no-tags)
no_tags=t ;;
esac
@@ -260,12 +262,12 @@ fetch_per_ref () {
http://* | https://* | ftp://*)
test -n "$shallow_depth" &&
die "shallow clone with http not supported"
- proto=`expr "$remote" : '\([^:]*\):'`
+ proto=$(expr "$remote" : '\([^:]*\):')
if [ -n "$GIT_SSL_NO_VERIFY" ]; then
curl_extra_args="-k"
fi
if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
- "`git config --bool http.noEPSV`" = true ]; then
+ "$(git config --bool http.noEPSV)" = true ]; then
noepsv_opt="--disable-epsv"
fi
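
With the tagopt hunk above, the example script now honors both values of remote.<name>.tagopt. A sketch of the configuration it reads, with 'origin' standing in for whichever remote you use:

---------------------------------------------
# Equivalent to passing --tags (or --no-tags) on every fetch from origin.
git config remote.origin.tagopt --tags
git config remote.origin.tagopt --no-tags
---------------------------------------------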
diff --git a/contrib/examples/git-log.sh b/contrib/examples/git-log.sh
new file mode 100755
index 0000000000..c2ea71cf14
--- /dev/null
+++ b/contrib/examples/git-log.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+#
+
+USAGE='[--max-count=<n>] [<since>..<limit>] [--pretty=<format>] [git-rev-list options]'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+revs=$(git-rev-parse --revs-only --no-flags --default HEAD "$@") || exit
+[ "$revs" ] || {
+ die "No HEAD ref"
+}
+git-rev-list --pretty $(git-rev-parse --default HEAD "$@") |
+LESS=-S ${PAGER:-less}
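
A sketch of invoking the wrapper directly, matching its USAGE line; it sources git-sh-setup, so git's exec path has to be on PATH, and 'v1.0' is a placeholder tag.

---------------------------------------------
# git-sh-setup lives in git's exec path, so put that on PATH first.
PATH="$(git --exec-path):$PATH"
sh contrib/examples/git-log.sh --max-count=5
sh contrib/examples/git-log.sh v1.0..HEAD
---------------------------------------------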
diff --git a/contrib/examples/git-ls-remote.sh b/contrib/examples/git-ls-remote.sh
index fec70bbf88..2aa89a7df8 100755
--- a/contrib/examples/git-ls-remote.sh
+++ b/contrib/examples/git-ls-remote.sh
@@ -55,11 +55,11 @@ tmpdir=$tmp-d
case "$peek_repo" in
http://* | https://* | ftp://* )
if [ -n "$GIT_SSL_NO_VERIFY" -o \
- "`git config --bool http.sslVerify`" = false ]; then
+ "$(git config --bool http.sslVerify)" = false ]; then
curl_extra_args="-k"
fi
if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
- "`git config --bool http.noEPSV`" = true ]; then
+ "$(git config --bool http.noEPSV)" = true ]; then
curl_extra_args="${curl_extra_args} --disable-epsv"
fi
curl -nsf $curl_extra_args --header "Pragma: no-cache" "$peek_repo/info/refs" ||
diff --git a/contrib/examples/git-merge.sh b/contrib/examples/git-merge.sh
index e9588eec33..52f2aafb9d 100755
--- a/contrib/examples/git-merge.sh
+++ b/contrib/examples/git-merge.sh
@@ -14,8 +14,11 @@ summary (synonym to --stat)
log add list of one-line log to merge commit message
squash create a single commit instead of doing a merge
commit perform a commit if the merge succeeds (default)
-ff allow fast forward (default)
+ff allow fast-forward (default)
+ff-only abort if fast-forward is not possible
+rerere-autoupdate update index with any reused conflict resolution
s,strategy= merge strategy to use
+X= option for selected merge strategy
m,message= message to be used for the merge commit (if any)
"
@@ -25,25 +28,32 @@ require_work_tree
cd_to_toplevel
test -z "$(git ls-files -u)" ||
- die "You are in the middle of a conflicted merge."
+ die "Merge is not possible because you have unmerged files."
+
+! test -e "$GIT_DIR/MERGE_HEAD" ||
+ die 'You have not concluded your merge (MERGE_HEAD exists).'
LF='
'
all_strategies='recur recursive octopus resolve stupid ours subtree'
+all_strategies="$all_strategies recursive-ours recursive-theirs"
+not_strategies='base file index tree'
default_twohead_strategies='recursive'
default_octopus_strategies='octopus'
no_fast_forward_strategies='subtree ours'
-no_trivial_strategies='recursive recur subtree ours'
+no_trivial_strategies='recursive recur subtree ours recursive-ours recursive-theirs'
use_strategies=
+xopt=
allow_fast_forward=t
+fast_forward_only=
allow_trivial_merge=t
-squash= no_commit= log_arg=
+squash= no_commit= log_arg= rr_arg=
dropsave() {
rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \
- "$GIT_DIR/MERGE_STASH" || exit 1
+ "$GIT_DIR/MERGE_STASH" "$GIT_DIR/MERGE_MODE" || exit 1
}
savestate() {
@@ -130,21 +140,34 @@ finish () {
merge_name () {
remote="$1"
rh=$(git rev-parse --verify "$remote^0" 2>/dev/null) || return
- bh=$(git show-ref -s --verify "refs/heads/$remote" 2>/dev/null)
- if test "$rh" = "$bh"
- then
- echo "$rh branch '$remote' of ."
- elif truname=$(expr "$remote" : '\(.*\)~[1-9][0-9]*$') &&
+ if truname=$(expr "$remote" : '\(.*\)~[0-9]*$') &&
git show-ref -q --verify "refs/heads/$truname" 2>/dev/null
then
echo "$rh branch '$truname' (early part) of ."
- elif test "$remote" = "FETCH_HEAD" -a -r "$GIT_DIR/FETCH_HEAD"
+ return
+ fi
+ if found_ref=$(git rev-parse --symbolic-full-name --verify \
+ "$remote" 2>/dev/null)
+ then
+ expanded=$(git check-ref-format --branch "$remote") ||
+ exit
+ if test "${found_ref#refs/heads/}" != "$found_ref"
+ then
+ echo "$rh branch '$expanded' of ."
+ return
+ elif test "${found_ref#refs/remotes/}" != "$found_ref"
+ then
+ echo "$rh remote branch '$expanded' of ."
+ return
+ fi
+ fi
+ if test "$remote" = "FETCH_HEAD" && test -r "$GIT_DIR/FETCH_HEAD"
then
sed -e 's/ not-for-merge / /' -e 1q \
"$GIT_DIR/FETCH_HEAD"
- else
- echo "$rh commit '$remote'"
+ return
fi
+ echo "$rh commit '$remote'"
}
parse_config () {
@@ -171,16 +194,36 @@ parse_config () {
--no-ff)
test "$squash" != t ||
die "You cannot combine --squash with --no-ff."
+ test "$fast_forward_only" != t ||
+ die "You cannot combine --ff-only with --no-ff."
allow_fast_forward=f ;;
+ --ff-only)
+ test "$allow_fast_forward" != f ||
+ die "You cannot combine --ff-only with --no-ff."
+ fast_forward_only=t ;;
+ --rerere-autoupdate|--no-rerere-autoupdate)
+ rr_arg=$1 ;;
-s|--strategy)
shift
case " $all_strategies " in
*" $1 "*)
- use_strategies="$use_strategies$1 " ;;
+ use_strategies="$use_strategies$1 "
+ ;;
*)
- die "available strategies are: $all_strategies" ;;
+ case " $not_strategies " in
+ *" $1 "*)
+ false
+ esac &&
+ type "git-merge-$1" >/dev/null 2>&1 ||
+ die "available strategies are: $all_strategies"
+ use_strategies="$use_strategies$1 "
+ ;;
esac
;;
+ -X)
+ shift
+ xopt="${xopt:+$xopt }$(git rev-parse --sq-quote "--$1")"
+ ;;
-m|--message)
shift
merge_msg="$1"
@@ -220,7 +263,7 @@ fi
# This could be traditional "merge <msg> HEAD <commit>..." and the
# way we can tell it is to see if the second token is HEAD, but some
-# people might have misused the interface and used a committish that
+# people might have misused the interface and used a commit-ish that
# is the same as HEAD there instead. Traditional format never would
# have "-m" so it is an additional safety measure to check for it.
@@ -244,6 +287,10 @@ then
exit 1
fi
+ test "$squash" != t ||
+ die "Squash commit into empty head not supported yet"
+ test "$allow_fast_forward" = t ||
+ die "Non-fast-forward into an empty head does not make sense"
rh=$(git rev-parse --verify "$1^0") ||
die "$1 - not something we can merge"
@@ -260,12 +307,18 @@ else
# the given message. If remote is invalid we will die
# later in the common codepath so we discard the error
# in this loop.
- merge_name=$(for remote
+ merge_msg="$(
+ for remote
do
merge_name "$remote"
- done | git fmt-merge-msg $log_arg
- )
- merge_msg="${merge_msg:+$merge_msg$LF$LF}$merge_name"
+ done |
+ if test "$have_message" = t
+ then
+ git fmt-merge-msg -m "$merge_msg" $log_arg
+ else
+ git fmt-merge-msg $log_arg
+ fi
+ )"
fi
head=$(git rev-parse --verify "$head_arg"^0) || usage
@@ -288,7 +341,7 @@ case "$use_strategies" in
'')
case "$#" in
1)
- var="`git config --get pull.twohead`"
+ var="$(git config --get pull.twohead)"
if test -n "$var"
then
use_strategies="$var"
@@ -296,7 +349,7 @@ case "$use_strategies" in
use_strategies="$default_twohead_strategies"
fi ;;
*)
- var="`git config --get pull.octopus`"
+ var="$(git config --get pull.octopus)"
if test -n "$var"
then
use_strategies="$var"
@@ -334,7 +387,7 @@ case "$#" in
common=$(git merge-base --all $head "$@")
;;
*)
- common=$(git show-branch --merge-base $head "$@")
+ common=$(git merge-base --all --octopus $head "$@")
;;
esac
echo "$head" >"$GIT_DIR/ORIG_HEAD"
@@ -353,7 +406,7 @@ t,1,"$head",*)
# Again the most common case of merging one remote.
echo "Updating $(git rev-parse --short $head)..$(git rev-parse --short $1)"
git update-index --refresh 2>/dev/null
- msg="Fast forward"
+ msg="Fast-forward"
if test -n "$have_message"
then
msg="$msg (no commit created; -m option ignored)"
@@ -365,15 +418,15 @@ t,1,"$head",*)
exit 0
;;
?,1,?*"$LF"?*,*)
- # We are not doing octopus and not fast forward. Need a
+ # We are not doing octopus and not fast-forward. Need a
# real merge.
;;
?,1,*,)
- # We are not doing octopus, not fast forward, and have only
+ # We are not doing octopus, not fast-forward, and have only
# one common.
git update-index --refresh 2>/dev/null
- case "$allow_trivial_merge" in
- t)
+ case "$allow_trivial_merge,$fast_forward_only" in
+ t,)
# See if it is really trivial.
git var GIT_COMMITTER_IDENT >/dev/null || exit
echo "Trying really trivial in-index merge..."
@@ -412,6 +465,11 @@ t,1,"$head",*)
;;
esac
+if test "$fast_forward_only" = t
+then
+ die "Not possible to fast-forward, aborting."
+fi
+
# We are going to make a new commit.
git var GIT_COMMITTER_IDENT >/dev/null || exit
@@ -450,7 +508,7 @@ do
# Remember which strategy left the state in the working tree
wt_strategy=$strategy
- git-merge-$strategy $common -- "$head_arg" "$@"
+ eval 'git-merge-$strategy '"$xopt"' $common -- "$head_arg" "$@"'
exit=$?
if test "$no_commit" = t && test "$exit" = 0
then
@@ -469,7 +527,7 @@ do
git diff-files --name-only
git ls-files --unmerged
} | wc -l`
- if test $best_cnt -le 0 -o $cnt -le $best_cnt
+ if test $best_cnt -le 0 || test $cnt -le $best_cnt
then
best_strategy=$strategy
best_cnt=$cnt
@@ -488,9 +546,9 @@ if test '' != "$result_tree"
then
if test "$allow_fast_forward" = "t"
then
- parents=$(git show-branch --independent "$head" "$@")
+ parents=$(git merge-base --independent "$head" "$@")
else
- parents=$(git rev-parse "$head" "$@")
+ parents=$(git rev-parse "$head" "$@")
fi
parents=$(echo "$parents" | sed -e 's/^/-p /')
result_commit=$(printf '%s\n' "$merge_msg" | git commit-tree $result_tree $parents) || exit
@@ -532,7 +590,15 @@ else
do
echo $remote
done >"$GIT_DIR/MERGE_HEAD"
- printf '%s\n' "$merge_msg" >"$GIT_DIR/MERGE_MSG"
+ printf '%s\n' "$merge_msg" >"$GIT_DIR/MERGE_MSG" ||
+ die "Could not write to $GIT_DIR/MERGE_MSG"
+ if test "$allow_fast_forward" != t
+ then
+ printf "%s" no-ff
+ else
+ :
+ fi >"$GIT_DIR/MERGE_MODE" ||
+ die "Could not write to $GIT_DIR/MERGE_MODE"
fi
if test "$merge_was_ok" = t
@@ -549,6 +615,6 @@ Conflicts:
sed -e 's/^[^ ]* / /' |
uniq
} >>"$GIT_DIR/MERGE_MSG"
- git rerere
+ git rerere $rr_arg
die "Automatic merge failed; fix conflicts and then commit the result."
fi
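
The two options added above mirror flags of the C builtin; sketched invocations of both, with the branch names as placeholders:

---------------------------------------------
# Refuse to create a merge commit; only update if HEAD can fast-forward.
git merge --ff-only origin/master

# Pass an option through to the selected merge strategy.
git merge -s recursive -X theirs topic
---------------------------------------------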
diff --git a/contrib/examples/git-notes.sh b/contrib/examples/git-notes.sh
new file mode 100755
index 0000000000..e642e47d9f
--- /dev/null
+++ b/contrib/examples/git-notes.sh
@@ -0,0 +1,121 @@
+#!/bin/sh
+
+USAGE="(edit [-F <file> | -m <msg>] | show) [commit]"
+. git-sh-setup
+
+test -z "$1" && usage
+ACTION="$1"; shift
+
+test -z "$GIT_NOTES_REF" && GIT_NOTES_REF="$(git config core.notesref)"
+test -z "$GIT_NOTES_REF" && GIT_NOTES_REF="refs/notes/commits"
+
+MESSAGE=
+while test $# != 0
+do
+ case "$1" in
+ -m)
+ test "$ACTION" = "edit" || usage
+ shift
+ if test "$#" = "0"; then
+ die "error: option -m needs an argument"
+ else
+ if [ -z "$MESSAGE" ]; then
+ MESSAGE="$1"
+ else
+ MESSAGE="$MESSAGE
+
+$1"
+ fi
+ shift
+ fi
+ ;;
+ -F)
+ test "$ACTION" = "edit" || usage
+ shift
+ if test "$#" = "0"; then
+ die "error: option -F needs an argument"
+ else
+ if [ -z "$MESSAGE" ]; then
+ MESSAGE="$(cat "$1")"
+ else
+ MESSAGE="$MESSAGE
+
+$(cat "$1")"
+ fi
+ shift
+ fi
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+done
+
+COMMIT=$(git rev-parse --verify --default HEAD "$@") ||
+die "Invalid commit: $@"
+
+case "$ACTION" in
+edit)
+ if [ "${GIT_NOTES_REF#refs/notes/}" = "$GIT_NOTES_REF" ]; then
+ die "Refusing to edit notes in $GIT_NOTES_REF (outside of refs/notes/)"
+ fi
+
+ MSG_FILE="$GIT_DIR/new-notes-$COMMIT"
+ GIT_INDEX_FILE="$MSG_FILE.idx"
+ export GIT_INDEX_FILE
+
+ trap '
+ test -f "$MSG_FILE" && rm "$MSG_FILE"
+ test -f "$GIT_INDEX_FILE" && rm "$GIT_INDEX_FILE"
+ ' 0
+
+ CURRENT_HEAD=$(git show-ref "$GIT_NOTES_REF" | cut -f 1 -d ' ')
+ if [ -z "$CURRENT_HEAD" ]; then
+ PARENT=
+ else
+ PARENT="-p $CURRENT_HEAD"
+ git read-tree "$GIT_NOTES_REF" || die "Could not read index"
+ fi
+
+ if [ -z "$MESSAGE" ]; then
+ GIT_NOTES_REF= git log -1 $COMMIT | sed "s/^/#/" > "$MSG_FILE"
+ if [ ! -z "$CURRENT_HEAD" ]; then
+ git cat-file blob :$COMMIT >> "$MSG_FILE" 2> /dev/null
+ fi
+ core_editor="$(git config core.editor)"
+ ${GIT_EDITOR:-${core_editor:-${VISUAL:-${EDITOR:-vi}}}} "$MSG_FILE"
+ else
+ echo "$MESSAGE" > "$MSG_FILE"
+ fi
+
+ grep -v ^# < "$MSG_FILE" | git stripspace > "$MSG_FILE".processed
+ mv "$MSG_FILE".processed "$MSG_FILE"
+ if [ -s "$MSG_FILE" ]; then
+ BLOB=$(git hash-object -w "$MSG_FILE") ||
+ die "Could not write into object database"
+ git update-index --add --cacheinfo 0644 $BLOB $COMMIT ||
+ die "Could not write index"
+ else
+ test -z "$CURRENT_HEAD" &&
+ die "Will not initialise with empty tree"
+ git update-index --force-remove $COMMIT ||
+ die "Could not update index"
+ fi
+
+ TREE=$(git write-tree) || die "Could not write tree"
+ NEW_HEAD=$(echo Annotate $COMMIT | git commit-tree $TREE $PARENT) ||
+ die "Could not annotate"
+ git update-ref -m "Annotate $COMMIT" \
+ "$GIT_NOTES_REF" $NEW_HEAD $CURRENT_HEAD
+;;
+show)
+ git rev-parse -q --verify "$GIT_NOTES_REF":$COMMIT > /dev/null ||
+ die "No note for commit $COMMIT."
+ git show "$GIT_NOTES_REF":$COMMIT
+;;
+*)
+ usage
+esac
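
A rough usage sketch for the example script added above, assuming it is run from inside a repository and that git-sh-setup is reachable (for instance with git's exec path on $PATH); the note text is only sample data:

    # Attach an inline note to HEAD; the script stores the message as a
    # blob named after the commit and commits it onto refs/notes/commits.
    sh contrib/examples/git-notes.sh edit -m "tested against the maint branch" HEAD

    # Read the note back.
    sh contrib/examples/git-notes.sh show HEAD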
diff --git a/contrib/examples/git-remote.perl b/contrib/examples/git-remote.perl
index b17952a785..d42df7b418 100755
--- a/contrib/examples/git-remote.perl
+++ b/contrib/examples/git-remote.perl
@@ -347,7 +347,7 @@ sub rm_remote {
}
sub add_usage {
- print STDERR "Usage: git remote add [-f] [-t track]* [-m master] <name> <url>\n";
+ print STDERR "usage: git remote add [-f] [-t track]* [-m master] <name> <url>\n";
exit(1);
}
@@ -380,7 +380,7 @@ elsif ($ARGV[0] eq 'show') {
}
}
if ($i >= @ARGV) {
- print STDERR "Usage: git remote show <remote>\n";
+ print STDERR "usage: git remote show <remote>\n";
exit(1);
}
my $status = 0;
@@ -410,7 +410,7 @@ elsif ($ARGV[0] eq 'prune') {
}
}
if ($i >= @ARGV) {
- print STDERR "Usage: git remote prune <remote>\n";
+ print STDERR "usage: git remote prune <remote>\n";
exit(1);
}
my $status = 0;
@@ -458,13 +458,13 @@ elsif ($ARGV[0] eq 'add') {
}
elsif ($ARGV[0] eq 'rm') {
if (@ARGV <= 1) {
- print STDERR "Usage: git remote rm <remote>\n";
+ print STDERR "usage: git remote rm <remote>\n";
exit(1);
}
exit(rm_remote($ARGV[1]));
}
else {
- print STDERR "Usage: git remote\n";
+ print STDERR "usage: git remote\n";
print STDERR " git remote add <name> <url>\n";
print STDERR " git remote rm <name>\n";
print STDERR " git remote show <name>\n";
diff --git a/contrib/examples/git-repack.sh b/contrib/examples/git-repack.sh
new file mode 100755
index 0000000000..96e3fed326
--- /dev/null
+++ b/contrib/examples/git-repack.sh
@@ -0,0 +1,194 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+#
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_SPEC="\
+git repack [options]
+--
+a pack everything in a single pack
+A same as -a, and turn unreachable objects loose
+d remove redundant packs, and run git-prune-packed
+f pass --no-reuse-delta to git-pack-objects
+F pass --no-reuse-object to git-pack-objects
+n do not run git-update-server-info
+q,quiet be quiet
+l pass --local to git-pack-objects
+unpack-unreachable= with -A, do not loosen objects older than this
+ Packing constraints
+window= size of the window used for delta compression
+window-memory= same as the above, but limit memory size instead of entries count
+depth= limits the maximum delta depth
+max-pack-size= maximum size of each packfile
+"
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+no_update_info= all_into_one= remove_redundant= unpack_unreachable=
+local= no_reuse= extra=
+while test $# != 0
+do
+ case "$1" in
+ -n) no_update_info=t ;;
+ -a) all_into_one=t ;;
+ -A) all_into_one=t
+ unpack_unreachable=--unpack-unreachable ;;
+ --unpack-unreachable)
+ unpack_unreachable="--unpack-unreachable=$2"; shift ;;
+ -d) remove_redundant=t ;;
+ -q) GIT_QUIET=t ;;
+ -f) no_reuse=--no-reuse-delta ;;
+ -F) no_reuse=--no-reuse-object ;;
+ -l) local=--local ;;
+ --max-pack-size|--window|--window-memory|--depth)
+ extra="$extra $1=$2"; shift ;;
+ --) shift; break;;
+ *) usage ;;
+ esac
+ shift
+done
+
+case "$(git config --bool repack.usedeltabaseoffset || echo true)" in
+true)
+ extra="$extra --delta-base-offset" ;;
+esac
+
+PACKDIR="$GIT_OBJECT_DIRECTORY/pack"
+PACKTMP="$PACKDIR/.tmp-$$-pack"
+rm -f "$PACKTMP"-*
+trap 'rm -f "$PACKTMP"-*' 0 1 2 3 15
+
+# There will be more repacking strategies to come...
+case ",$all_into_one," in
+,,)
+ args='--unpacked --incremental'
+ ;;
+,t,)
+ args= existing=
+ if [ -d "$PACKDIR" ]; then
+ for e in `cd "$PACKDIR" && find . -type f -name '*.pack' \
+ | sed -e 's/^\.\///' -e 's/\.pack$//'`
+ do
+ if [ -e "$PACKDIR/$e.keep" ]; then
+ : keep
+ else
+ existing="$existing $e"
+ fi
+ done
+ if test -n "$existing" && test -n "$unpack_unreachable" && \
+ test -n "$remove_redundant"
+ then
+ # This may have arbitrary user arguments, so we
+ # have to protect it against whitespace splitting
+ # when it gets run as "pack-objects $args" later.
+ # Fortunately, we know it's an approxidate, so we
+ # can just use dots instead.
+ args="$args $(echo "$unpack_unreachable" | tr ' ' .)"
+ fi
+ fi
+ ;;
+esac
+
+mkdir -p "$PACKDIR" || exit
+
+args="$args $local ${GIT_QUIET:+-q} $no_reuse$extra"
+names=$(git pack-objects --keep-true-parents --honor-pack-keep --non-empty --all --reflog $args </dev/null "$PACKTMP") ||
+ exit 1
+if [ -z "$names" ]; then
+ say Nothing new to pack.
+fi
+
+# Ok we have prepared all new packfiles.
+
+# First see if there are packs of the same name and, if so,
+# whether we can move them out of the way (this can happen if we
+# repacked immediately after packing fully).
+rollback=
+failed=
+for name in $names
+do
+ for sfx in pack idx
+ do
+ file=pack-$name.$sfx
+ test -f "$PACKDIR/$file" || continue
+ rm -f "$PACKDIR/old-$file" &&
+ mv "$PACKDIR/$file" "$PACKDIR/old-$file" || {
+ failed=t
+ break
+ }
+ rollback="$rollback $file"
+ done
+ test -z "$failed" || break
+done
+
+# If renaming failed for any of them, roll the ones we have
+# already renamed back to their original names.
+if test -n "$failed"
+then
+ rollback_failure=
+ for file in $rollback
+ do
+ mv "$PACKDIR/old-$file" "$PACKDIR/$file" ||
+ rollback_failure="$rollback_failure $file"
+ done
+ if test -n "$rollback_failure"
+ then
+ echo >&2 "WARNING: Some packs in use have been renamed by"
+ echo >&2 "WARNING: prefixing old- to their name, in order to"
+ echo >&2 "WARNING: replace them with the new version of the"
+ echo >&2 "WARNING: file. But the operation failed, and"
+ echo >&2 "WARNING: attempt to rename them back to their"
+ echo >&2 "WARNING: original names also failed."
+ echo >&2 "WARNING: Please rename them in $PACKDIR manually:"
+ for file in $rollback_failure
+ do
+ echo >&2 "WARNING: old-$file -> $file"
+ done
+ fi
+ exit 1
+fi
+
+# Now the ones with the same name are out of the way...
+fullbases=
+for name in $names
+do
+ fullbases="$fullbases pack-$name"
+ chmod a-w "$PACKTMP-$name.pack"
+ chmod a-w "$PACKTMP-$name.idx"
+ mv -f "$PACKTMP-$name.pack" "$PACKDIR/pack-$name.pack" &&
+ mv -f "$PACKTMP-$name.idx" "$PACKDIR/pack-$name.idx" ||
+ exit
+done
+
+# Remove the "old-" files
+for name in $names
+do
+ rm -f "$PACKDIR/old-pack-$name.idx"
+ rm -f "$PACKDIR/old-pack-$name.pack"
+done
+
+# End of pack replacement.
+
+if test "$remove_redundant" = t
+then
+ # We know $existing are all redundant.
+ if [ -n "$existing" ]
+ then
+ ( cd "$PACKDIR" &&
+ for e in $existing
+ do
+ case " $fullbases " in
+ *" $e "*) ;;
+ *) rm -f "$e.pack" "$e.idx" "$e.keep" ;;
+ esac
+ done
+ )
+ fi
+ git prune-packed ${GIT_QUIET:+-q}
+fi
+
+case "$no_update_info" in
+t) : ;;
+*) git update-server-info ;;
+esac
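
The wrapper above mostly translates its options into a single pack-objects invocation plus the rename/rollback dance on $PACKDIR. A short sketch of the common invocations, using the flag names from the OPTIONS_SPEC at the top of the script:

    # Pack everything into one pack, drop the packs it replaces, quietly.
    git repack -a -d -q

    # Like -a, but turn objects that are no longer reachable loose
    # instead of deleting them together with their old packs.
    git repack -A -d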
diff --git a/contrib/examples/git-reset.sh b/contrib/examples/git-reset.sh
index bafeb52cd1..cb1bbf3b90 100755
--- a/contrib/examples/git-reset.sh
+++ b/contrib/examples/git-reset.sh
@@ -40,7 +40,7 @@ case "$1" in --) shift ;; esac
# git reset --mixed tree [--] paths... can be used to
# load chosen paths from the tree into the index without
-# affecting the working tree nor HEAD.
+# affecting the working tree or HEAD.
if test $# != 0
then
test "$reset_type" = "--mixed" ||
@@ -60,7 +60,7 @@ then
update=-u
fi
-# Soft reset does not touch the index file nor the working tree
+# Soft reset does not touch the index file or the working tree
# at all, but requires them in a good order. Other resets reset
# the index file to the tree object we are switching to.
if test "$reset_type" = "--soft"
diff --git a/contrib/examples/git-resolve.sh b/contrib/examples/git-resolve.sh
index 0ee1bd898e..70fdc27b72 100755
--- a/contrib/examples/git-resolve.sh
+++ b/contrib/examples/git-resolve.sh
@@ -48,7 +48,7 @@ case "$common" in
"$head")
echo "Updating $(git rev-parse --short $head)..$(git rev-parse --short $merge)"
git read-tree -u -m $head $merge || exit 1
- git update-ref -m "resolve $merge_name: Fast forward" \
+ git update-ref -m "resolve $merge_name: Fast-forward" \
HEAD "$merge" "$head"
git diff-tree -p $head $merge | git apply --stat
dropheads
@@ -75,8 +75,8 @@ case "$common" in
GIT_INDEX_FILE=$G git read-tree -m $c $head $merge \
2>/dev/null || continue
# Count the paths that are unmerged.
- cnt=`GIT_INDEX_FILE=$G git ls-files --unmerged | wc -l`
- if test $best_cnt -le 0 -o $cnt -le $best_cnt
+ cnt=$(GIT_INDEX_FILE=$G git ls-files --unmerged | wc -l)
+ if test $best_cnt -le 0 || test $cnt -le $best_cnt
then
best=$c
best_cnt=$cnt
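
The -o to || conversion in this hunk (and the matching one in git-merge.sh above) follows the usual portability advice: test's binary -a/-o operators are obsolescent and ambiguous once an operand can look like an option, so two separate test invocations joined by the shell are preferred. A small self-contained illustration, with best_cnt and cnt standing in for the script's counters:

    best_cnt=0 cnt=3

    # Single test invocation relying on the obsolescent -o operator.
    test "$best_cnt" -le 0 -o "$cnt" -le "$best_cnt" && echo take-it

    # The form the scripts were converted to: two invocations of test,
    # combined with the shell's own || operator.
    if test "$best_cnt" -le 0 || test "$cnt" -le "$best_cnt"
    then
        echo take-it
    fi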
diff --git a/contrib/examples/git-revert.sh b/contrib/examples/git-revert.sh
index 49f00321b2..7e2aad5491 100755
--- a/contrib/examples/git-revert.sh
+++ b/contrib/examples/git-revert.sh
@@ -26,6 +26,7 @@ require_work_tree
cd_to_toplevel
no_commit=
+xopt=
while case "$#" in 0) break ;; esac
do
case "$1" in
@@ -44,6 +45,16 @@ do
-x|--i-really-want-to-expose-my-private-commit-object-name)
replay=
;;
+ -X?*)
+ xopt="$xopt$(git rev-parse --sq-quote "--${1#-X}")"
+ ;;
+ --strategy-option=*)
+ xopt="$xopt$(git rev-parse --sq-quote "--${1#--strategy-option=}")"
+ ;;
+ -X|--strategy-option)
+ shift
+ xopt="$xopt$(git rev-parse --sq-quote "--$1")"
+ ;;
-*)
usage
;;
@@ -126,7 +137,7 @@ cherry-pick)
q
}'
- logmsg=`git show -s --pretty=raw --encoding="$encoding" "$commit"`
+ logmsg=$(git show -s --pretty=raw --encoding="$encoding" "$commit")
set_author_env=`echo "$logmsg" |
LANG=C LC_ALL=C sed -ne "$pick_author_script"`
eval "$set_author_env"
@@ -159,7 +170,7 @@ export GITHEAD_$head GITHEAD_$next
# and $prev on top of us (when reverting), or the change between
# $prev and $commit on top of us (when cherry-picking or replaying).
-git-merge-recursive $base -- $head $next &&
+eval "git merge-recursive $xopt $base -- $head $next" &&
result=$(git-write-tree 2>/dev/null) || {
mv -f .msg "$GIT_DIR/MERGE_MSG"
{
@@ -181,7 +192,6 @@ Conflicts:
esac
exit 1
}
-echo >&2 "Finished one $me."
# If we are cherry-pick, and if the merge did not result in
# hand-editing, we will hit this commit and inherit the original
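
The new -X/--strategy-option handling above leans on git rev-parse --sq-quote, which prints its arguments as shell-quoted words (each preceded by a space), so the accumulated $xopt string can later be re-expanded safely with eval. A sketch of the same pattern on its own, using theirs and patience merely as sample option words:

    xopt=
    for opt in theirs patience
    do
        # git does the quoting; plain concatenation works because each
        # quoted word in the output is preceded by a space.
        xopt="$xopt$(git rev-parse --sq-quote "--$opt")"
    done
    printf '%s\n' "$xopt"
    # The script then re-expands the quoted words along the lines of:
    #   eval "git merge-recursive $xopt $base -- $head $next"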
diff --git a/contrib/examples/git-svnimport.perl b/contrib/examples/git-svnimport.perl
index 4576c4a862..c414f0d9c7 100755
--- a/contrib/examples/git-svnimport.perl
+++ b/contrib/examples/git-svnimport.perl
@@ -1,4 +1,4 @@
-#!/usr/bin/perl -w
+#!/usr/bin/perl
# This tool is copyright (c) 2005, Matthias Urlichs.
# It is released under the Gnu Public License, version 2.
@@ -36,7 +36,7 @@ our($opt_h,$opt_o,$opt_v,$opt_u,$opt_C,$opt_i,$opt_m,$opt_M,$opt_t,$opt_T,
sub usage() {
print STDERR <<END;
-Usage: ${\basename $0} # fetch/update GIT from SVN
+usage: ${\basename $0} # fetch/update GIT from SVN
[-o branch-for-HEAD] [-h] [-v] [-l max_rev] [-R repack_each_revs]
[-C GIT_repository] [-t tagname] [-T trunkname] [-b branchname]
[-d|-D] [-i] [-u] [-r] [-I ignorefilename] [-s start_chg]
@@ -289,7 +289,7 @@ my $current_rev = $opt_s || 1;
unless(-d $git_dir) {
system("git init");
die "Cannot init the GIT db at $git_tree: $?\n" if $?;
- system("git read-tree");
+ system("git read-tree --empty");
die "Cannot init an empty tree: $?\n" if $?;
$last_branch = $opt_o;
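
The read-tree hunk above adds --empty because later versions of git reject a bare git read-tree with no tree-ish, and the importer only wants to start from an empty index. A tiny sketch, with demo-import as a throwaway repository name:

    git init demo-import
    cd demo-import
    # Explicitly (re)initialise an empty index rather than relying on the
    # old behaviour of read-tree with no arguments.
    git read-tree --empty
    git ls-files    # prints nothing: the index exists but is empty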
diff --git a/contrib/examples/git-svnimport.txt b/contrib/examples/git-svnimport.txt
index 3bb871e42f..3f0a9c33b5 100644
--- a/contrib/examples/git-svnimport.txt
+++ b/contrib/examples/git-svnimport.txt
@@ -176,4 +176,4 @@ Documentation by Matthias Urlichs <smurf@smurf.noris.de>.
GIT
---
-Part of the gitlink:git[7] suite
+Part of the linkgit:git[7] suite
diff --git a/contrib/examples/git-tag.sh b/contrib/examples/git-tag.sh
index 2c15bc955b..1bd8f3c58d 100755
--- a/contrib/examples/git-tag.sh
+++ b/contrib/examples/git-tag.sh
@@ -156,7 +156,7 @@ prev=0000000000000000000000000000000000000000
if git show-ref --verify --quiet -- "refs/tags/$name"
then
test -n "$force" || die "tag '$name' already exists"
- prev=`git rev-parse "refs/tags/$name"`
+ prev=$(git rev-parse "refs/tags/$name")
fi
shift
git check-ref-format "tags/$name" ||
diff --git a/contrib/examples/git-whatchanged.sh b/contrib/examples/git-whatchanged.sh
new file mode 100755
index 0000000000..2edbdc6d99
--- /dev/null
+++ b/contrib/examples/git-whatchanged.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+USAGE='[-p] [--max-count=<n>] [<since>..<limit>] [--pretty=<format>] [-m] [git-diff-tree options] [git-rev-list options]'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+diff_tree_flags=$(git-rev-parse --sq --no-revs --flags "$@") || exit
+case "$0" in
+*whatchanged)
+ count=
+ test -z "$diff_tree_flags" &&
+ diff_tree_flags=$(git config --get whatchanged.difftree)
+ diff_tree_default_flags='-c -M --abbrev' ;;
+*show)
+ count=-n1
+ test -z "$diff_tree_flags" &&
+ diff_tree_flags=$(git config --get show.difftree)
+ diff_tree_default_flags='--cc --always' ;;
+esac
+test -z "$diff_tree_flags" &&
+ diff_tree_flags="$diff_tree_default_flags"
+
+rev_list_args=$(git-rev-parse --sq --default HEAD --revs-only "$@") &&
+diff_tree_args=$(git-rev-parse --sq --no-revs --no-flags "$@") &&
+
+eval "git-rev-list $count $rev_list_args" |
+eval "git-diff-tree --stdin --pretty -r $diff_tree_flags $diff_tree_args" |
+LESS="$LESS -S" ${PAGER:-less}
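
The script above dispatches on $0, so one file serves as both whatchanged and show, with per-command default flags that can be overridden through the whatchanged.difftree and show.difftree configuration keys. A rough sketch of exercising both branches, assuming the old dashed git-rev-parse/git-rev-list/git-diff-tree commands are still on $PATH as they were when this example was written:

    # Install the script under names the case statement recognises.
    cp contrib/examples/git-whatchanged.sh ./git-whatchanged
    cp contrib/examples/git-whatchanged.sh ./git-show

    # *whatchanged branch: walks HEAD's history with -c -M --abbrev.
    sh ./git-whatchanged

    # *show branch: rev-list is limited to one commit, shown with --cc.
    sh ./git-show HEAD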
diff --git a/contrib/fast-import/git-import.perl b/contrib/fast-import/git-import.perl
index f9fef6db28..0891b9e366 100755
--- a/contrib/fast-import/git-import.perl
+++ b/contrib/fast-import/git-import.perl
@@ -7,7 +7,7 @@
use strict;
use File::Find;
-my $USAGE = 'Usage: git-import branch import-message';
+my $USAGE = 'usage: git-import branch import-message';
my $branch = shift or die "$USAGE\n";
my $message = shift or die "$USAGE\n";
diff --git a/contrib/fast-import/git-import.sh b/contrib/fast-import/git-import.sh
index 0ca7718d05..f8d803c5e2 100755
--- a/contrib/fast-import/git-import.sh
+++ b/contrib/fast-import/git-import.sh
@@ -5,7 +5,7 @@
# but is meant to be a simple fast-import example.
if [ -z "$1" -o -z "$2" ]; then
- echo "Usage: git-import branch import-message"
+ echo "usage: git-import branch import-message"
exit 1
fi
diff --git a/contrib/fast-import/git-p4 b/contrib/fast-import/git-p4
deleted file mode 100755
index e710219ca5..0000000000
--- a/contrib/fast-import/git-p4
+++ /dev/null
@@ -1,1928 +0,0 @@
-#!/usr/bin/env python
-#
-# git-p4.py -- A tool for bidirectional operation between a Perforce depot and git.
-#
-# Author: Simon Hausmann <simon@lst.de>
-# Copyright: 2007 Simon Hausmann <simon@lst.de>
-# 2007 Trolltech ASA
-# License: MIT <http://www.opensource.org/licenses/mit-license.php>
-#
-
-import optparse, sys, os, marshal, subprocess, shelve
-import tempfile, getopt, os.path, time, platform
-import re
-
-verbose = False
-
-
-def p4_build_cmd(cmd):
- """Build a suitable p4 command line.
-
- This consolidates building and returning a p4 command line into one
- location. It means that hooking into the environment, or other configuration
- can be done more easily.
- """
- real_cmd = "%s " % "p4"
-
- user = gitConfig("git-p4.user")
- if len(user) > 0:
- real_cmd += "-u %s " % user
-
- password = gitConfig("git-p4.password")
- if len(password) > 0:
- real_cmd += "-P %s " % password
-
- port = gitConfig("git-p4.port")
- if len(port) > 0:
- real_cmd += "-p %s " % port
-
- host = gitConfig("git-p4.host")
- if len(host) > 0:
- real_cmd += "-h %s " % host
-
- client = gitConfig("git-p4.client")
- if len(client) > 0:
- real_cmd += "-c %s " % client
-
- real_cmd += "%s" % (cmd)
- if verbose:
- print real_cmd
- return real_cmd
-
-def chdir(dir):
- if os.name == 'nt':
- os.environ['PWD']=dir
- os.chdir(dir)
-
-def die(msg):
- if verbose:
- raise Exception(msg)
- else:
- sys.stderr.write(msg + "\n")
- sys.exit(1)
-
-def write_pipe(c, str):
- if verbose:
- sys.stderr.write('Writing pipe: %s\n' % c)
-
- pipe = os.popen(c, 'w')
- val = pipe.write(str)
- if pipe.close():
- die('Command failed: %s' % c)
-
- return val
-
-def p4_write_pipe(c, str):
- real_cmd = p4_build_cmd(c)
- return write_pipe(real_cmd, str)
-
-def read_pipe(c, ignore_error=False):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % c)
-
- pipe = os.popen(c, 'rb')
- val = pipe.read()
- if pipe.close() and not ignore_error:
- die('Command failed: %s' % c)
-
- return val
-
-def p4_read_pipe(c, ignore_error=False):
- real_cmd = p4_build_cmd(c)
- return read_pipe(real_cmd, ignore_error)
-
-def read_pipe_lines(c):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % c)
- ## todo: check return status
- pipe = os.popen(c, 'rb')
- val = pipe.readlines()
- if pipe.close():
- die('Command failed: %s' % c)
-
- return val
-
-def p4_read_pipe_lines(c):
- """Specifically invoke p4 on the command supplied. """
- real_cmd = p4_build_cmd(c)
- return read_pipe_lines(real_cmd)
-
-def system(cmd):
- if verbose:
- sys.stderr.write("executing %s\n" % cmd)
- if os.system(cmd) != 0:
- die("command failed: %s" % cmd)
-
-def p4_system(cmd):
- """Specifically invoke p4 as the system command. """
- real_cmd = p4_build_cmd(cmd)
- return system(real_cmd)
-
-def isP4Exec(kind):
- """Determine if a Perforce 'kind' should have execute permission
-
- 'p4 help filetypes' gives a list of the types. If it starts with 'x',
- or x follows one of a few letters. Otherwise, if there is an 'x' after
- a plus sign, it is also executable"""
- return (re.search(r"(^[cku]?x)|\+.*x", kind) != None)
-
-def setP4ExecBit(file, mode):
- # Reopens an already open file and changes the execute bit to match
- # the execute bit setting in the passed in mode.
-
- p4Type = "+x"
-
- if not isModeExec(mode):
- p4Type = getP4OpenedType(file)
- p4Type = re.sub('^([cku]?)x(.*)', '\\1\\2', p4Type)
- p4Type = re.sub('(.*?\+.*?)x(.*?)', '\\1\\2', p4Type)
- if p4Type[-1] == "+":
- p4Type = p4Type[0:-1]
-
- p4_system("reopen -t %s %s" % (p4Type, file))
-
-def getP4OpenedType(file):
- # Returns the perforce file type for the given file.
-
- result = p4_read_pipe("opened %s" % file)
- match = re.match(".*\((.+)\)\r?$", result)
- if match:
- return match.group(1)
- else:
- die("Could not determine file type for %s (result: '%s')" % (file, result))
-
-def diffTreePattern():
- # This is a simple generator for the diff tree regex pattern. This could be
- # a class variable if this and parseDiffTreeEntry were a part of a class.
- pattern = re.compile(':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')
- while True:
- yield pattern
-
-def parseDiffTreeEntry(entry):
- """Parses a single diff tree entry into its component elements.
-
- See git-diff-tree(1) manpage for details about the format of the diff
- output. This method returns a dictionary with the following elements:
-
- src_mode - The mode of the source file
- dst_mode - The mode of the destination file
- src_sha1 - The sha1 for the source file
- dst_sha1 - The sha1 fr the destination file
- status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
- status_score - The score for the status (applicable for 'C' and 'R'
- statuses). This is None if there is no score.
- src - The path for the source file.
- dst - The path for the destination file. This is only present for
- copy or renames. If it is not present, this is None.
-
- If the pattern is not matched, None is returned."""
-
- match = diffTreePattern().next().match(entry)
- if match:
- return {
- 'src_mode': match.group(1),
- 'dst_mode': match.group(2),
- 'src_sha1': match.group(3),
- 'dst_sha1': match.group(4),
- 'status': match.group(5),
- 'status_score': match.group(6),
- 'src': match.group(7),
- 'dst': match.group(10)
- }
- return None
-
-def isModeExec(mode):
- # Returns True if the given git mode represents an executable file,
- # otherwise False.
- return mode[-3:] == "755"
-
-def isModeExecChanged(src_mode, dst_mode):
- return isModeExec(src_mode) != isModeExec(dst_mode)
-
-def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None):
- cmd = p4_build_cmd("-G %s" % (cmd))
- if verbose:
- sys.stderr.write("Opening pipe: %s\n" % cmd)
-
- # Use a temporary file to avoid deadlocks without
- # subprocess.communicate(), which would put another copy
- # of stdout into memory.
- stdin_file = None
- if stdin is not None:
- stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode)
- stdin_file.write(stdin)
- stdin_file.flush()
- stdin_file.seek(0)
-
- p4 = subprocess.Popen(cmd, shell=True,
- stdin=stdin_file,
- stdout=subprocess.PIPE)
-
- result = []
- try:
- while True:
- entry = marshal.load(p4.stdout)
- if cb is not None:
- cb(entry)
- else:
- result.append(entry)
- except EOFError:
- pass
- exitCode = p4.wait()
- if exitCode != 0:
- entry = {}
- entry["p4ExitCode"] = exitCode
- result.append(entry)
-
- return result
-
-def p4Cmd(cmd):
- list = p4CmdList(cmd)
- result = {}
- for entry in list:
- result.update(entry)
- return result;
-
-def p4Where(depotPath):
- if not depotPath.endswith("/"):
- depotPath += "/"
- depotPath = depotPath + "..."
- outputList = p4CmdList("where %s" % depotPath)
- output = None
- for entry in outputList:
- if "depotFile" in entry:
- if entry["depotFile"] == depotPath:
- output = entry
- break
- elif "data" in entry:
- data = entry.get("data")
- space = data.find(" ")
- if data[:space] == depotPath:
- output = entry
- break
- if output == None:
- return ""
- if output["code"] == "error":
- return ""
- clientPath = ""
- if "path" in output:
- clientPath = output.get("path")
- elif "data" in output:
- data = output.get("data")
- lastSpace = data.rfind(" ")
- clientPath = data[lastSpace + 1:]
-
- if clientPath.endswith("..."):
- clientPath = clientPath[:-3]
- return clientPath
-
-def currentGitBranch():
- return read_pipe("git name-rev HEAD").split(" ")[1].strip()
-
-def isValidGitDir(path):
- if (os.path.exists(path + "/HEAD")
- and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
- return True;
- return False
-
-def parseRevision(ref):
- return read_pipe("git rev-parse %s" % ref).strip()
-
-def extractLogMessageFromGitCommit(commit):
- logMessage = ""
-
- ## fixme: title is first line of commit, not 1st paragraph.
- foundTitle = False
- for log in read_pipe_lines("git cat-file commit %s" % commit):
- if not foundTitle:
- if len(log) == 1:
- foundTitle = True
- continue
-
- logMessage += log
- return logMessage
-
-def extractSettingsGitLog(log):
- values = {}
- for line in log.split("\n"):
- line = line.strip()
- m = re.search (r"^ *\[git-p4: (.*)\]$", line)
- if not m:
- continue
-
- assignments = m.group(1).split (':')
- for a in assignments:
- vals = a.split ('=')
- key = vals[0].strip()
- val = ('='.join (vals[1:])).strip()
- if val.endswith ('\"') and val.startswith('"'):
- val = val[1:-1]
-
- values[key] = val
-
- paths = values.get("depot-paths")
- if not paths:
- paths = values.get("depot-path")
- if paths:
- values['depot-paths'] = paths.split(',')
- return values
-
-def gitBranchExists(branch):
- proc = subprocess.Popen(["git", "rev-parse", branch],
- stderr=subprocess.PIPE, stdout=subprocess.PIPE);
- return proc.wait() == 0;
-
-_gitConfig = {}
-def gitConfig(key):
- if not _gitConfig.has_key(key):
- _gitConfig[key] = read_pipe("git config %s" % key, ignore_error=True).strip()
- return _gitConfig[key]
-
-def p4BranchesInGit(branchesAreInRemotes = True):
- branches = {}
-
- cmdline = "git rev-parse --symbolic "
- if branchesAreInRemotes:
- cmdline += " --remotes"
- else:
- cmdline += " --branches"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- ## only import to p4/
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- # strip off p4
- branch = re.sub ("^p4/", "", line)
-
- branches[branch] = parseRevision(line)
- return branches
-
-def findUpstreamBranchPoint(head = "HEAD"):
- branches = p4BranchesInGit()
- # map from depot-path to branch name
- branchByDepotPath = {}
- for branch in branches.keys():
- tip = branches[branch]
- log = extractLogMessageFromGitCommit(tip)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- branchByDepotPath[paths] = "remotes/p4/" + branch
-
- settings = None
- parent = 0
- while parent < 65535:
- commit = head + "~%s" % parent
- log = extractLogMessageFromGitCommit(commit)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- if branchByDepotPath.has_key(paths):
- return [branchByDepotPath[paths], settings]
-
- parent = parent + 1
-
- return ["", settings]
-
-def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
- if not silent:
- print ("Creating/updating branch(es) in %s based on origin branch(es)"
- % localRefPrefix)
-
- originPrefix = "origin/p4/"
-
- for line in read_pipe_lines("git rev-parse --symbolic --remotes"):
- line = line.strip()
- if (not line.startswith(originPrefix)) or line.endswith("HEAD"):
- continue
-
- headName = line[len(originPrefix):]
- remoteHead = localRefPrefix + headName
- originHead = line
-
- original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
- if (not original.has_key('depot-paths')
- or not original.has_key('change')):
- continue
-
- update = False
- if not gitBranchExists(remoteHead):
- if verbose:
- print "creating %s" % remoteHead
- update = True
- else:
- settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
- if settings.has_key('change') > 0:
- if settings['depot-paths'] == original['depot-paths']:
- originP4Change = int(original['change'])
- p4Change = int(settings['change'])
- if originP4Change > p4Change:
- print ("%s (%s) is newer than %s (%s). "
- "Updating p4 branch from origin."
- % (originHead, originP4Change,
- remoteHead, p4Change))
- update = True
- else:
- print ("Ignoring: %s was imported from %s while "
- "%s was imported from %s"
- % (originHead, ','.join(original['depot-paths']),
- remoteHead, ','.join(settings['depot-paths'])))
-
- if update:
- system("git update-ref %s %s" % (remoteHead, originHead))
-
-def originP4BranchesExist():
- return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
-
-def p4ChangesForPaths(depotPaths, changeRange):
- assert depotPaths
- output = p4_read_pipe_lines("changes " + ' '.join (["%s...%s" % (p, changeRange)
- for p in depotPaths]))
-
- changes = {}
- for line in output:
- changeNum = int(line.split(" ")[1])
- changes[changeNum] = True
-
- changelist = changes.keys()
- changelist.sort()
- return changelist
-
-class Command:
- def __init__(self):
- self.usage = "usage: %prog [options]"
- self.needsGit = True
-
-class P4Debug(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true",
- default=False),
- ]
- self.description = "A tool to debug the output of p4 -G."
- self.needsGit = False
- self.verbose = False
-
- def run(self, args):
- j = 0
- for output in p4CmdList(" ".join(args)):
- print 'Element: %d' % j
- j += 1
- print output
- return True
-
-class P4RollBack(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--local", dest="rollbackLocalBranches", action="store_true")
- ]
- self.description = "A tool to debug the multi-branch import. Don't use :)"
- self.verbose = False
- self.rollbackLocalBranches = False
-
- def run(self, args):
- if len(args) != 1:
- return False
- maxChange = int(args[0])
-
- if "p4ExitCode" in p4Cmd("changes -m 1"):
- die("Problems executing p4");
-
- if self.rollbackLocalBranches:
- refPrefix = "refs/heads/"
- lines = read_pipe_lines("git rev-parse --symbolic --branches")
- else:
- refPrefix = "refs/remotes/"
- lines = read_pipe_lines("git rev-parse --symbolic --remotes")
-
- for line in lines:
- if self.rollbackLocalBranches or (line.startswith("p4/") and line != "p4/HEAD\n"):
- line = line.strip()
- ref = refPrefix + line
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- changed = False
-
- if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
- for p in depotPaths]))) == 0:
- print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
- system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
- continue
-
- while change and int(change) > maxChange:
- changed = True
- if self.verbose:
- print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
- system("git update-ref %s \"%s^\"" % (ref, ref))
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- if changed:
- print "%s rewound to %s" % (ref, change)
-
- return True
-
-class P4Submit(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--origin", dest="origin"),
- optparse.make_option("-M", dest="detectRename", action="store_true"),
- ]
- self.description = "Submit changes from git to the perforce depot."
- self.usage += " [name of git branch to submit into perforce depot]"
- self.interactive = True
- self.origin = ""
- self.detectRename = False
- self.verbose = False
- self.isWindows = (platform.system() == "Windows")
-
- def check(self):
- if len(p4CmdList("opened ...")) > 0:
- die("You have files opened with perforce! Close them before starting the sync.")
-
- # replaces everything between 'Description:' and the next P4 submit template field with the
- # commit message
- def prepareLogMessage(self, template, message):
- result = ""
-
- inDescriptionSection = False
-
- for line in template.split("\n"):
- if line.startswith("#"):
- result += line + "\n"
- continue
-
- if inDescriptionSection:
- if line.startswith("Files:"):
- inDescriptionSection = False
- else:
- continue
- else:
- if line.startswith("Description:"):
- inDescriptionSection = True
- line += "\n"
- for messageLine in message.split("\n"):
- line += "\t" + messageLine + "\n"
-
- result += line + "\n"
-
- return result
-
- def prepareSubmitTemplate(self):
- # remove lines in the Files section that show changes to files outside the depot path we're committing into
- template = ""
- inFilesSection = False
- for line in p4_read_pipe_lines("change -o"):
- if line.endswith("\r\n"):
- line = line[:-2] + "\n"
- if inFilesSection:
- if line.startswith("\t"):
- # path starts and ends with a tab
- path = line[1:]
- lastTab = path.rfind("\t")
- if lastTab != -1:
- path = path[:lastTab]
- if not path.startswith(self.depotPath):
- continue
- else:
- inFilesSection = False
- else:
- if line.startswith("Files:"):
- inFilesSection = True
-
- template += line
-
- return template
-
- def applyCommit(self, id):
- print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
- diffOpts = ("", "-M")[self.detectRename]
- diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
- filesToAdd = set()
- filesToDelete = set()
- editedFiles = set()
- filesToChangeExecBit = {}
- for line in diff:
- diff = parseDiffTreeEntry(line)
- modifier = diff['status']
- path = diff['src']
- if modifier == "M":
- p4_system("edit \"%s\"" % path)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- filesToChangeExecBit[path] = diff['dst_mode']
- editedFiles.add(path)
- elif modifier == "A":
- filesToAdd.add(path)
- filesToChangeExecBit[path] = diff['dst_mode']
- if path in filesToDelete:
- filesToDelete.remove(path)
- elif modifier == "D":
- filesToDelete.add(path)
- if path in filesToAdd:
- filesToAdd.remove(path)
- elif modifier == "R":
- src, dest = diff['src'], diff['dst']
- p4_system("integrate -Dt \"%s\" \"%s\"" % (src, dest))
- p4_system("edit \"%s\"" % (dest))
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
- editedFiles.add(dest)
- filesToDelete.add(src)
- else:
- die("unknown modifier %s for %s" % (modifier, path))
-
- diffcmd = "git format-patch -k --stdout \"%s^\"..\"%s\"" % (id, id)
- patchcmd = diffcmd + " | git apply "
- tryPatchCmd = patchcmd + "--check -"
- applyPatchCmd = patchcmd + "--check --apply -"
-
- if os.system(tryPatchCmd) != 0:
- print "Unfortunately applying the change failed!"
- print "What do you want to do?"
- response = "x"
- while response != "s" and response != "a" and response != "w":
- response = raw_input("[s]kip this patch / [a]pply the patch forcibly "
- "and with .rej files / [w]rite the patch to a file (patch.txt) ")
- if response == "s":
- print "Skipping! Good luck with the next patches..."
- for f in editedFiles:
- p4_system("revert \"%s\"" % f);
- for f in filesToAdd:
- system("rm %s" %f)
- return
- elif response == "a":
- os.system(applyPatchCmd)
- if len(filesToAdd) > 0:
- print "You may also want to call p4 add on the following files:"
- print " ".join(filesToAdd)
- if len(filesToDelete):
- print "The following files should be scheduled for deletion with p4 delete:"
- print " ".join(filesToDelete)
- die("Please resolve and submit the conflict manually and "
- + "continue afterwards with git-p4 submit --continue")
- elif response == "w":
- system(diffcmd + " > patch.txt")
- print "Patch saved to patch.txt in %s !" % self.clientPath
- die("Please resolve and submit the conflict manually and "
- "continue afterwards with git-p4 submit --continue")
-
- system(applyPatchCmd)
-
- for f in filesToAdd:
- p4_system("add \"%s\"" % f)
- for f in filesToDelete:
- p4_system("revert \"%s\"" % f)
- p4_system("delete \"%s\"" % f)
-
- # Set/clear executable bits
- for f in filesToChangeExecBit.keys():
- mode = filesToChangeExecBit[f]
- setP4ExecBit(f, mode)
-
- logMessage = extractLogMessageFromGitCommit(id)
- logMessage = logMessage.strip()
-
- template = self.prepareSubmitTemplate()
-
- if self.interactive:
- submitTemplate = self.prepareLogMessage(template, logMessage)
- if os.environ.has_key("P4DIFF"):
- del(os.environ["P4DIFF"])
- diff = p4_read_pipe("diff -du ...")
-
- newdiff = ""
- for newFile in filesToAdd:
- newdiff += "==== new file ====\n"
- newdiff += "--- /dev/null\n"
- newdiff += "+++ %s\n" % newFile
- f = open(newFile, "r")
- for line in f.readlines():
- newdiff += "+" + line
- f.close()
-
- separatorLine = "######## everything below this line is just the diff #######\n"
-
- [handle, fileName] = tempfile.mkstemp()
- tmpFile = os.fdopen(handle, "w+")
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\n", "\r\n")
- separatorLine = separatorLine.replace("\n", "\r\n")
- newdiff = newdiff.replace("\n", "\r\n")
- tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
- tmpFile.close()
- mtime = os.stat(fileName).st_mtime
- defaultEditor = "vi"
- if platform.system() == "Windows":
- defaultEditor = "notepad"
- if os.environ.has_key("P4EDITOR"):
- editor = os.environ.get("P4EDITOR")
- else:
- editor = os.environ.get("EDITOR", defaultEditor);
- system(editor + " " + fileName)
-
- response = "y"
- if os.stat(fileName).st_mtime <= mtime:
- response = "x"
- while response != "y" and response != "n":
- response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
-
- if response == "y":
- tmpFile = open(fileName, "rb")
- message = tmpFile.read()
- tmpFile.close()
- submitTemplate = message[:message.index(separatorLine)]
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\r\n", "\n")
- p4_write_pipe("submit -i", submitTemplate)
- else:
- for f in editedFiles:
- p4_system("revert \"%s\"" % f);
- for f in filesToAdd:
- p4_system("revert \"%s\"" % f);
- system("rm %s" %f)
-
- os.remove(fileName)
- else:
- fileName = "submit.txt"
- file = open(fileName, "w+")
- file.write(self.prepareLogMessage(template, logMessage))
- file.close()
- print ("Perforce submit template written as %s. "
- + "Please review/edit and then use p4 submit -i < %s to submit directly!"
- % (fileName, fileName))
-
- def run(self, args):
- if len(args) == 0:
- self.master = currentGitBranch()
- if len(self.master) == 0 or not gitBranchExists("refs/heads/%s" % self.master):
- die("Detecting current git branch failed!")
- elif len(args) == 1:
- self.master = args[0]
- else:
- return False
-
- allowSubmit = gitConfig("git-p4.allowSubmit")
- if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
- die("%s is not in git-p4.allowSubmit" % self.master)
-
- [upstream, settings] = findUpstreamBranchPoint()
- self.depotPath = settings['depot-paths'][0]
- if len(self.origin) == 0:
- self.origin = upstream
-
- if self.verbose:
- print "Origin branch is " + self.origin
-
- if len(self.depotPath) == 0:
- print "Internal error: cannot locate perforce depot path from existing branches"
- sys.exit(128)
-
- self.clientPath = p4Where(self.depotPath)
-
- if len(self.clientPath) == 0:
- print "Error: Cannot locate perforce checkout of %s in client view" % self.depotPath
- sys.exit(128)
-
- print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
- self.oldWorkingDirectory = os.getcwd()
-
- chdir(self.clientPath)
- print "Syncronizing p4 checkout..."
- p4_system("sync ...")
-
- self.check()
-
- commits = []
- for line in read_pipe_lines("git rev-list --no-merges %s..%s" % (self.origin, self.master)):
- commits.append(line.strip())
- commits.reverse()
-
- while len(commits) > 0:
- commit = commits[0]
- commits = commits[1:]
- self.applyCommit(commit)
- if not self.interactive:
- break
-
- if len(commits) == 0:
- print "All changes applied!"
- chdir(self.oldWorkingDirectory)
-
- sync = P4Sync()
- sync.run([])
-
- rebase = P4Rebase()
- rebase.rebase()
-
- return True
-
-class P4Sync(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--branch", dest="branch"),
- optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"),
- optparse.make_option("--changesfile", dest="changesFile"),
- optparse.make_option("--silent", dest="silent", action="store_true"),
- optparse.make_option("--detect-labels", dest="detectLabels", action="store_true"),
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--import-local", dest="importIntoRemotes", action="store_false",
- help="Import into refs/heads/ , not refs/remotes"),
- optparse.make_option("--max-changes", dest="maxChanges"),
- optparse.make_option("--keep-path", dest="keepRepoPath", action='store_true',
- help="Keep entire BRANCH/DIR/SUBDIR prefix during import"),
- optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
- help="Only sync files that are included in the Perforce Client Spec")
- ]
- self.description = """Imports from Perforce into a git repository.\n
- example:
- //depot/my/project/ -- to import the current head
- //depot/my/project/@all -- to import everything
- //depot/my/project/@1,6 -- to import only from revision 1 to 6
-
- (a ... is not needed in the path p4 specification, it's added implicitly)"""
-
- self.usage += " //depot/path[@revRange]"
- self.silent = False
- self.createdBranches = set()
- self.committedChanges = set()
- self.branch = ""
- self.detectBranches = False
- self.detectLabels = False
- self.changesFile = ""
- self.syncWithOrigin = True
- self.verbose = False
- self.importIntoRemotes = True
- self.maxChanges = ""
- self.isWindows = (platform.system() == "Windows")
- self.keepRepoPath = False
- self.depotPaths = None
- self.p4BranchesInGit = []
- self.cloneExclude = []
- self.useClientSpec = False
- self.clientSpecDirs = []
-
- if gitConfig("git-p4.syncFromOrigin") == "false":
- self.syncWithOrigin = False
-
- def extractFilesFromCommit(self, commit):
- self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
- for path in self.cloneExclude]
- files = []
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
-
- if [p for p in self.cloneExclude
- if path.startswith (p)]:
- found = False
- else:
- found = [p for p in self.depotPaths
- if path.startswith (p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- files.append(file)
- fnum = fnum + 1
- return files
-
- def stripRepoPath(self, path, prefixes):
- if self.keepRepoPath:
- prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])]
-
- for p in prefixes:
- if path.startswith(p):
- path = path[len(p):]
-
- return path
-
- def splitFilesIntoBranches(self, commit):
- branches = {}
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
- found = [p for p in self.depotPaths
- if path.startswith (p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- fnum = fnum + 1
-
- relPath = self.stripRepoPath(path, self.depotPaths)
-
- for branch in self.knownBranches.keys():
-
- # add a trailing slash so that a commit into qt/4.2foo doesn't end up in qt/4.2
- if relPath.startswith(branch + "/"):
- if branch not in branches:
- branches[branch] = []
- branches[branch].append(file)
- break
-
- return branches
-
- # output one file from the P4 stream
- # - helper for streamP4Files
-
- def streamOneP4File(self, file, contents):
- if file["type"] == "apple":
- print "\nfile %s is a strange apple file that forks. Ignoring" % \
- file['depotFile']
- return
-
- relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes)
- if verbose:
- sys.stderr.write("%s\n" % relPath)
-
- mode = "644"
- if isP4Exec(file["type"]):
- mode = "755"
- elif file["type"] == "symlink":
- mode = "120000"
- # p4 print on a symlink contains "target\n", so strip it off
- last = contents.pop()
- last = last[:-1]
- contents.append(last)
-
- if self.isWindows and file["type"].endswith("text"):
- mangled = []
- for data in contents:
- data = data.replace("\r\n", "\n")
- mangled.append(data)
- contents = mangled
-
- if file['type'] in ('text+ko', 'unicode+ko', 'binary+ko'):
- contents = map(lambda text: re.sub(r'(?i)\$(Id|Header):[^$]*\$',r'$\1$', text), contents)
- elif file['type'] in ('text+k', 'ktext', 'kxtext', 'unicode+k', 'binary+k'):
- contents = map(lambda text: re.sub(r'\$(Id|Header|Author|Date|DateTime|Change|File|Revision):[^$\n]*\$',r'$\1$', text), contents)
-
- self.gitStream.write("M %s inline %s\n" % (mode, relPath))
-
- # total length...
- length = 0
- for d in contents:
- length = length + len(d)
-
- self.gitStream.write("data %d\n" % length)
- for d in contents:
- self.gitStream.write(d)
- self.gitStream.write("\n")
-
- def streamOneP4Deletion(self, file):
- relPath = self.stripRepoPath(file['path'], self.branchPrefixes)
- if verbose:
- sys.stderr.write("delete %s\n" % relPath)
- self.gitStream.write("D %s\n" % relPath)
-
- # handle another chunk of streaming data
- def streamP4FilesCb(self, marshalled):
-
- if marshalled.has_key('depotFile') and self.stream_have_file_info:
- # start of a new file - output the old one first
- self.streamOneP4File(self.stream_file, self.stream_contents)
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # pick up the new file information... for the
- # 'data' field we need to append to our array
- for k in marshalled.keys():
- if k == 'data':
- self.stream_contents.append(marshalled['data'])
- else:
- self.stream_file[k] = marshalled[k]
-
- self.stream_have_file_info = True
-
- # Stream directly from "p4 files" into "git fast-import"
- def streamP4Files(self, files):
- filesForCommit = []
- filesToRead = []
- filesToDelete = []
-
- for f in files:
- includeFile = True
- for val in self.clientSpecDirs:
- if f['path'].startswith(val[0]):
- if val[1] <= 0:
- includeFile = False
- break
-
- if includeFile:
- filesForCommit.append(f)
- if f['action'] not in ('delete', 'purge'):
- filesToRead.append(f)
- else:
- filesToDelete.append(f)
-
- # deleted files...
- for f in filesToDelete:
- self.streamOneP4Deletion(f)
-
- if len(filesToRead) > 0:
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # curry self argument
- def streamP4FilesCbSelf(entry):
- self.streamP4FilesCb(entry)
-
- p4CmdList("-x - print",
- '\n'.join(['%s#%s' % (f['path'], f['rev'])
- for f in filesToRead]),
- cb=streamP4FilesCbSelf)
-
- # do the last chunk
- if self.stream_file.has_key('depotFile'):
- self.streamOneP4File(self.stream_file, self.stream_contents)
-
- def commit(self, details, files, branch, branchPrefixes, parent = ""):
- epoch = details["time"]
- author = details["user"]
- self.branchPrefixes = branchPrefixes
-
- if self.verbose:
- print "commit into %s" % branch
-
- # start with reading files; if that fails, we should not
- # create a commit.
- new_files = []
- for f in files:
- if [p for p in branchPrefixes if f['path'].startswith(p)]:
- new_files.append (f)
- else:
- sys.stderr.write("Ignoring file outside of prefix: %s\n" % path)
-
- self.gitStream.write("commit %s\n" % branch)
-# gitStream.write("mark :%s\n" % details["change"])
- self.committedChanges.add(int(details["change"]))
- committer = ""
- if author not in self.users:
- self.getUserMapFromPerforceServer()
- if author in self.users:
- committer = "%s %s %s" % (self.users[author], epoch, self.tz)
- else:
- committer = "%s <a@b> %s %s" % (author, epoch, self.tz)
-
- self.gitStream.write("committer %s\n" % committer)
-
- self.gitStream.write("data <<EOT\n")
- self.gitStream.write(details["desc"])
- self.gitStream.write("\n[git-p4: depot-paths = \"%s\": change = %s"
- % (','.join (branchPrefixes), details["change"]))
- if len(details['options']) > 0:
- self.gitStream.write(": options = %s" % details['options'])
- self.gitStream.write("]\nEOT\n\n")
-
- if len(parent) > 0:
- if self.verbose:
- print "parent %s" % parent
- self.gitStream.write("from %s\n" % parent)
-
- self.streamP4Files(new_files)
- self.gitStream.write("\n")
-
- change = int(details["change"])
-
- if self.labels.has_key(change):
- label = self.labels[change]
- labelDetails = label[0]
- labelRevisions = label[1]
- if self.verbose:
- print "Change %s is labelled %s" % (change, labelDetails)
-
- files = p4CmdList("files " + ' '.join (["%s...@%s" % (p, change)
- for p in branchPrefixes]))
-
- if len(files) == len(labelRevisions):
-
- cleanedFiles = {}
- for info in files:
- if info["action"] in ("delete", "purge"):
- continue
- cleanedFiles[info["depotFile"]] = info["rev"]
-
- if cleanedFiles == labelRevisions:
- self.gitStream.write("tag tag_%s\n" % labelDetails["label"])
- self.gitStream.write("from %s\n" % branch)
-
- owner = labelDetails["Owner"]
- tagger = ""
- if author in self.users:
- tagger = "%s %s %s" % (self.users[owner], epoch, self.tz)
- else:
- tagger = "%s <a@b> %s %s" % (owner, epoch, self.tz)
- self.gitStream.write("tagger %s\n" % tagger)
- self.gitStream.write("data <<EOT\n")
- self.gitStream.write(labelDetails["Description"])
- self.gitStream.write("EOT\n\n")
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: files do not match."
- % (labelDetails["label"], change))
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: file count is different."
- % (labelDetails["label"], change))
-
- def getUserCacheFilename(self):
- home = os.environ.get("HOME", os.environ.get("USERPROFILE"))
- return home + "/.gitp4-usercache.txt"
-
- def getUserMapFromPerforceServer(self):
- if self.userMapFromPerforceServer:
- return
- self.users = {}
-
- for output in p4CmdList("users"):
- if not output.has_key("User"):
- continue
- self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
-
-
- s = ''
- for (key, val) in self.users.items():
- s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
-
- open(self.getUserCacheFilename(), "wb").write(s)
- self.userMapFromPerforceServer = True
-
- def loadUserMapFromCache(self):
- self.users = {}
- self.userMapFromPerforceServer = False
- try:
- cache = open(self.getUserCacheFilename(), "rb")
- lines = cache.readlines()
- cache.close()
- for line in lines:
- entry = line.strip().split("\t")
- self.users[entry[0]] = entry[1]
- except IOError:
- self.getUserMapFromPerforceServer()
-
- def getLabels(self):
- self.labels = {}
-
- l = p4CmdList("labels %s..." % ' '.join (self.depotPaths))
- if len(l) > 0 and not self.silent:
- print "Finding files belonging to labels in %s" % `self.depotPaths`
-
- for output in l:
- label = output["label"]
- revisions = {}
- newestChange = 0
- if self.verbose:
- print "Querying files for label %s" % label
- for file in p4CmdList("files "
- + ' '.join (["%s...@%s" % (p, label)
- for p in self.depotPaths])):
- revisions[file["depotFile"]] = file["rev"]
- change = int(file["change"])
- if change > newestChange:
- newestChange = change
-
- self.labels[newestChange] = [output, revisions]
-
- if self.verbose:
- print "Label changes: %s" % self.labels.keys()
-
- def guessProjectName(self):
- for p in self.depotPaths:
- if p.endswith("/"):
- p = p[:-1]
- p = p[p.strip().rfind("/") + 1:]
- if not p.endswith("/"):
- p += "/"
- return p
-
- def getBranchMapping(self):
- lostAndFoundBranches = set()
-
- for info in p4CmdList("branches"):
- details = p4Cmd("branch -o %s" % info["branch"])
- viewIdx = 0
- while details.has_key("View%s" % viewIdx):
- paths = details["View%s" % viewIdx].split(" ")
- viewIdx = viewIdx + 1
- # require standard //depot/foo/... //depot/bar/... mapping
- if len(paths) != 2 or not paths[0].endswith("/...") or not paths[1].endswith("/..."):
- continue
- source = paths[0]
- destination = paths[1]
- ## HACK
- if source.startswith(self.depotPaths[0]) and destination.startswith(self.depotPaths[0]):
- source = source[len(self.depotPaths[0]):-4]
- destination = destination[len(self.depotPaths[0]):-4]
-
- if destination in self.knownBranches:
- if not self.silent:
- print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
- print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
- continue
-
- self.knownBranches[destination] = source
-
- lostAndFoundBranches.discard(destination)
-
- if source not in self.knownBranches:
- lostAndFoundBranches.add(source)
-
-
- for branch in lostAndFoundBranches:
- self.knownBranches[branch] = branch
-
- def getBranchMappingFromGitBranches(self):
- branches = p4BranchesInGit(self.importIntoRemotes)
- for branch in branches.keys():
- if branch == "master":
- branch = "main"
- else:
- branch = branch[len(self.projectName):]
- self.knownBranches[branch] = branch
-
- def listExistingP4GitBranches(self):
- # branches holds mapping from name to commit
- branches = p4BranchesInGit(self.importIntoRemotes)
- self.p4BranchesInGit = branches.keys()
- for branch in branches.keys():
- self.initialParents[self.refPrefix + branch] = branches[branch]
-
- def updateOptionDict(self, d):
- option_keys = {}
- if self.keepRepoPath:
- option_keys['keepRepoPath'] = 1
-
- d["options"] = ' '.join(sorted(option_keys.keys()))
-
- def readOptions(self, d):
- self.keepRepoPath = (d.has_key('options')
- and ('keepRepoPath' in d['options']))
-
- def gitRefForBranch(self, branch):
- if branch == "main":
- return self.refPrefix + "master"
-
- if len(branch) <= 0:
- return branch
-
- return self.refPrefix + self.projectName + branch
-
- def gitCommitByP4Change(self, ref, change):
- if self.verbose:
- print "looking in ref " + ref + " for change %s using bisect..." % change
-
- earliestCommit = ""
- latestCommit = parseRevision(ref)
-
- while True:
- if self.verbose:
- print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
- next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
- if len(next) == 0:
- if self.verbose:
- print "argh"
- return ""
- log = extractLogMessageFromGitCommit(next)
- settings = extractSettingsGitLog(log)
- currentChange = int(settings['change'])
- if self.verbose:
- print "current change %s" % currentChange
-
- if currentChange == change:
- if self.verbose:
- print "found %s" % next
- return next
-
- if currentChange < change:
- earliestCommit = "^%s" % next
- else:
- latestCommit = "%s" % next
-
- return ""
-
- def importNewBranch(self, branch, maxChange):
- # make fast-import flush all changes to disk and update the refs using the checkpoint
- # command so that we can try to find the branch parent in the git history
- self.gitStream.write("checkpoint\n\n");
- self.gitStream.flush();
- branchPrefix = self.depotPaths[0] + branch + "/"
- range = "@1,%s" % maxChange
- #print "prefix" + branchPrefix
- changes = p4ChangesForPaths([branchPrefix], range)
- if len(changes) <= 0:
- return False
- firstChange = changes[0]
- #print "first change in branch: %s" % firstChange
- sourceBranch = self.knownBranches[branch]
- sourceDepotPath = self.depotPaths[0] + sourceBranch
- sourceRef = self.gitRefForBranch(sourceBranch)
- #print "source " + sourceBranch
-
- branchParentChange = int(p4Cmd("changes -m 1 %s...@1,%s" % (sourceDepotPath, firstChange))["change"])
- #print "branch parent: %s" % branchParentChange
- gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
- if len(gitParent) > 0:
- self.initialParents[self.gitRefForBranch(branch)] = gitParent
- #print "parent git commit: %s" % gitParent
-
- self.importChanges(changes)
- return True
-
- def importChanges(self, changes):
- cnt = 1
- for change in changes:
- description = p4Cmd("describe %s" % change)
- self.updateOptionDict(description)
-
- if not self.silent:
- sys.stdout.write("\rImporting revision %s (%s%%)" % (change, cnt * 100 / len(changes)))
- sys.stdout.flush()
- cnt = cnt + 1
-
- try:
- if self.detectBranches:
- branches = self.splitFilesIntoBranches(description)
- for branch in branches.keys():
- ## HACK --hwn
- branchPrefix = self.depotPaths[0] + branch + "/"
-
- parent = ""
-
- filesForCommit = branches[branch]
-
- if self.verbose:
- print "branch is %s" % branch
-
- self.updatedBranches.add(branch)
-
- if branch not in self.createdBranches:
- self.createdBranches.add(branch)
- parent = self.knownBranches[branch]
- if parent == branch:
- parent = ""
- else:
- fullBranch = self.projectName + branch
- if fullBranch not in self.p4BranchesInGit:
- if not self.silent:
- print("\n Importing new branch %s" % fullBranch);
- if self.importNewBranch(branch, change - 1):
- parent = ""
- self.p4BranchesInGit.append(fullBranch)
- if not self.silent:
- print("\n Resuming with change %s" % change);
-
- if self.verbose:
- print "parent determined through known branches: %s" % parent
-
- branch = self.gitRefForBranch(branch)
- parent = self.gitRefForBranch(parent)
-
- if self.verbose:
- print "looking for initial parent for %s; current parent is %s" % (branch, parent)
-
- if len(parent) == 0 and branch in self.initialParents:
- parent = self.initialParents[branch]
- del self.initialParents[branch]
-
- self.commit(description, filesForCommit, branch, [branchPrefix], parent)
- else:
- files = self.extractFilesFromCommit(description)
- self.commit(description, files, self.branch, self.depotPaths,
- self.initialParent)
- self.initialParent = ""
- except IOError:
- print self.gitError.read()
- sys.exit(1)
-
- def importHeadRevision(self, revision):
- print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
-
- details = { "user" : "git perforce import user", "time" : int(time.time()) }
- details["desc"] = ("Initial import of %s from the state at revision %s"
- % (' '.join(self.depotPaths), revision))
- details["change"] = revision
- newestRevision = 0
-
- fileCnt = 0
- for info in p4CmdList("files "
- + ' '.join(["%s...%s"
- % (p, revision)
- for p in self.depotPaths])):
-
- if info['code'] == 'error':
- sys.stderr.write("p4 returned an error: %s\n"
- % info['data'])
- sys.exit(1)
-
-
- change = int(info["change"])
- if change > newestRevision:
- newestRevision = change
-
- if info["action"] in ("delete", "purge"):
- # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
- #fileCnt = fileCnt + 1
- continue
-
- for prop in ["depotFile", "rev", "action", "type" ]:
- details["%s%s" % (prop, fileCnt)] = info[prop]
-
- fileCnt = fileCnt + 1
-
- details["change"] = newestRevision
- self.updateOptionDict(details)
- try:
- self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths)
- except IOError:
- print "IO error with git fast-import. Is your git version recent enough?"
- print self.gitError.read()
-
-
- def getClientSpec(self):
- specList = p4CmdList( "client -o" )
- temp = {}
- for entry in specList:
- for k,v in entry.iteritems():
- if k.startswith("View"):
- if v.startswith('"'):
- start = 1
- else:
- start = 0
- index = v.find("...")
- v = v[start:index]
- if v.startswith("-"):
- v = v[1:]
- temp[v] = -len(v)
- else:
- temp[v] = len(v)
- self.clientSpecDirs = temp.items()
- self.clientSpecDirs.sort( lambda x, y: abs( y[1] ) - abs( x[1] ) )
-
- def run(self, args):
- self.depotPaths = []
- self.changeRange = ""
- self.initialParent = ""
- self.previousDepotPaths = []
-
- # map from branch depot path to parent branch
- self.knownBranches = {}
- self.initialParents = {}
- self.hasOrigin = originP4BranchesExist()
- if not self.syncWithOrigin:
- self.hasOrigin = False
-
- if self.importIntoRemotes:
- self.refPrefix = "refs/remotes/p4/"
- else:
- self.refPrefix = "refs/heads/p4/"
-
- if self.syncWithOrigin and self.hasOrigin:
- if not self.silent:
- print "Syncing with origin first by calling git fetch origin"
- system("git fetch origin")
-
- if len(self.branch) == 0:
- self.branch = self.refPrefix + "master"
- if gitBranchExists("refs/heads/p4") and self.importIntoRemotes:
- system("git update-ref %s refs/heads/p4" % self.branch)
- system("git branch -D p4");
- # create it /after/ importing, when master exists
- if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch):
- system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch))
-
- if self.useClientSpec or gitConfig("git-p4.useclientspec") == "true":
- self.getClientSpec()
-
- # TODO: should always look at previous commits,
- # merge with previous imports, if possible.
- if args == []:
- if self.hasOrigin:
- createOrUpdateBranchesFromOrigin(self.refPrefix, self.silent)
- self.listExistingP4GitBranches()
-
- if len(self.p4BranchesInGit) > 1:
- if not self.silent:
- print "Importing from/into multiple branches"
- self.detectBranches = True
-
- if self.verbose:
- print "branches: %s" % self.p4BranchesInGit
-
- p4Change = 0
- for branch in self.p4BranchesInGit:
- logMsg = extractLogMessageFromGitCommit(self.refPrefix + branch)
-
- settings = extractSettingsGitLog(logMsg)
-
- self.readOptions(settings)
- if (settings.has_key('depot-paths')
- and settings.has_key ('change')):
- change = int(settings['change']) + 1
- p4Change = max(p4Change, change)
-
- depotPaths = sorted(settings['depot-paths'])
- if self.previousDepotPaths == []:
- self.previousDepotPaths = depotPaths
- else:
- paths = []
- for (prev, cur) in zip(self.previousDepotPaths, depotPaths):
- for i in range(0, min(len(cur), len(prev))):
- if cur[i] <> prev[i]:
- i = i - 1
- break
-
- paths.append (cur[:i + 1])
-
- self.previousDepotPaths = paths
-
- if p4Change > 0:
- self.depotPaths = sorted(self.previousDepotPaths)
- self.changeRange = "@%s,#head" % p4Change
- if not self.detectBranches:
- self.initialParent = parseRevision(self.branch)
- if not self.silent and not self.detectBranches:
- print "Performing incremental import into %s git branch" % self.branch
-
- if not self.branch.startswith("refs/"):
- self.branch = "refs/heads/" + self.branch
-
- if len(args) == 0 and self.depotPaths:
- if not self.silent:
- print "Depot paths: %s" % ' '.join(self.depotPaths)
- else:
- if self.depotPaths and self.depotPaths != args:
- print ("previous import used depot path %s and now %s was specified. "
- "This doesn't work!" % (' '.join (self.depotPaths),
- ' '.join (args)))
- sys.exit(1)
-
- self.depotPaths = sorted(args)
-
- revision = ""
- self.users = {}
-
- newPaths = []
- for p in self.depotPaths:
- if p.find("@") != -1:
- atIdx = p.index("@")
- self.changeRange = p[atIdx:]
- if self.changeRange == "@all":
- self.changeRange = ""
- elif ',' not in self.changeRange:
- revision = self.changeRange
- self.changeRange = ""
- p = p[:atIdx]
- elif p.find("#") != -1:
- hashIdx = p.index("#")
- revision = p[hashIdx:]
- p = p[:hashIdx]
- elif self.previousDepotPaths == []:
- revision = "#head"
-
- p = re.sub ("\.\.\.$", "", p)
- if not p.endswith("/"):
- p += "/"
-
- newPaths.append(p)
-
- self.depotPaths = newPaths
-
-
- self.loadUserMapFromCache()
- self.labels = {}
- if self.detectLabels:
- self.getLabels();
-
- if self.detectBranches:
- ## FIXME - what's a P4 projectName ?
- self.projectName = self.guessProjectName()
-
- if self.hasOrigin:
- self.getBranchMappingFromGitBranches()
- else:
- self.getBranchMapping()
- if self.verbose:
- print "p4-git branches: %s" % self.p4BranchesInGit
- print "initial parents: %s" % self.initialParents
- for b in self.p4BranchesInGit:
- if b != "master":
-
- ## FIXME
- b = b[len(self.projectName):]
- self.createdBranches.add(b)
-
- self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
-
- importProcess = subprocess.Popen(["git", "fast-import"],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE);
- self.gitOutput = importProcess.stdout
- self.gitStream = importProcess.stdin
- self.gitError = importProcess.stderr
-
- if revision:
- self.importHeadRevision(revision)
- else:
- changes = []
-
- if len(self.changesFile) > 0:
- output = open(self.changesFile).readlines()
- changeSet = set()
- for line in output:
- changeSet.add(int(line))
-
- for change in changeSet:
- changes.append(change)
-
- changes.sort()
- else:
- if self.verbose:
- print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
- self.changeRange)
- changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
-
- if len(self.maxChanges) > 0:
- changes = changes[:min(int(self.maxChanges), len(changes))]
-
- if len(changes) == 0:
- if not self.silent:
- print "No changes to import!"
- return True
-
- if not self.silent and not self.detectBranches:
- print "Import destination: %s" % self.branch
-
- self.updatedBranches = set()
-
- self.importChanges(changes)
-
- if not self.silent:
- print ""
- if len(self.updatedBranches) > 0:
- sys.stdout.write("Updated branches: ")
- for b in self.updatedBranches:
- sys.stdout.write("%s " % b)
- sys.stdout.write("\n")
-
- self.gitStream.close()
- if importProcess.wait() != 0:
- die("fast-import failed: %s" % self.gitError.read())
- self.gitOutput.close()
- self.gitError.close()
-
- return True
-
-class P4Rebase(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Fetches the latest revision from perforce and "
- + "rebases the current work (branch) against it")
- self.verbose = False
-
- def run(self, args):
- sync = P4Sync()
- sync.run([])
-
- return self.rebase()
-
- def rebase(self):
- if os.system("git update-index --refresh") != 0:
- die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
- if len(read_pipe("git diff-index HEAD --")) > 0:
- die("You have uncommited changes. Please commit them before rebasing or stash them away with git stash.");
-
- [upstream, settings] = findUpstreamBranchPoint()
- if len(upstream) == 0:
- die("Cannot find upstream branchpoint for rebase")
-
- # the branchpoint may be p4/foo~3, so strip off the parent
- upstream = re.sub("~[0-9]+$", "", upstream)
-
- print "Rebasing the current branch onto %s" % upstream
- oldHead = read_pipe("git rev-parse HEAD").strip()
- system("git rebase %s" % upstream)
- system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
- return True
-
-class P4Clone(P4Sync):
- def __init__(self):
- P4Sync.__init__(self)
- self.description = "Creates a new git repository and imports from Perforce into it"
- self.usage = "usage: %prog [options] //depot/path[@revRange]"
- self.options += [
- optparse.make_option("--destination", dest="cloneDestination",
- action='store', default=None,
- help="where to leave result of the clone"),
- optparse.make_option("-/", dest="cloneExclude",
- action="append", type="string",
- help="exclude depot path")
- ]
- self.cloneDestination = None
- self.needsGit = False
-
- # This is required for the "append" cloneExclude action
- def ensure_value(self, attr, value):
- if not hasattr(self, attr) or getattr(self, attr) is None:
- setattr(self, attr, value)
- return getattr(self, attr)
-
- def defaultDestination(self, args):
- ## TODO: use common prefix of args?
- depotPath = args[0]
- depotDir = re.sub("(@[^@]*)$", "", depotPath)
- depotDir = re.sub("(#[^#]*)$", "", depotDir)
- depotDir = re.sub(r"\.\.\.$", "", depotDir)
- depotDir = re.sub(r"/$", "", depotDir)
- return os.path.split(depotDir)[1]
-
- def run(self, args):
- if len(args) < 1:
- return False
-
- if self.keepRepoPath and not self.cloneDestination:
- sys.stderr.write("Must specify destination for --keep-path\n")
- sys.exit(1)
-
- depotPaths = args
-
- if not self.cloneDestination and len(depotPaths) > 1:
- self.cloneDestination = depotPaths[-1]
- depotPaths = depotPaths[:-1]
-
- self.cloneExclude = ["/"+p for p in self.cloneExclude]
- for p in depotPaths:
- if not p.startswith("//"):
- return False
-
- if not self.cloneDestination:
- self.cloneDestination = self.defaultDestination(args)
-
- print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
- if not os.path.exists(self.cloneDestination):
- os.makedirs(self.cloneDestination)
- chdir(self.cloneDestination)
- system("git init")
- self.gitdir = os.getcwd() + "/.git"
- if not P4Sync.run(self, depotPaths):
- return False
- if self.branch != "master":
- if self.importIntoRemotes:
- masterbranch = "refs/remotes/p4/master"
- else:
- masterbranch = "refs/heads/p4/master"
- if gitBranchExists(masterbranch):
- system("git branch master %s" % masterbranch)
- system("git checkout -f")
- else:
- print "Could not detect main branch. No checkout/master branch created."
-
- return True
-
-class P4Branches(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Shows the git branches that hold imports and their "
- + "corresponding perforce depot paths")
- self.verbose = False
-
- def run(self, args):
- if originP4BranchesExist():
- createOrUpdateBranchesFromOrigin()
-
- cmdline = "git rev-parse --symbolic "
- cmdline += " --remotes"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
- settings = extractSettingsGitLog(log)
-
- print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
- return True
-
-class HelpFormatter(optparse.IndentedHelpFormatter):
- def __init__(self):
- optparse.IndentedHelpFormatter.__init__(self)
-
- def format_description(self, description):
- if description:
- return description + "\n"
- else:
- return ""
-
-def printUsage(commands):
- print "usage: %s <command> [options]" % sys.argv[0]
- print ""
- print "valid commands: %s" % ", ".join(commands)
- print ""
- print "Try %s <command> --help for command specific help." % sys.argv[0]
- print ""
-
-commands = {
- "debug" : P4Debug,
- "submit" : P4Submit,
- "commit" : P4Submit,
- "sync" : P4Sync,
- "rebase" : P4Rebase,
- "clone" : P4Clone,
- "rollback" : P4RollBack,
- "branches" : P4Branches
-}
-
-
-def main():
- if len(sys.argv[1:]) == 0:
- printUsage(commands.keys())
- sys.exit(2)
-
- cmd = ""
- cmdName = sys.argv[1]
- try:
- klass = commands[cmdName]
- cmd = klass()
- except KeyError:
- print "unknown command %s" % cmdName
- print ""
- printUsage(commands.keys())
- sys.exit(2)
-
- options = cmd.options
- cmd.gitdir = os.environ.get("GIT_DIR", None)
-
- args = sys.argv[2:]
-
- if len(options) > 0:
- options.append(optparse.make_option("--git-dir", dest="gitdir"))
-
- parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
- options,
- description = cmd.description,
- formatter = HelpFormatter())
-
- (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
- global verbose
- verbose = cmd.verbose
- if cmd.needsGit:
- if cmd.gitdir == None:
- cmd.gitdir = os.path.abspath(".git")
- if not isValidGitDir(cmd.gitdir):
- cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
- if os.path.exists(cmd.gitdir):
- cdup = read_pipe("git rev-parse --show-cdup").strip()
- if len(cdup) > 0:
- chdir(cdup);
-
- if not isValidGitDir(cmd.gitdir):
- if isValidGitDir(cmd.gitdir + "/.git"):
- cmd.gitdir += "/.git"
- else:
- die("fatal: cannot locate git repository at %s" % cmd.gitdir)
-
- os.environ["GIT_DIR"] = cmd.gitdir
-
- if not cmd.run(args):
- parser.print_help()
-
-
-if __name__ == '__main__':
- main()
diff --git a/contrib/fast-import/git-p4.README b/contrib/fast-import/git-p4.README
new file mode 100644
index 0000000000..cec5ecfa7c
--- /dev/null
+++ b/contrib/fast-import/git-p4.README
@@ -0,0 +1,12 @@
+The git-p4 script moved to the top-level of the git source directory.
+
+Invoke it as any other git command, like "git p4 clone", for instance.
+
+Note that the top-level git-p4.py script is now the source. It is
+built by make into git-p4, which is what gets installed.
+
+Windows users can copy the git-p4.py source script directly, possibly
+invoking it through a batch file called "git-p4.bat" in the same folder.
+It should contain just one line:
+
+ @python "%~d0%~p0git-p4.py" %*
diff --git a/contrib/fast-import/git-p4.bat b/contrib/fast-import/git-p4.bat
deleted file mode 100644
index 9f97e884f5..0000000000
--- a/contrib/fast-import/git-p4.bat
+++ /dev/null
@@ -1 +0,0 @@
-@python "%~d0%~p0git-p4" %*
diff --git a/contrib/fast-import/git-p4.txt b/contrib/fast-import/git-p4.txt
deleted file mode 100644
index 49b335921a..0000000000
--- a/contrib/fast-import/git-p4.txt
+++ /dev/null
@@ -1,210 +0,0 @@
-git-p4 - Perforce <-> Git converter using git-fast-import
-
-Usage
-=====
-
-git-p4 can be used in two different ways:
-
-1) To import changes from Perforce to a Git repository, using "git-p4 sync".
-
-2) To submit changes from Git back to Perforce, using "git-p4 submit".
-
-Importing
-=========
-
-Simply start with
-
- git-p4 clone //depot/path/project
-
-or
-
- git-p4 clone //depot/path/project myproject
-
-This will:
-
-1) Create an empty git repository in a subdirectory called "project" (or
-"myproject" with the second command)
-
-2) Import the head revision from the given Perforce path into a git branch
-called "p4" (remotes/p4 actually)
-
-3) Create a master branch based on it and check it out.
-
-If you want the entire history (not just the head revision) then you can simply
-append a "@all" to the depot path:
-
- git-p4 clone //depot/project/main@all myproject
-
-
-
-If you want more control you can also use the git-p4 sync command directly:
-
- mkdir repo-git
- cd repo-git
- git init
- git-p4 sync //path/in/your/perforce/depot
-
-This will import the current head revision of the specified depot path into a
-"remotes/p4/master" branch of your git repository. You can use the
---branch=mybranch option to import into a different branch.
-
-If you want to import the entire history of a given depot path simply use:
-
- git-p4 sync //path/in/depot@all
-
-
-Note:
-
-To achieve optimal compression you may want to run 'git repack -a -d -f' after
-a big import. This may take a while.
-
-Incremental Imports
-===================
-
-After an initial import you can continue to synchronize your git repository
-with newer changes from the Perforce depot by just calling
-
- git-p4 sync
-
-in your git repository. By default the "remotes/p4/master" branch is updated.
-
-Advanced Setup
-==============
-
-Suppose you have a periodically updated git repository somewhere, containing a
-complete import of a Perforce project. This repository can be cloned and used
-with git-p4. When updating the cloned repository with the "sync" command,
-git-p4 will try to fetch changes from the original repository first. The git
-protocol used with this is usually faster than importing from Perforce
-directly.
-
-This behaviour can be disabled by setting the "git-p4.syncFromOrigin" git
-configuration variable to "false".
-
-Updating
-========
-
-A common working pattern is to fetch the latest changes from the Perforce depot
-and merge them with local uncommitted changes. The recommended way is to use
-git's rebase mechanism to preserve linear history. git-p4 provides a convenient
-
- git-p4 rebase
-
-command that calls git-p4 sync followed by git rebase to rebase the current
-working branch.
-
-Submitting
-==========
-
-git-p4 has support for submitting changes from a git repository back to the
-Perforce depot. This requires a Perforce checkout separate from your git
-repository. To submit all changes that are in the current git branch but not in
-the "p4" branch (or "origin" if "p4" doesn't exist) simply call
-
- git-p4 submit
-
-in your git repository. If you want to submit changes in a specific branch that
-is not your current git branch you can also pass that as an argument:
-
- git-p4 submit mytopicbranch
-
-You can override the reference branch with the --origin=mysourcebranch option.
-
-If a submit fails you may have to "p4 resolve" and submit manually. You can
-continue importing the remaining changes with
-
- git-p4 submit --continue
-
-Example
-=======
-
-# Clone a repository
- git-p4 clone //depot/path/project
-# Enter the newly cloned directory
- cd project
-# Do some work...
- vi foo.h
-# ... and commit locally to gi
- git commit foo.h
-# In the meantime somebody submitted changes to the Perforce depot. Rebase your latest
-# changes against the latest changes in Perforce:
- git-p4 rebase
-# Submit your locally committed changes back to Perforce
- git-p4 submit
-# ... and synchronize with Perforce
- git-p4 rebase
-
-
-Configuration parameters
-========================
-
-git-p4.user ($P4USER)
-
-Allows you to specify the username to use to connect to the Perforce repository.
-
- git config [--global] git-p4.user public
-
-git-p4.password ($P4PASS)
-
-Allows you to specify the password to use to connect to the Perforce repository.
-Warning this password will be visible on the command-line invocation of the p4 binary.
-
- git config [--global] git-p4.password public1234
-
-git-p4.port ($P4PORT)
-
-Specify the port to be used to contact the Perforce server. As this will be passed
-directly to the p4 binary, it may be in the format host:port as well.
-
- git config [--global] git-p4.port codes.zimbra.com:2666
-
-git-p4.host ($P4HOST)
-
-Specify the host to contact for a Perforce repository.
-
- git config [--global] git-p4.host perforce.example.com
-
-git-p4.client ($P4CLIENT)
-
-Specify the client name to use
-
- git config [--global] git-p4.client public-view
-
-git-p4.allowSubmit
-
- git config [--global] git-p4.allowSubmit false
-
-git-p4.syncFromOrigin
-
-A useful setup may be that you have a periodically updated git repository
-somewhere that contains a complete import of a Perforce project. That git
-repository can be used to clone the working repository from and one would
-import from Perforce directly after cloning using git-p4. If the connection to
-the Perforce server is slow and the working repository hasn't been synced for a
-while it may be desirable to fetch changes from the origin git repository using
-the efficient git protocol. git-p4 supports this setup by calling "git fetch origin"
-by default if there is an origin branch. You can disable this using:
-
- git config [--global] git-p4.syncFromOrigin false
-
-git-p4.useclientspec
-
- git config [--global] git-p4.useclientspec false
-
-Implementation Details...
-=========================
-
-* Changesets from Perforce are imported using git fast-import.
-* The import does not require anything from the Perforce client view as it just uses
- "p4 print //depot/path/file#revision" to get the actual file contents.
-* Every imported changeset has a special [git-p4...] line at the
- end of the log message that gives information about the corresponding
- Perforce change number and is also used by git-p4 itself to find out
- where to continue importing when doing incremental imports.
- Basically when syncing it extracts the perforce change number of the
- latest commit in the "p4" branch and uses "p4 changes //depot/path/...@changenum,#head"
- to find out which changes need to be imported.
-* git-p4 submit uses "git rev-list" to pick the commits between the "p4" branch
- and the current branch.
- The commits themselves are applied using git diff/format-patch ... | git apply
-
diff --git a/contrib/fast-import/import-directories.perl b/contrib/fast-import/import-directories.perl
index 5782d80e26..4dec1f18e4 100755
--- a/contrib/fast-import/import-directories.perl
+++ b/contrib/fast-import/import-directories.perl
@@ -1,4 +1,4 @@
-#!/usr/bin/perl -w
+#!/usr/bin/perl
#
# Copyright 2008-2009 Peter Krefting <peter@softwolves.pp.se>
#
@@ -109,8 +109,8 @@ was available previously is not included in this revision, it will
be removed.
If an on-disk revision is incomplete, you can point to files from
-a previous revision. There are no restriction as to where the source
-files are located, nor to the names of them.
+a previous revision. There are no restrictions on where the source
+files are located, nor on their names.
[3.files]
; the key is the path inside the repository, the value is the path
@@ -140,6 +140,7 @@ by whitespace or other characters.
# Globals
use strict;
+use warnings;
use integer;
my $crlfmode = 0;
my @revs;
@@ -344,7 +345,7 @@ sub parsekeyvaluepair
Key and value strings may be enclosed in quotes, in which case
whitespace inside the quotes is preserved. Additionally, an equal
-sign may be included in the key by preceeding it with a backslash.
+sign may be included in the key by preceding it with a backslash.
For example:
"key1 "=value1
diff --git a/contrib/fast-import/import-tars.perl b/contrib/fast-import/import-tars.perl
index 7001862bfd..95438e1ed4 100755
--- a/contrib/fast-import/import-tars.perl
+++ b/contrib/fast-import/import-tars.perl
@@ -20,7 +20,7 @@ use Getopt::Long;
my $metaext = '';
-die "usage: import-tars [--metainfo=extension] *.tar.{gz,bz2,Z}\n"
+die "usage: import-tars [--metainfo=extension] *.tar.{gz,bz2,lzma,xz,Z}\n"
unless GetOptions('metainfo=s' => \$metaext) && @ARGV;
my $branch_name = 'import-tars';
@@ -49,6 +49,9 @@ foreach my $tar_file (@ARGV)
} elsif ($tar_name =~ s/\.tar\.Z$//) {
open(I, '-|', 'uncompress', '-c', $tar_file)
or die "Unable to uncompress -c $tar_file: $!\n";
+ } elsif ($tar_name =~ s/\.(tar\.(lzma|xz)|(tlz|txz))$//) {
+ open(I, '-|', 'xz', '-dc', $tar_file)
+ or die "Unable to xz -dc $tar_file: $!\n";
} elsif ($tar_name =~ s/\.tar$//) {
open(I, $tar_file) or die "Unable to open $tar_file: $!\n";
} else {
diff --git a/contrib/fast-import/import-zips.py b/contrib/fast-import/import-zips.py
index 7051a83a59..d12c296223 100755
--- a/contrib/fast-import/import-zips.py
+++ b/contrib/fast-import/import-zips.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
## zip archive frontend for git-fast-import
##
@@ -9,13 +9,18 @@
## git log --stat import-zips
from os import popen, path
-from sys import argv, exit
+from sys import argv, exit, hexversion, stderr
from time import mktime
from zipfile import ZipFile
+if hexversion < 0x01060000:
+ # The limiter is the zipfile module
+ stderr.write("import-zips.py: requires Python 1.6.0 or later.\n")
+ exit(1)
+
if len(argv) < 2:
- print 'Usage:', argv[0], '<zipfile>...'
- exit(1)
+ print 'usage:', argv[0], '<zipfile>...'
+ exit(1)
branch_ref = 'refs/heads/import-zips'
committer_name = 'Z Ip Creator'
@@ -23,51 +28,51 @@ committer_email = 'zip@example.com'
fast_import = popen('git fast-import --quiet', 'w')
def printlines(list):
- for str in list:
- fast_import.write(str + "\n")
+ for str in list:
+ fast_import.write(str + "\n")
for zipfile in argv[1:]:
- commit_time = 0
- next_mark = 1
- common_prefix = None
- mark = dict()
-
- zip = ZipFile(zipfile, 'r')
- for name in zip.namelist():
- if name.endswith('/'):
- continue
- info = zip.getinfo(name)
-
- if commit_time < info.date_time:
- commit_time = info.date_time
- if common_prefix == None:
- common_prefix = name[:name.rfind('/') + 1]
- else:
- while not name.startswith(common_prefix):
- last_slash = common_prefix[:-1].rfind('/') + 1
- common_prefix = common_prefix[:last_slash]
-
- mark[name] = ':' + str(next_mark)
- next_mark += 1
-
- printlines(('blob', 'mark ' + mark[name], \
- 'data ' + str(info.file_size)))
- fast_import.write(zip.read(name) + "\n")
-
- committer = committer_name + ' <' + committer_email + '> %d +0000' % \
- mktime(commit_time + (0, 0, 0))
-
- printlines(('commit ' + branch_ref, 'committer ' + committer, \
- 'data <<EOM', 'Imported from ' + zipfile + '.', 'EOM', \
- '', 'deleteall'))
-
- for name in mark.keys():
- fast_import.write('M 100644 ' + mark[name] + ' ' +
- name[len(common_prefix):] + "\n")
-
- printlines(('', 'tag ' + path.basename(zipfile), \
- 'from ' + branch_ref, 'tagger ' + committer, \
- 'data <<EOM', 'Package ' + zipfile, 'EOM', ''))
+ commit_time = 0
+ next_mark = 1
+ common_prefix = None
+ mark = dict()
+
+ zip = ZipFile(zipfile, 'r')
+ for name in zip.namelist():
+ if name.endswith('/'):
+ continue
+ info = zip.getinfo(name)
+
+ if commit_time < info.date_time:
+ commit_time = info.date_time
+ if common_prefix == None:
+ common_prefix = name[:name.rfind('/') + 1]
+ else:
+ while not name.startswith(common_prefix):
+ last_slash = common_prefix[:-1].rfind('/') + 1
+ common_prefix = common_prefix[:last_slash]
+
+ mark[name] = ':' + str(next_mark)
+ next_mark += 1
+
+ printlines(('blob', 'mark ' + mark[name], \
+ 'data ' + str(info.file_size)))
+ fast_import.write(zip.read(name) + "\n")
+
+ committer = committer_name + ' <' + committer_email + '> %d +0000' % \
+ mktime(commit_time + (0, 0, 0))
+
+ printlines(('commit ' + branch_ref, 'committer ' + committer, \
+ 'data <<EOM', 'Imported from ' + zipfile + '.', 'EOM', \
+ '', 'deleteall'))
+
+ for name in mark.keys():
+ fast_import.write('M 100644 ' + mark[name] + ' ' +
+ name[len(common_prefix):] + "\n")
+
+ printlines(('', 'tag ' + path.basename(zipfile), \
+ 'from ' + branch_ref, 'tagger ' + committer, \
+ 'data <<EOM', 'Package ' + zipfile, 'EOM', ''))
if fast_import.close():
- exit(1)
+ exit(1)
diff --git a/contrib/git-jump/README b/contrib/git-jump/README
new file mode 100644
index 0000000000..1cebc328cb
--- /dev/null
+++ b/contrib/git-jump/README
@@ -0,0 +1,92 @@
+git-jump
+========
+
+Git-jump is a script for helping you jump to "interesting" parts of your
+project in your editor. It works by outputting a set of interesting
+spots in the "quickfix" format, which editors like vim can use as a
+queue of places to visit (this feature is usually used to jump to errors
+produced by a compiler). For example, given a diff like this:
+
+------------------------------------
+diff --git a/foo.c b/foo.c
+index a655540..5a59044 100644
+--- a/foo.c
++++ b/foo.c
+@@ -1,3 +1,3 @@
+ int main(void) {
+- printf("hello word!\n");
++ printf("hello world!\n");
+ }
+-----------------------------------
+
+git-jump will feed this to the editor:
+
+-----------------------------------
+foo.c:2: printf("hello word!\n");
+-----------------------------------
+
+Obviously this trivial case isn't that interesting; you could just open
+`foo.c` yourself. But when you have many changes scattered across a
+project, you can use the editor's support to "jump" from point to point.
+
+Git-jump can generate three types of interesting lists:
+
+ 1. The beginning of any diff hunks.
+
+ 2. The beginning of any merge conflict markers.
+
+ 3. Any grep matches.
+
+
+Using git-jump
+--------------
+
+To use it, just drop git-jump in your PATH, and then invoke it like
+this:
+
+--------------------------------------------------
+# jump to changes not yet staged for commit
+git jump diff
+
+# jump to changes that are staged for commit; you can give
+# arbitrary diff options
+git jump diff --cached
+
+# jump to merge conflicts
+git jump merge
+
+# jump to all instances of foo_bar
+git jump grep foo_bar
+
+# same as above, but case-insensitive; you can give
+# arbitrary grep options
+git jump grep -i foo_bar
+--------------------------------------------------
+
+
+Related Programs
+----------------
+
+You can accomplish some of the same things with individual tools. For
+example, you can use `git mergetool` to start vimdiff on each unmerged
+file. `git jump merge` is for the vim-wielding luddite who just wants to
+jump straight to the conflict text with no fanfare.
+
+As of git v1.7.2, `git grep` knows the `--open-files-in-pager` option,
+which does something similar to `git jump grep`. However, it is limited
+to positioning the cursor to the correct line in only the first file,
+leaving you to locate subsequent hits in that file or other files using
+the editor or pager. By contrast, git-jump provides the editor with a
+complete list of files and line numbers for each match.
+
+
+Limitations
+-----------
+
+This script was written and tested with vim. Given that the quickfix
+format is the same as what gcc produces, I expect emacs users have a
+similar feature for iterating through the list, but I know nothing about
+how to activate it.
+
+The shell snippets to generate the quickfix lines will almost certainly
+choke on filenames with exotic characters (like newlines).
diff --git a/contrib/git-jump/git-jump b/contrib/git-jump/git-jump
new file mode 100755
index 0000000000..dc90cd6379
--- /dev/null
+++ b/contrib/git-jump/git-jump
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+usage() {
+ cat <<\EOF
+usage: git jump <mode> [<args>]
+
+Jump to interesting elements in an editor.
+The <mode> parameter is one of:
+
+diff: elements are diff hunks. Arguments are given to diff.
+
+merge: elements are merge conflicts. Arguments are ignored.
+
+grep: elements are grep hits. Arguments are given to grep.
+EOF
+}
+
+open_editor() {
+ editor=`git var GIT_EDITOR`
+ eval "$editor -q \$1"
+}
+
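+# Emit one quickfix-style "file:line: text" entry for the first changed
+# line of each hunk: the file name comes from the "+++" header, the
+# starting line number from the "@@" header, and context lines advance
+# the counter until the first added or removed line is seen.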
+mode_diff() {
+ git diff --no-prefix --relative "$@" |
+ perl -ne '
+ if (m{^\+\+\+ (.*)}) { $file = $1; next }
+ defined($file) or next;
+ if (m/^@@ .*\+(\d+)/) { $line = $1; next }
+ defined($line) or next;
+ if (/^ /) { $line++; next }
+ if (/^[-+]\s*(.*)/) {
+ print "$file:$line: $1\n";
+ $line = undef;
+ }
+ '
+}
+
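+# List each unmerged path once, then print its conflict-marker lines
+# in "file:line: text" form via grep -Hn.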
+mode_merge() {
+ git ls-files -u |
+ perl -pe 's/^.*?\t//' |
+ sort -u |
+ while IFS= read fn; do
+ grep -Hn '^<<<<<<<' "$fn"
+ done
+}
+
+# Grep -n generates nice quickfix-looking lines by itself,
+# but let's clean up extra whitespace, so they look better if the
+# editor shows them to us in the status bar.
+mode_grep() {
+ git grep -n "$@" |
+ perl -pe '
+ s/[ \t]+/ /g;
+ s/^ *//;
+ '
+}
+
+if test $# -lt 1; then
+ usage >&2
+ exit 1
+fi
+mode=$1; shift
+
+trap 'rm -f "$tmp"' 0 1 2 3 15
+tmp=`mktemp -t git-jump.XXXXXX` || exit 1
+type "mode_$mode" >/dev/null 2>&1 || { usage >&2; exit 1; }
+"mode_$mode" "$@" >"$tmp"
+test -s "$tmp" || exit 0
+open_editor "$tmp"
diff --git a/contrib/git-resurrect.sh b/contrib/git-resurrect.sh
index c364dda696..d7e97bbc76 100755
--- a/contrib/git-resurrect.sh
+++ b/contrib/git-resurrect.sh
@@ -9,6 +9,8 @@ other/Merge <other> into <name> (respectively) commit subjects, which
is rather slow but allows you to resurrect other people's topic
branches."
+OPTIONS_KEEPDASHDASH=
+OPTIONS_STUCKLONG=
OPTIONS_SPEC="\
git resurrect $USAGE
--
diff --git a/contrib/git-shell-commands/README b/contrib/git-shell-commands/README
new file mode 100644
index 0000000000..438463b160
--- /dev/null
+++ b/contrib/git-shell-commands/README
@@ -0,0 +1,18 @@
+Sample programs callable through git-shell. Place a directory named
+'git-shell-commands' in the home directory of a user whose shell is
+git-shell. Then anyone logging in as that user will be able to run
+executables in the 'git-shell-commands' directory.
+
+Provided commands:
+
+help: Prints out the names of available commands. When run
+interactively, git-shell will automatically run 'help' on startup,
+provided it exists.
+
+list: Displays any bare repository whose name ends with ".git" under
+the user's home directory. No other git repositories are visible,
+although they might be clonable through git-shell. 'list' is designed
+to minimize the number of calls to git that must be made in finding
+available repositories; if your setup has additional repositories that
+should be user-discoverable, you may wish to modify 'list'
+accordingly.
diff --git a/contrib/git-shell-commands/help b/contrib/git-shell-commands/help
new file mode 100755
index 0000000000..535770c6ec
--- /dev/null
+++ b/contrib/git-shell-commands/help
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+if tty -s
+then
+ echo "Run 'help' for help, or 'exit' to leave. Available commands:"
+else
+ echo "Run 'help' for help. Available commands:"
+fi
+
+cd "$(dirname "$0")"
+
+for cmd in *
+do
+ case "$cmd" in
+ help) ;;
+ *) [ -f "$cmd" ] && [ -x "$cmd" ] && echo "$cmd" ;;
+ esac
+done
diff --git a/contrib/git-shell-commands/list b/contrib/git-shell-commands/list
new file mode 100755
index 0000000000..6f89938821
--- /dev/null
+++ b/contrib/git-shell-commands/list
@@ -0,0 +1,10 @@
+#!/bin/sh
+
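+# The snippet below runs via "sh -c" with $1 set to each candidate
+# directory found below: the command substitution expands to "true" or
+# "false", which is then executed as a command, so printf runs only for
+# bare repositories.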
+print_if_bare_repo='
+ if "$(git --git-dir="$1" rev-parse --is-bare-repository)" = true
+ then
+ printf "%s\n" "${1#./}"
+ fi
+'
+
+find -type d -name "*.git" -exec sh -c "$print_if_bare_repo" -- \{} \; -prune 2>/dev/null
diff --git a/contrib/gitview/gitview b/contrib/gitview/gitview
index 4c99dfb903..4e23c650fe 100755
--- a/contrib/gitview/gitview
+++ b/contrib/gitview/gitview
@@ -1205,7 +1205,7 @@ class GitView(object):
#The first parent always continue on the same line
try:
- # check we alreay have the value
+ # check we already have the value
tmp_node_pos = self.nodepos[commit.parent_sha1[0]]
except KeyError:
self.colours[commit.parent_sha1[0]] = colour
diff --git a/contrib/gitview/gitview.txt b/contrib/gitview/gitview.txt
index 77c29de305..7b5f9002b9 100644
--- a/contrib/gitview/gitview.txt
+++ b/contrib/gitview/gitview.txt
@@ -7,6 +7,7 @@ gitview - A GTK based repository browser for git
SYNOPSIS
--------
+[verse]
'gitview' [options] [args]
DESCRIPTION
@@ -27,7 +28,7 @@ OPTIONS
<args>::
- All the valid option for gitlink:git-rev-list[1].
+ All the valid option for linkgit:git-rev-list[1].
Key Bindings
------------
diff --git a/contrib/hg-to-git/hg-to-git.py b/contrib/hg-to-git/hg-to-git.py
index 2a6839d81e..60dec86d37 100755
--- a/contrib/hg-to-git/hg-to-git.py
+++ b/contrib/hg-to-git/hg-to-git.py
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#!/usr/bin/env python
""" hg-to-git.py - A Mercurial to GIT converter
@@ -23,6 +23,11 @@ import os, os.path, sys
import tempfile, pickle, getopt
import re
+if sys.hexversion < 0x02030000:
+ # The behavior of the pickle module changed significantly in 2.3
+ sys.stderr.write("hg-to-git.py: requires Python 2.3 or later.\n")
+ sys.exit(1)
+
# Maps hg version -> git version
hgvers = {}
# List of children for each hg revision
@@ -59,14 +64,14 @@ def getgitenv(user, date):
elems = re.compile('(.*?)\s+<(.*)>').match(user)
if elems:
env += 'export GIT_AUTHOR_NAME="%s" ;' % elems.group(1)
- env += 'export GIT_COMMITER_NAME="%s" ;' % elems.group(1)
+ env += 'export GIT_COMMITTER_NAME="%s" ;' % elems.group(1)
env += 'export GIT_AUTHOR_EMAIL="%s" ;' % elems.group(2)
- env += 'export GIT_COMMITER_EMAIL="%s" ;' % elems.group(2)
+ env += 'export GIT_COMMITTER_EMAIL="%s" ;' % elems.group(2)
else:
env += 'export GIT_AUTHOR_NAME="%s" ;' % user
- env += 'export GIT_COMMITER_NAME="%s" ;' % user
+ env += 'export GIT_COMMITTER_NAME="%s" ;' % user
env += 'export GIT_AUTHOR_EMAIL= ;'
- env += 'export GIT_COMMITER_EMAIL= ;'
+ env += 'export GIT_COMMITTER_EMAIL= ;'
env += 'export GIT_AUTHOR_DATE="%s" ;' % date
env += 'export GIT_COMMITTER_DATE="%s" ;' % date
@@ -220,7 +225,7 @@ for cset in range(int(tip) + 1):
os.system('git ls-files -x .hg --deleted | git update-index --remove --stdin')
# commit
- os.system(getgitenv(user, date) + 'git commit --allow-empty -a -F %s' % filecomment)
+ os.system(getgitenv(user, date) + 'git commit --allow-empty --allow-empty-message -a -F %s' % filecomment)
os.unlink(filecomment)
# tag
diff --git a/contrib/hooks/multimail/CHANGES b/contrib/hooks/multimail/CHANGES
new file mode 100644
index 0000000000..3603d56c26
--- /dev/null
+++ b/contrib/hooks/multimail/CHANGES
@@ -0,0 +1,33 @@
+Release 1.0.0
+=============
+
+* Fix encoding of non-ASCII email addresses in email headers.
+
+* Fix backwards-compatibility bugs for older Python 2.x versions.
+
+* Fix a backwards-compatibility bug for Git 1.7.1.
+
+* Add an option commitDiffOpts to customize logs for revisions.
+
+* Pass "-oi" to sendmail by default to prevent premature termination
+ on a line containing only ".".
+
+* Stagger email "Date:" values in an attempt to help mail clients
+ thread the emails in the right order.
+
+* If a mailing list setting is missing, just skip sending the
+ corresponding email (with a warning) instead of failing.
+
+* Add an X-Git-Host header that can be used for email filtering.
+
+* Allow the sender's fully-qualified domain name to be configured.
+
+* Minor documentation improvements.
+
+* Add this CHANGES file.
+
+
+Release 0.9.0
+=============
+
+* Initial release.
diff --git a/contrib/hooks/multimail/README b/contrib/hooks/multimail/README
new file mode 100644
index 0000000000..477d65fed3
--- /dev/null
+++ b/contrib/hooks/multimail/README
@@ -0,0 +1,500 @@
+ git-multimail
+ =============
+
+git-multimail is a tool for sending notification emails on pushes to a
+Git repository. It includes a Python module called git_multimail.py,
+which can either be used as a hook script directly or can be imported
+as a Python module into another script.
+
+git-multimail is derived from the Git project's old
+contrib/hooks/post-receive-email, and is mostly compatible with that
+script. See README.migrate-from-post-receive-email for details about
+the differences and for how to migrate from post-receive-email to
+git-multimail.
+
+git-multimail, like the rest of the Git project, is licensed under
+GPLv2 (see the COPYING file for details).
+
+Please note: although, as a convenience, git-multimail may be
+distributed along with the main Git project, development of
+git-multimail takes place in its own, separate project. See section
+"Getting involved" below for more information.
+
+
+By default, for each push received by the repository, git-multimail:
+
+1. Outputs one email summarizing each reference that was changed.
+ These "reference change" (called "refchange" below) emails describe
+ the nature of the change (e.g., was the reference created, deleted,
+ fast-forwarded, etc.) and include a one-line summary of each commit
+ that was added to the reference.
+
+2. Outputs one email for each new commit that was introduced by the
+ reference change. These "commit" emails include a list of the
+ files changed by the commit, followed by the diffs of files
+ modified by the commit. The commit emails are threaded to the
+ corresponding reference change email via "In-Reply-To". This style
+ (similar to the "git format-patch" style used on the Git mailing
+ list) makes it easy to scan through the emails, jump to patches
+ that need further attention, and write comments about specific
+ commits. Commits are handled in reverse topological order (i.e.,
+ parents shown before children). For example,
+
+ [git] branch master updated
+ + [git] 01/08: doc: fix xref link from api docs to manual pages
+ + [git] 02/08: api-credentials.txt: show the big picture first
+ + [git] 03/08: api-credentials.txt: mention credential.helper explicitly
+ + [git] 04/08: api-credentials.txt: add "see also" section
+ + [git] 05/08: t3510 (cherry-pick-sequence): add missing '&&'
+ + [git] 06/08: Merge branch 'rr/maint-t3510-cascade-fix'
+ + [git] 07/08: Merge branch 'mm/api-credentials-doc'
+ + [git] 08/08: Git 1.7.11-rc2
+
+ Each commit appears in exactly one commit email, the first time
+ that it is pushed to the repository. If a commit is later merged
+ into another branch, then a one-line summary of the commit is
+ included in the reference change email (as usual), but no
+ additional commit email is generated.
+
+ By default, reference change emails have their "Reply-To" field set
+ to the person who pushed the change, and commit emails have their
+ "Reply-To" field set to the author of the commit.
+
+3. Outputs one "announce" mail for each new annotated tag, including
+ information about the tag and optionally a shortlog describing the
+ changes since the previous tag. Such emails might be useful if you
+ use annotated tags to mark releases of your project.
+
+
+Requirements
+------------
+
+* Python 2.x, version 2.4 or later. No non-standard Python modules
+ are required. git-multimail does *not* currently work with Python
+ 3.x.
+
+ The example scripts invoke Python using the following shebang line
+ (following PEP 394 [1]):
+
+ #! /usr/bin/env python2
+
+ If your system's Python2 interpreter is not in your PATH or is not
+ called "python2", you can change the lines accordingly. Or you can
+ invoke the Python interpreter explicitly, for example via a tiny
+ shell script like
+
+ #! /bin/sh
+ /usr/local/bin/python /path/to/git_multimail.py "$@"
+
+* The "git" command must be in your PATH. git-multimail is known to
+ work with Git versions back to 1.7.1. (Earlier versions have not
+ been tested; if you do so, please report your results.)
+
+* To send emails using the default configuration, a standard sendmail
+ program must be located at '/usr/sbin/sendmail' or
+ '/usr/lib/sendmail' and must be configured correctly to send emails.
+ If this is not the case, set multimailhook.sendmailCommand, or see
+ the multimailhook.mailer configuration variable below for how to
+ configure git-multimail to send emails via an SMTP server.
+
+
+Invocation
+----------
+
+git_multimail.py is designed to be used as a "post-receive" hook in a
+Git repository (see githooks(5)). Link or copy it to
+$GIT_DIR/hooks/post-receive within the repository for which email
+notifications are desired. Usually it should be installed on the
+central repository for a project, to which all commits are eventually
+pushed.
+
+For use on pre-v1.5.1 Git servers, git_multimail.py can also work as
+an "update" hook, taking its arguments on the command line. To use
+this script in this manner, link or copy it to $GIT_DIR/hooks/update.
+Please note that the script is not completely reliable in this mode
+[2].
+
+Alternatively, git_multimail.py can be imported as a Python module
+into your own Python post-receive script. This method is a bit more
+work, but allows the behavior of the hook to be customized using
+arbitrary Python code. For example, you can use a custom environment
+(perhaps inheriting from GenericEnvironment or GitoliteEnvironment) to
+
+* change how the user who did the push is determined
+
+* read users' email addresses from an LDAP server or from a database
+
+* decide which users should be notified about which commits based on
+ the contents of the commits (e.g., for users who want to be notified
+ only about changes affecting particular files or subdirectories)
+
+Or you can change how emails are sent by writing your own Mailer
+class. The "post-receive" script in this directory demonstrates how
+to use git_multimail.py as a Python module. (If you make interesting
+changes of this type, please consider sharing them with the
+community.)
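+
+As a rough illustration, a wrapper hook written this way might look
+like the sketch below. The helper names used here (Config,
+choose_mailer, ConfigurationException) and their argument shapes are
+assumptions rather than a documented API; the post-receive script in
+this directory is the authoritative example.
+
+    #! /usr/bin/env python2
+
+    import sys
+    import os
+
+    # Make git_multimail.py importable when it sits next to this hook.
+    sys.path.insert(0, os.path.dirname(__file__))
+    import git_multimail
+
+    # Read the multimailhook.* settings from "git config".
+    config = git_multimail.Config('multimailhook')
+
+    # Pick the environment explicitly; GitoliteEnvironment or a custom
+    # subclass could be substituted here.
+    try:
+        environment = git_multimail.GenericEnvironment(config=config)
+    except git_multimail.ConfigurationException:
+        sys.stderr.write('*** %s\n' % sys.exc_info()[1])
+        sys.exit(1)
+
+    # Send notification emails for the reference updates read from
+    # standard input, as git passes them to a post-receive hook.
+    mailer = git_multimail.choose_mailer(config, environment)
+    git_multimail.run_as_post_receive_hook(environment, mailer)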
+
+
+Configuration
+-------------
+
+By default, git-multimail mostly takes its configuration from the
+following "git config" settings:
+
+multimailhook.environment
+
+ This describes the general environment of the repository.
+ Currently supported values:
+
+ "generic" -- the username of the pusher is read from $USER and the
+ repository name is derived from the repository's path.
+
+ "gitolite" -- the username of the pusher is read from $GL_USER and
+ the repository name from $GL_REPO.
+
+ If neither of these environments is suitable for your setup, then
+ you can implement a Python class that inherits from Environment
+ and instantiate it via a script that looks like the example
+ post-receive script.
+
+ The environment value can be specified on the command line using
+ the --environment option. If it is not specified on the command
+ line or by multimailhook.environment, then it defaults to
+ "gitolite" if the environment contains variables $GL_USER and
+ $GL_REPO; otherwise "generic".
+
+multimailhook.repoName
+
+ A short name of this Git repository, to be used in various places
+ in the notification email text. The default is to use $GL_REPO
+ for gitolite repositories, or otherwise to derive this value from
+ the repository path name.
+
+multimailhook.mailingList
+
+ The list of email addresses to which notification emails should be
+ sent, as RFC 2822 email addresses separated by commas. This
+ configuration option can be multivalued. Leave it unset or set it
+ to the empty string to not send emails by default. The next few
+ settings can be used to configure specific address lists for
+ specific types of notification email.
+
+multimailhook.refchangeList
+
+ The list of email addresses to which summary emails about
+ reference changes should be sent, as RFC 2822 email addresses
+ separated by commas. This configuration option can be
+ multivalued. The default is the value in
+ multimailhook.mailingList. Set this value to the empty string to
+ prevent reference change emails from being sent even if
+ multimailhook.mailingList is set.
+
+multimailhook.announceList
+
+ The list of email addresses to which emails about new annotated
+ tags should be sent, as RFC 2822 email addresses separated by
+ commas. This configuration option can be multivalued. The
+ default is the value in multimailhook.refchangeList or
+ multimailhook.mailingList. Set this value to the empty string to
+ prevent annotated tag announcement emails from being sent even if
+ one of the other values is set.
+
+multimailhook.commitList
+
+ The list of email addresses to which emails about individual new
+ commits should be sent, as RFC 2822 email addresses separated by
+ commas. This configuration option can be multivalued. The
+ default is the value in multimailhook.mailingList. Set this value
+ to the empty string to prevent notification emails about
+ individual commits from being sent even if
+ multimailhook.mailingList is set.
+
+multimailhook.announceShortlog
+
+ If this option is set to true, then emails about changes to
+ annotated tags include a shortlog of changes since the previous
+ tag. This can be useful if the annotated tags represent releases;
+ then the shortlog will be a kind of rough summary of what has
+ happened since the last release. But if your tagging policy is
+ not so straightforward, then the shortlog might be confusing
+ rather than useful. Default is false.
+
+multimailhook.refchangeShowLog
+
+ If this option is set to true, then summary emails about reference
+ changes will include a detailed log of the added commits in
+ addition to the one line summary. The log is generated by running
+ "git log" with the options specified in multimailhook.logOpts.
+ Default is false.
+
+multimailhook.mailer
+
+ This option changes the way emails are sent. Accepted values are:
+
+ - sendmail (the default): use the command /usr/sbin/sendmail or
+ /usr/lib/sendmail (or sendmailCommand, if configured). This
+ mode can be further customized via the following options:
+
+ multimailhook.sendmailCommand
+
+ The command used by mailer "sendmail" to send emails. Shell
+ quoting is allowed in the value of this setting, but remember that
+ Git requires double-quotes to be escaped; e.g.,
+
+ git config multimailhook.sendmailcommand '/usr/sbin/sendmail -oi -t -F \"Git Repo\"'
+
+ Default is '/usr/sbin/sendmail -oi -t' or
+ '/usr/lib/sendmail -oi -t' (depending on which file is
+ present and executable).
+
+ multimailhook.envelopeSender
+
+ If set then pass this value to sendmail via the -f option to set
+ the envelope sender address.
+
+ - smtp: use Python's smtplib. This is useful when the sendmail
+ command is not available on the system. This mode can be
+ further customized via the following options:
+
+ multimailhook.smtpServer
+
+ The name of the SMTP server to connect to. The value can
+ also include a colon and a port number; e.g.,
+ "mail.example.com:25". Default is 'localhost' using port
+ 25.
+
+ multimailhook.envelopeSender
+
+ The sender address to be passed to the SMTP server. If
+ unset, then the value of multimailhook.from is used.
+
+multimailhook.from
+
+ If set then use this value in the From: field of generated emails.
+ If unset, then use the repository's user configuration (user.name
+ and user.email). If user.email is also unset, then use
+ multimailhook.envelopeSender.
+
+multimailhook.administrator
+
+ The name and/or email address of the administrator of the Git
+ repository; used in FOOTER_TEMPLATE. Default is
+ multimailhook.envelopesender if it is set; otherwise a generic
+ string is used.
+
+multimailhook.emailPrefix
+
+ All emails have this string prepended to their subjects, to aid
+ email filtering (though filtering based on the X-Git-* email
+ headers is probably more robust). Default is the short name of
+ the repository in square brackets; e.g., "[myrepo]".
+
+multimailhook.emailMaxLines
+
+ The maximum number of lines that should be included in the body of
+ a generated email. If not specified, there is no limit. Lines
+ beyond the limit are suppressed and counted, and a final line is
+ added indicating the number of suppressed lines.
+
+multimailhook.emailMaxLineLength
+
+ The maximum length of a line in the email body. Lines longer than
+ this limit are truncated to this length with a trailing " [...]"
+ added to indicate the missing text. The default is 500, because
+ (a) diffs with longer lines are probably from binary files, for
+ which a diff is useless, and (b) even if a text file has such long
+ lines, the diffs are probably unreadable anyway. To disable line
+ truncation, set this option to 0.
+
+multimailhook.maxCommitEmails
+
+ The maximum number of commit emails to send for a given change.
+ When the number of patches is larger than this value, only the
+ summary refchange email is sent. This can avoid accidental
+ mailbombing, for example on an initial push. To disable the
+ commit email limit, set this option to 0. The default is 500.
+
+multimailhook.emailStrictUTF8
+
+ If this boolean option is set to "true", then the main part of the
+ email body is forced to be valid UTF-8. Any characters that are
+ not valid UTF-8 are converted to the Unicode replacement
+ character, U+FFFD. The default is "true".
+
+multimailhook.diffOpts
+
+ Options passed to "git diff-tree" when generating the summary
+ information for ReferenceChange emails. Default is "--stat
+ --summary --find-copies-harder". Add -p to those options to
+ include a unified diff of changes in addition to the usual summary
+ output. Shell quoting is allowed; see multimailhook.logOpts for
+ details.
+
+multimailhook.logOpts
+
+ Options passed to "git log" to generate additional info for
+ reference change emails (used only if refchangeShowLog is set).
+ For example, adding --graph will show the graph of revisions, -p
+ will show the complete diff, etc. The default is empty.
+
+ Shell quoting is allowed; for example, a log format that contains
+ spaces can be specified using something like:
+
+ git config multimailhook.logopts '--pretty=format:"%h %aN <%aE>%n%s%n%n%b%n"'
+
+ If you want to set this by editing your configuration file
+ directly, remember that Git requires double-quotes to be escaped
+ (see git-config(1) for more information):
+
+ [multimailhook]
+ logopts = --pretty=format:\"%h %aN <%aE>%n%s%n%n%b%n\"
+
+multimailhook.commitLogOpts
+
+ Options passed to "git log" to generate additional info for
+ revision change emails. For example, adding --ignore-all-space
+ will suppress whitespace changes. The default options are "-C
+ --stat -p --cc". Shell quoting is allowed; see
+ multimailhook.logOpts for details.
+
+multimailhook.emailDomain
+
+ Domain name appended to the username of the person doing the push
+ to convert it into an email address (via "%s@%s" % (username,
+ emaildomain)). More complicated schemes can be implemented by
+ overriding Environment and overriding its get_pusher_email()
+ method.
+
+multimailhook.replyTo
+multimailhook.replyToCommit
+multimailhook.replyToRefchange
+
+ Addresses to use in the Reply-To: field for commit emails
+ (replyToCommit) and refchange emails (replyToRefchange).
+ multimailhook.replyTo is used as default when replyToCommit or
+ replyToRefchange is not set. The value for these variables can be
+ either:
+
+ - An email address, which will be used directly.
+
+ - The value "pusher", in which case the pusher's address (if
+ available) will be used. This is the default for refchange
+ emails.
+
+ - The value "author" (meaningful only for replyToCommit), in which
+ case the commit author's address will be used. This is the
+ default for commit emails.
+
+ - The value "none", in which case the Reply-To: field will be
+ omitted.
+
+
+Email filtering aids
+--------------------
+
+All emails include extra headers to enable fine tuned filtering and
+give information for debugging. All emails include the headers
+"X-Git-Host", "X-Git-Repo", "X-Git-Refname", and "X-Git-Reftype".
+ReferenceChange emails also include headers "X-Git-Oldrev" and "X-Git-Newrev";
+Revision emails also include header "X-Git-Rev".
+
+
+Customizing email contents
+--------------------------
+
+git-multimail mostly generates emails by expanding templates. The
+templates can be customized. To avoid the need to edit
+git_multimail.py directly, the preferred way to change the templates
+is to write a separate Python script that imports git_multimail.py as
+a module, then replaces the templates in place. See the provided
+post-receive script for an example of how this is done.
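+
+For instance, a minimal wrapper along these lines (this sketch assumes
+the templates are plain module-level strings in git_multimail.py;
+FOOTER_TEMPLATE is the one mentioned under multimailhook.administrator
+above):
+
+    import git_multimail
+
+    # Replace the footer appended to every generated email; any other
+    # template could be swapped out the same way before the hook runs.
+    git_multimail.FOOTER_TEMPLATE = (
+        '\n-- \nThis notification was sent by the example wrapper hook.\n'
+    )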
+
+
+Customizing git-multimail for your environment
+----------------------------------------------
+
+git-multimail is mostly customized via an "environment" that describes
+the local environment in which Git is running. Two types of
+environment are built in:
+
+* GenericEnvironment: a stand-alone Git repository.
+
+* GitoliteEnvironment: a Git repository that is managed by gitolite
+ [3]. For such repositories, the identity of the pusher is read from
+ environment variable $GL_USER, and the name of the repository is
+ read from $GL_REPO (if it is not overridden by
+ multimailhook.reponame).
+
+By default, git-multimail assumes GitoliteEnvironment if $GL_USER and
+$GL_REPO are set, and otherwise assumes GenericEnvironment.
+Alternatively, you can choose one of these two environments explicitly
+by setting a "multimailhook.environment" config setting (which can
+have the value "generic" or "gitolite") or by passing an --environment
+option to the script.
+
+If you need to customize the script in ways that are not supported by
+the existing environments, you can define your own environment
+class using arbitrary Python code. To do so, you need to import
+git_multimail.py as a Python module, as demonstrated by the example
+post-receive script. Then implement your environment class; it should
+usually inherit from one of the existing Environment classes and
+possibly one or more of the EnvironmentMixin classes. Then set the
+"environment" variable to an instance of your own environment class
+and pass it to run_as_post_receive_hook().
+
+The standard environment classes, GenericEnvironment and
+GitoliteEnvironment, are in fact themselves put together out of a
+number of mixin classes, each of which handles one aspect of the
+customization. For the finest control over your configuration, you
+can specify exactly which mixin classes your own environment class
+should inherit from, and override individual methods (or even add your
+own mixin classes) to implement entirely new behaviors. If you
+implement any mixins that might be useful to other people, please
+consider sharing them with the community!
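+
+As a rough sketch (the library path, class name, and address scheme
+below are invented for illustration; check the constructor arguments
+and the exact invocation of run_as_post_receive_hook() against the
+provided post-receive script), such a wrapper could look like:
+
+    import sys
+    sys.path.insert(0, '/path/to/git-multimail')  # wherever git_multimail.py lives
+    import git_multimail
+
+    config = git_multimail.Config('multimailhook')
+
+    class CorporateEnvironment(git_multimail.GenericEnvironment):
+        # Example override: derive the pusher's address from a site convention.
+        def get_pusher_email(self):
+            return '%s@example.com' % (self.get_pusher(),)
+
+    environment = CorporateEnvironment(config=config)
+    git_multimail.run_as_post_receive_hook(environment)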
+
+
+Getting involved
+----------------
+
+git-multimail is an open-source project, built by volunteers. We
+would welcome your help!
+
+The current maintainer is Michael Haggerty <mhagger@alum.mit.edu>.
+
+General discussion of git-multimail takes place on the main Git
+mailing list,
+
+ git@vger.kernel.org
+
+Please CC emails regarding git-multimail to me so that I don't
+overlook them.
+
+The git-multimail project itself is currently hosted on GitHub:
+
+ https://github.com/mhagger/git-multimail
+
+We use the GitHub issue tracker to keep track of bugs and feature
+requests, and GitHub pull requests to exchange patches (though, if you
+prefer, you can send patches via the Git mailing list with cc to me).
+Please sign off your patches as per the Git project practice.
+
+Please note that although a copy of git-multimail will probably be
+distributed in the "contrib" section of the main Git project,
+development takes place in the separate git-multimail repository on
+GitHub! (Whenever enough changes to git-multimail have accumulated, a
+new code-drop of git-multimail will be submitted for inclusion in the
+Git project.)
+
+
+Footnotes
+---------
+
+[1] http://www.python.org/dev/peps/pep-0394/
+
+[2] Because of the way information is passed to update hooks, the
+ script's method of determining whether a commit has already been
+ seen does not work when it is used as an "update" script. In
+ particular, no notification email will be generated for a new
+ commit that is added to multiple references in the same push.
+
+[3] https://github.com/sitaramc/gitolite
diff --git a/contrib/hooks/multimail/README.Git b/contrib/hooks/multimail/README.Git
new file mode 100644
index 0000000000..129b771410
--- /dev/null
+++ b/contrib/hooks/multimail/README.Git
@@ -0,0 +1,15 @@
+This copy of git-multimail is distributed as part of the "contrib"
+section of the Git project as a convenience to Git users.
+git-multimail is developed as an independent project at the following
+website:
+
+ https://github.com/mhagger/git-multimail
+
+The version in this directory was obtained from the upstream project
+on 2014-04-07 and consists of the "git-multimail" subdirectory from
+revision
+
+ 1b32653bafc4f902535b9fc1cd9cae911325b870
+
+Please see the README file in this directory for information about how
+to report bugs or contribute to git-multimail.
diff --git a/contrib/hooks/multimail/README.migrate-from-post-receive-email b/contrib/hooks/multimail/README.migrate-from-post-receive-email
new file mode 100644
index 0000000000..1e6a976699
--- /dev/null
+++ b/contrib/hooks/multimail/README.migrate-from-post-receive-email
@@ -0,0 +1,145 @@
+git-multimail is close to, but not exactly, a plug-in replacement for
+the old Git project script contrib/hooks/post-receive-email. This
+document describes the differences and explains how to configure
+git-multimail to get behavior closest to that of post-receive-email.
+
+If you are in a hurry
+=====================
+
+A script called migrate-mailhook-config is included with
+git-multimail. If you run this script within a Git repository that is
+configured to use post-receive-email, it will convert the
+configuration settings into the approximate equivalent settings for
+git-multimail. For more information, run
+
+ migrate-mailhook-config --help
+
+
+Configuration differences
+=========================
+
+* The names of the config options for git-multimail are in namespace
+ "multimailhook.*" instead of "hooks.*". (Editorial comment:
+ post-receive-email should never have used such a generic top-level
+ namespace.)
+
+* In emails about new annotated tags, post-receive-email includes a
+ shortlog of all changes since the previous annotated tag. To get
+ this behavior with git-multimail, you need to set
+ multimailhook.announceshortlog to true:
+
+ git config multimailhook.announceshortlog true
+
+* multimailhook.commitlist -- This is a new configuration variable.
+ Recipients listed here will receive a separate email for each new
+ commit. However, if this variable is *not* set, it defaults to the
+ value of multimailhook.mailinglist. Therefore, if you *don't* want
+ the members of multimailhook.mailinglist to receive one email per
+ commit, then set this value to the empty string:
+
+ git config multimailhook.commitlist ''
+
+* multimailhook.emailprefix -- If this value is not set, then the
+ subjects of generated emails are prefixed with the short name of the
+ repository enclosed in square brackets; e.g., "[myrepo]".
+ post-receive-email defaults to prefix "[SCM]" if this option is not
+ set. So if you were using the old default and want to retain it
+ (for example, to avoid having to change your email filters), set
+ this variable explicitly to the old value:
+
+ git config multimailhook.emailprefix "[SCM]"
+
+* The "multimailhook.showrev" configuration option is not supported.
+ Its main use is obsoleted by the one-email-per-commit feature of
+ git-multimail.
+
+
+Other differences
+=================
+
+This section describes other differences in the behavior of
+git-multimail vs. post-receive-email. For full details, please refer
+to the main README file:
+
+* One email per commit. For each reference change, the script first
+ outputs one email summarizing the reference change (including
+ one-line summaries of the new commits), then it outputs a separate
+ email for each new commit that was introduced, including patches.
+ These one-email-per-commit emails go to the addresses listed in
+ multimailhook.commitlist. post-receive-email sends only one email
+ for each *reference* that is changed, no matter how many commits
+ were added to the reference.
+
+* Better algorithm for detecting new commits. post-receive-email
+ processes one reference change at a time, which causes it to fail to
+ describe new commits that were included in multiple branches. For
+ example, if a single push adds the "*" commits in the diagram below,
+ then post-receive-email would never include the details of the two
+ commits that are common to "master" and "branch" in its
+ notifications.
+
+ o---o---o---*---*---* <-- master
+ \
+ *---* <-- branch
+
+ git-multimail analyzes all reference modifications to determine
+  which commits were not present before the change, thereby avoiding
+  that error.
+
+* In reference change emails, git-multimail distinguishes commits that
+  have been added to the reference from those that are entirely new to
+  the repository, and commits that have been omitted from the reference
+  from those that have been entirely discarded from the repository.
+
+* The environment in which Git is running can be configured via an
+ "Environment" abstraction.
+
+* Built-in support for Gitolite-managed repositories.
+
+* Instead of using full SHA1 object names in emails, git-multimail
+ mostly uses abbreviated SHA1s, plus one-line log message summaries
+ where appropriate.
+
+* In the schematic diagrams that explain non-fast-forward commits,
+ git-multimail shows the names of the branches involved.
+
+* The emails generated by git-multimail include the name of the Git
+ repository that was modified; this is convenient for recipients who
+ are monitoring multiple repositories.
+
+* git-multimail allows the email "From" addresses to be configured.
+
+* The recipients lists (multimailhook.mailinglist,
+ multimailhook.refchangelist, multimailhook.announcelist, and
+ multimailhook.commitlist) can be comma-separated values and/or
+ multivalued settings in the config file; e.g.,
+
+ [multimailhook]
+ mailinglist = mr.brown@example.com, mr.black@example.com
+ announcelist = Him <him@example.com>
+ announcelist = Jim <jim@example.com>
+ announcelist = pop@example.com
+
+ This might make it easier to maintain short recipients lists without
+ requiring full-fledged mailing list software.
+
+* By default, git-multimail sets email "Reply-To" headers to reply to
+ the pusher (for reference updates) and to the author (for commit
+ notifications). By default, the pusher's email address is
+ constructed by appending "multimailhook.emaildomain" to the pusher's
+ username.
+
+* The generated emails contain a configurable footer. By default, it
+ lists the name of the administrator who should be contacted to
+ unsubscribe from notification emails.
+
+* New option multimailhook.emailmaxlinelength to limit the length of
+ lines in the main part of the email body. The default limit is 500
+ characters.
+
+* New option multimailhook.emailstrictutf8 to ensure that the main
+ part of the email body is valid UTF-8. Invalid characters are
+ turned into the Unicode replacement character, U+FFFD. By default
+ this option is turned on.
+
+* Written in Python. Easier to add new features.
diff --git a/contrib/hooks/multimail/git_multimail.py b/contrib/hooks/multimail/git_multimail.py
new file mode 100755
index 0000000000..8b58ed6444
--- /dev/null
+++ b/contrib/hooks/multimail/git_multimail.py
@@ -0,0 +1,2539 @@
+#! /usr/bin/env python2
+
+# Copyright (c) 2012-2014 Michael Haggerty and others
+# Derived from contrib/hooks/post-receive-email, which is
+# Copyright (c) 2007 Andy Parkins
+# and also includes contributions by other authors.
+#
+# This file is part of git-multimail.
+#
+# git-multimail is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License version
+# 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see
+# <http://www.gnu.org/licenses/>.
+
+"""Generate notification emails for pushes to a git repository.
+
+This hook sends emails describing changes introduced by pushes to a
+git repository. For each reference that was changed, it emits one
+ReferenceChange email summarizing how the reference was changed,
+followed by one Revision email for each new commit that was introduced
+by the reference change.
+
+Each commit is announced in exactly one Revision email. If the same
+commit is merged into another branch in the same or a later push, then
+the ReferenceChange email will list the commit's SHA1 and its one-line
+summary, but no new Revision email will be generated.
+
+This script is designed to be used as a "post-receive" hook in a git
+repository (see githooks(5)). It can also be used as an "update"
+script, but this usage is not completely reliable and is deprecated.
+
+To help with debugging, this script accepts a --stdout option, which
+causes the emails to be written to standard output rather than sent
+using sendmail.
+
+See the accompanying README file for the complete documentation.
+
+"""
+
+import sys
+import os
+import re
+import bisect
+import socket
+import subprocess
+import shlex
+import optparse
+import smtplib
+import time
+
+try:
+ from email.utils import make_msgid
+ from email.utils import getaddresses
+ from email.utils import formataddr
+ from email.utils import formatdate
+ from email.header import Header
+except ImportError:
+ # Prior to Python 2.5, the email module used different names:
+ from email.Utils import make_msgid
+ from email.Utils import getaddresses
+ from email.Utils import formataddr
+ from email.Utils import formatdate
+ from email.Header import Header
+
+
+DEBUG = False
+
+ZEROS = '0' * 40
+LOGBEGIN = '- Log -----------------------------------------------------------------\n'
+LOGEND = '-----------------------------------------------------------------------\n'
+
+ADDR_HEADERS = set(['from', 'to', 'cc', 'bcc', 'reply-to', 'sender'])
+
+# It is assumed in many places that the encoding is uniformly UTF-8,
+# so changing these constants is unsupported. But define them here
+# anyway, to make it easier to find (at least most of) the places
+# where the encoding is important.
+(ENCODING, CHARSET) = ('UTF-8', 'utf-8')
+
+
+REF_CREATED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s created'
+ ' (now %(newrev_short)s)'
+ )
+REF_UPDATED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s updated'
+ ' (%(oldrev_short)s -> %(newrev_short)s)'
+ )
+REF_DELETED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s deleted'
+ ' (was %(oldrev_short)s)'
+ )
+
+REFCHANGE_HEADER_TEMPLATE = """\
+Date: %(send_date)s
+To: %(recipients)s
+Subject: %(subject)s
+MIME-Version: 1.0
+Content-Type: text/plain; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+Message-ID: %(msgid)s
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+X-Git-Host: %(fqdn)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Oldrev: %(oldrev)s
+X-Git-Newrev: %(newrev)s
+Auto-Submitted: auto-generated
+"""
+
+REFCHANGE_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a change to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+
+FOOTER_TEMPLATE = """\
+
+-- \n\
+To stop receiving notification emails like this one, please contact
+%(administrator)s.
+"""
+
+
+REWIND_ONLY_TEMPLATE = """\
+This update removed existing revisions from the reference, leaving the
+reference pointing at a previous point in the repository history.
+
+ * -- * -- N %(refname)s (%(newrev_short)s)
+ \\
+ O -- O -- O (%(oldrev_short)s)
+
+Any revisions marked "omits" are not gone; other references still
+refer to them. Any revisions marked "discards" are gone forever.
+"""
+
+
+NON_FF_TEMPLATE = """\
+This update added new revisions after undoing existing revisions.
+That is to say, some revisions that were in the old version of the
+%(refname_type)s are not in the new version. This situation occurs
+when a user --force pushes a change and generates a repository
+containing something like this:
+
+ * -- * -- B -- O -- O -- O (%(oldrev_short)s)
+ \\
+ N -- N -- N %(refname)s (%(newrev_short)s)
+
+You should already have received notification emails for all of the O
+revisions, and so the following emails describe only the N revisions
+from the common base, B.
+
+Any revisions marked "omits" are not gone; other references still
+refer to them. Any revisions marked "discards" are gone forever.
+"""
+
+
+NO_NEW_REVISIONS_TEMPLATE = """\
+No new revisions were added by this update.
+"""
+
+
+DISCARDED_REVISIONS_TEMPLATE = """\
+This change permanently discards the following revisions:
+"""
+
+
+NO_DISCARDED_REVISIONS_TEMPLATE = """\
+The revisions that were on this %(refname_type)s are still contained in
+other references; therefore, this change does not discard any commits
+from the repository.
+"""
+
+
+NEW_REVISIONS_TEMPLATE = """\
+The %(tot)s revisions listed above as "new" are entirely new to this
+repository and will be described in separate emails. The revisions
+listed as "adds" were already present in the repository and have only
+been added to this reference.
+
+"""
+
+
+TAG_CREATED_TEMPLATE = """\
+ at %(newrev_short)-9s (%(newrev_type)s)
+"""
+
+
+TAG_UPDATED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was modified! ***
+
+ from %(oldrev_short)-9s (%(oldrev_type)s)
+ to %(newrev_short)-9s (%(newrev_type)s)
+"""
+
+
+TAG_DELETED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was deleted! ***
+
+"""
+
+
+# The template used in summary tables. It looks best if this uses the
+# same alignment as TAG_CREATED_TEMPLATE and TAG_UPDATED_TEMPLATE.
+BRIEF_SUMMARY_TEMPLATE = """\
+%(action)10s %(rev_short)-9s %(text)s
+"""
+
+
+NON_COMMIT_UPDATE_TEMPLATE = """\
+This is an unusual reference change because the reference did not
+refer to a commit either before or after the change. We do not know
+how to provide full information about this reference change.
+"""
+
+
+REVISION_HEADER_TEMPLATE = """\
+Date: %(send_date)s
+To: %(recipients)s
+Subject: %(emailprefix)s%(num)02d/%(tot)02d: %(oneline)s
+MIME-Version: 1.0
+Content-Type: text/plain; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+In-Reply-To: %(reply_to_msgid)s
+References: %(reply_to_msgid)s
+X-Git-Host: %(fqdn)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Rev: %(rev)s
+Auto-Submitted: auto-generated
+"""
+
+REVISION_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a commit to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+
+REVISION_FOOTER_TEMPLATE = FOOTER_TEMPLATE
+
+
+class CommandError(Exception):
+ def __init__(self, cmd, retcode):
+ self.cmd = cmd
+ self.retcode = retcode
+ Exception.__init__(
+ self,
+ 'Command "%s" failed with retcode %s' % (' '.join(cmd), retcode,)
+ )
+
+
+class ConfigurationException(Exception):
+ pass
+
+
+# The "git" program (this could be changed to include a full path):
+GIT_EXECUTABLE = 'git'
+
+
+# How "git" should be invoked (including global arguments), as a list
+# of words. This variable is usually initialized automatically by
+# read_git_output() via choose_git_command(), but if a value is set
+# here then it will be used unconditionally.
+GIT_CMD = None
+
+
+def choose_git_command():
+ """Decide how to invoke git, and record the choice in GIT_CMD."""
+
+ global GIT_CMD
+
+ if GIT_CMD is None:
+ try:
+ # Check to see whether the "-c" option is accepted (it was
+ # only added in Git 1.7.2). We don't actually use the
+ # output of "git --version", though if we needed more
+ # specific version information this would be the place to
+ # do it.
+ cmd = [GIT_EXECUTABLE, '-c', 'foo.bar=baz', '--version']
+ read_output(cmd)
+ GIT_CMD = [GIT_EXECUTABLE, '-c', 'i18n.logoutputencoding=%s' % (ENCODING,)]
+ except CommandError:
+ GIT_CMD = [GIT_EXECUTABLE]
+
+
+def read_git_output(args, input=None, keepends=False, **kw):
+ """Read the output of a Git command."""
+
+ if GIT_CMD is None:
+ choose_git_command()
+
+ return read_output(GIT_CMD + args, input=input, keepends=keepends, **kw)
+
+
+def read_output(cmd, input=None, keepends=False, **kw):
+ if input:
+ stdin = subprocess.PIPE
+ else:
+ stdin = None
+ p = subprocess.Popen(
+ cmd, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw
+ )
+ (out, err) = p.communicate(input)
+ retcode = p.wait()
+ if retcode:
+ raise CommandError(cmd, retcode)
+ if not keepends:
+ out = out.rstrip('\n\r')
+ return out
+
+
+def read_git_lines(args, keepends=False, **kw):
+    """Return the lines output by a Git command.
+
+    Return the lines as a list; unless keepends is True, the newlines
+    are stripped off."""
+
+ return read_git_output(args, keepends=True, **kw).splitlines(keepends)
+
+
+def header_encode(text, header_name=None):
+ """Encode and line-wrap the value of an email header field."""
+
+ try:
+ if isinstance(text, str):
+ text = text.decode(ENCODING, 'replace')
+ return Header(text, header_name=header_name).encode()
+ except UnicodeEncodeError:
+ return Header(text, header_name=header_name, charset=CHARSET,
+ errors='replace').encode()
+
+
+def addr_header_encode(text, header_name=None):
+ """Encode and line-wrap the value of an email header field containing
+ email addresses."""
+
+ return Header(
+ ', '.join(
+ formataddr((header_encode(name), emailaddr))
+ for name, emailaddr in getaddresses([text])
+ ),
+ header_name=header_name
+ ).encode()
+
+
+class Config(object):
+ def __init__(self, section, git_config=None):
+ """Represent a section of the git configuration.
+
+ If git_config is specified, it is passed to "git config" in
+ the GIT_CONFIG environment variable, meaning that "git config"
+ will read the specified path rather than the Git default
+ config paths."""
+
+ self.section = section
+ if git_config:
+ self.env = os.environ.copy()
+ self.env['GIT_CONFIG'] = git_config
+ else:
+ self.env = None
+
+ @staticmethod
+ def _split(s):
+ """Split NUL-terminated values."""
+
+ words = s.split('\0')
+ assert words[-1] == ''
+ return words[:-1]
+
+ def get(self, name, default=None):
+ try:
+ values = self._split(read_git_output(
+ ['config', '--get', '--null', '%s.%s' % (self.section, name)],
+ env=self.env, keepends=True,
+ ))
+ assert len(values) == 1
+ return values[0]
+ except CommandError:
+ return default
+
+ def get_bool(self, name, default=None):
+ try:
+ value = read_git_output(
+ ['config', '--get', '--bool', '%s.%s' % (self.section, name)],
+ env=self.env,
+ )
+ except CommandError:
+ return default
+ return value == 'true'
+
+ def get_all(self, name, default=None):
+ """Read a (possibly multivalued) setting from the configuration.
+
+ Return the result as a list of values, or default if the name
+ is unset."""
+
+ try:
+ return self._split(read_git_output(
+ ['config', '--get-all', '--null', '%s.%s' % (self.section, name)],
+ env=self.env, keepends=True,
+ ))
+ except CommandError, e:
+ if e.retcode == 1:
+ # "the section or key is invalid"; i.e., there is no
+ # value for the specified key.
+ return default
+ else:
+ raise
+
+ def get_recipients(self, name, default=None):
+ """Read a recipients list from the configuration.
+
+ Return the result as a comma-separated list of email
+ addresses, or default if the option is unset. If the setting
+ has multiple values, concatenate them with comma separators."""
+
+ lines = self.get_all(name, default=None)
+ if lines is None:
+ return default
+ return ', '.join(line.strip() for line in lines)
+
+ def set(self, name, value):
+ read_git_output(
+ ['config', '%s.%s' % (self.section, name), value],
+ env=self.env,
+ )
+
+ def add(self, name, value):
+ read_git_output(
+ ['config', '--add', '%s.%s' % (self.section, name), value],
+ env=self.env,
+ )
+
+ def has_key(self, name):
+ return self.get_all(name, default=None) is not None
+
+ def unset_all(self, name):
+ try:
+ read_git_output(
+ ['config', '--unset-all', '%s.%s' % (self.section, name)],
+ env=self.env,
+ )
+ except CommandError, e:
+ if e.retcode == 5:
+ # The name doesn't exist, which is what we wanted anyway...
+ pass
+ else:
+ raise
+
+ def set_recipients(self, name, value):
+ self.unset_all(name)
+ for pair in getaddresses([value]):
+ self.add(name, formataddr(pair))
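+
+    # Illustrative usage (the option name below is only an example):
+    #
+    #     config = Config('multimailhook')
+    #     recipients = config.get_recipients('mailinglist', default='')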
+
+
+def generate_summaries(*log_args):
+ """Generate a brief summary for each revision requested.
+
+ log_args are strings that will be passed directly to "git log" as
+ revision selectors. Iterate over (sha1_short, subject) for each
+ commit specified by log_args (subject is the first line of the
+ commit message as a string without EOLs)."""
+
+ cmd = [
+ 'log', '--abbrev', '--format=%h %s',
+ ] + list(log_args) + ['--']
+ for line in read_git_lines(cmd):
+ yield tuple(line.split(' ', 1))
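+
+# Illustrative usage of generate_summaries() (the revision selector and
+# the resulting values are only examples):
+#
+#     for (sha1_short, subject) in generate_summaries('--no-walk', 'HEAD'):
+#         print '%s  %s' % (sha1_short, subject)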
+
+
+def limit_lines(lines, max_lines):
+ for (index, line) in enumerate(lines):
+ if index < max_lines:
+ yield line
+
+ if index >= max_lines:
+ yield '... %d lines suppressed ...\n' % (index + 1 - max_lines,)
+
+
+def limit_linelength(lines, max_linelength):
+ for line in lines:
+ # Don't forget that lines always include a trailing newline.
+ if len(line) > max_linelength + 1:
+ line = line[:max_linelength - 7] + ' [...]\n'
+ yield line
+
+
+class CommitSet(object):
+ """A (constant) set of object names.
+
+ The set should be initialized with full SHA1 object names. The
+    __contains__() method returns True iff its argument is an
+    abbreviation of any of the names in the set."""
+
+ def __init__(self, names):
+ self._names = sorted(names)
+
+ def __len__(self):
+ return len(self._names)
+
+ def __contains__(self, sha1_abbrev):
+ """Return True iff this set contains sha1_abbrev (which might be abbreviated)."""
+
+ i = bisect.bisect_left(self._names, sha1_abbrev)
+ return i < len(self) and self._names[i].startswith(sha1_abbrev)
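+
+    # Illustrative behavior (names shortened for readability; real
+    # entries are full 40-character SHA1s):
+    #
+    #     cs = CommitSet(['0123abcd', '89abcdef'])
+    #     '0123a' in cs   # True: '0123abcd' starts with the abbreviation
+    #     'ffff' in cs    # False: no name has that prefix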
+
+
+class GitObject(object):
+ def __init__(self, sha1, type=None):
+ if sha1 == ZEROS:
+ self.sha1 = self.type = self.commit_sha1 = None
+ else:
+ self.sha1 = sha1
+ self.type = type or read_git_output(['cat-file', '-t', self.sha1])
+
+ if self.type == 'commit':
+ self.commit_sha1 = self.sha1
+ elif self.type == 'tag':
+ try:
+ self.commit_sha1 = read_git_output(
+ ['rev-parse', '--verify', '%s^0' % (self.sha1,)]
+ )
+ except CommandError:
+ # Cannot deref tag to determine commit_sha1
+ self.commit_sha1 = None
+ else:
+ self.commit_sha1 = None
+
+ self.short = read_git_output(['rev-parse', '--short', sha1])
+
+ def get_summary(self):
+ """Return (sha1_short, subject) for this commit."""
+
+ if not self.sha1:
+ raise ValueError('Empty commit has no summary')
+
+ return iter(generate_summaries('--no-walk', self.sha1)).next()
+
+ def __eq__(self, other):
+ return isinstance(other, GitObject) and self.sha1 == other.sha1
+
+ def __hash__(self):
+ return hash(self.sha1)
+
+ def __nonzero__(self):
+ return bool(self.sha1)
+
+ def __str__(self):
+ return self.sha1 or ZEROS
+
+
+class Change(object):
+ """A Change that has been made to the Git repository.
+
+ Abstract class from which both Revisions and ReferenceChanges are
+ derived. A Change knows how to generate a notification email
+ describing itself."""
+
+ def __init__(self, environment):
+ self.environment = environment
+ self._values = None
+
+ def _compute_values(self):
+ """Return a dictionary {keyword : expansion} for this Change.
+
+ Derived classes overload this method to add more entries to
+ the return value. This method is used internally by
+ get_values(). The return value should always be a new
+ dictionary."""
+
+ return self.environment.get_values()
+
+ def get_values(self, **extra_values):
+ """Return a dictionary {keyword : expansion} for this Change.
+
+ Return a dictionary mapping keywords to the values that they
+ should be expanded to for this Change (used when interpolating
+ template strings). If any keyword arguments are supplied, add
+ those to the return value as well. The return value is always
+ a new dictionary."""
+
+ if self._values is None:
+ self._values = self._compute_values()
+
+ values = self._values.copy()
+ if extra_values:
+ values.update(extra_values)
+ return values
+
+ def expand(self, template, **extra_values):
+ """Expand template.
+
+ Expand the template (which should be a string) using string
+ interpolation of the values for this Change. If any keyword
+ arguments are provided, also include those in the keywords
+ available for interpolation."""
+
+ return template % self.get_values(**extra_values)
+
+ def expand_lines(self, template, **extra_values):
+ """Break template into lines and expand each line."""
+
+ values = self.get_values(**extra_values)
+ for line in template.splitlines(True):
+ yield line % values
+
+ def expand_header_lines(self, template, **extra_values):
+ """Break template into lines and expand each line as an RFC 2822 header.
+
+ Encode values and split up lines that are too long. Silently
+ skip lines that contain references to unknown variables."""
+
+ values = self.get_values(**extra_values)
+ for line in template.splitlines():
+ (name, value) = line.split(':', 1)
+
+ try:
+ value = value % values
+ except KeyError, e:
+ if DEBUG:
+ sys.stderr.write(
+ 'Warning: unknown variable %r in the following line; line skipped:\n'
+ ' %s\n'
+ % (e.args[0], line,)
+ )
+ else:
+ if name.lower() in ADDR_HEADERS:
+ value = addr_header_encode(value, name)
+ else:
+ value = header_encode(value, name)
+ for splitline in ('%s: %s\n' % (name, value)).splitlines(True):
+ yield splitline
+
+ def generate_email_header(self):
+ """Generate the RFC 2822 email headers for this Change, a line at a time.
+
+ The output should not include the trailing blank line."""
+
+ raise NotImplementedError()
+
+ def generate_email_intro(self):
+ """Generate the email intro for this Change, a line at a time.
+
+ The output will be used as the standard boilerplate at the top
+ of the email body."""
+
+ raise NotImplementedError()
+
+ def generate_email_body(self):
+ """Generate the main part of the email body, a line at a time.
+
+ The text in the body might be truncated after a specified
+ number of lines (see multimailhook.emailmaxlines)."""
+
+ raise NotImplementedError()
+
+ def generate_email_footer(self):
+ """Generate the footer of the email, a line at a time.
+
+ The footer is always included, irrespective of
+ multimailhook.emailmaxlines."""
+
+ raise NotImplementedError()
+
+ def generate_email(self, push, body_filter=None, extra_header_values={}):
+ """Generate an email describing this change.
+
+ Iterate over the lines (including the header lines) of an
+ email describing this change. If body_filter is not None,
+ then use it to filter the lines that are intended for the
+ email body.
+
+ The extra_header_values field is received as a dict and not as
+ **kwargs, to allow passing other keyword arguments in the
+        future (e.g., passing extra values to generate_email_intro())."""
+
+ for line in self.generate_email_header(**extra_header_values):
+ yield line
+ yield '\n'
+ for line in self.generate_email_intro():
+ yield line
+
+ body = self.generate_email_body(push)
+ if body_filter is not None:
+ body = body_filter(body)
+ for line in body:
+ yield line
+
+ for line in self.generate_email_footer():
+ yield line
+
+
+class Revision(Change):
+ """A Change consisting of a single git commit."""
+
+ def __init__(self, reference_change, rev, num, tot):
+ Change.__init__(self, reference_change.environment)
+ self.reference_change = reference_change
+ self.rev = rev
+ self.change_type = self.reference_change.change_type
+ self.refname = self.reference_change.refname
+ self.num = num
+ self.tot = tot
+ self.author = read_git_output(['log', '--no-walk', '--format=%aN <%aE>', self.rev.sha1])
+ self.recipients = self.environment.get_revision_recipients(self)
+
+ def _compute_values(self):
+ values = Change._compute_values(self)
+
+ oneline = read_git_output(
+ ['log', '--format=%s', '--no-walk', self.rev.sha1]
+ )
+
+ values['rev'] = self.rev.sha1
+ values['rev_short'] = self.rev.short
+ values['change_type'] = self.change_type
+ values['refname'] = self.refname
+ values['short_refname'] = self.reference_change.short_refname
+ values['refname_type'] = self.reference_change.refname_type
+ values['reply_to_msgid'] = self.reference_change.msgid
+ values['num'] = self.num
+ values['tot'] = self.tot
+ values['recipients'] = self.recipients
+ values['oneline'] = oneline
+ values['author'] = self.author
+
+ reply_to = self.environment.get_reply_to_commit(self)
+ if reply_to:
+ values['reply_to'] = reply_to
+
+ return values
+
+ def generate_email_header(self, **extra_values):
+ for line in self.expand_header_lines(
+ REVISION_HEADER_TEMPLATE, **extra_values
+ ):
+ yield line
+
+ def generate_email_intro(self):
+ for line in self.expand_lines(REVISION_INTRO_TEMPLATE):
+ yield line
+
+ def generate_email_body(self, push):
+ """Show this revision."""
+
+ return read_git_lines(
+ ['log'] + self.environment.commitlogopts + ['-1', self.rev.sha1],
+ keepends=True,
+ )
+
+ def generate_email_footer(self):
+ return self.expand_lines(REVISION_FOOTER_TEMPLATE)
+
+
+class ReferenceChange(Change):
+ """A Change to a Git reference.
+
+ An abstract class representing a create, update, or delete of a
+ Git reference. Derived classes handle specific types of reference
+ (e.g., tags vs. branches). These classes generate the main
+ reference change email summarizing the reference change and
+    whether it caused any commits to be added or removed.
+
+ ReferenceChange objects are usually created using the static
+ create() method, which has the logic to decide which derived class
+ to instantiate."""
+
+ REF_RE = re.compile(r'^refs\/(?P<area>[^\/]+)\/(?P<shortname>.*)$')
+
+ @staticmethod
+ def create(environment, oldrev, newrev, refname):
+ """Return a ReferenceChange object representing the change.
+
+ Return an object that represents the type of change that is being
+ made. oldrev and newrev should be SHA1s or ZEROS."""
+
+ old = GitObject(oldrev)
+ new = GitObject(newrev)
+ rev = new or old
+
+        # The revision type tells us what type of object the new
+        # revision is; combined with the location of the ref, we can
+        # decide between
+ # - working branch
+ # - tracking branch
+ # - unannotated tag
+ # - annotated tag
+ m = ReferenceChange.REF_RE.match(refname)
+ if m:
+ area = m.group('area')
+ short_refname = m.group('shortname')
+ else:
+ area = ''
+ short_refname = refname
+
+ if rev.type == 'tag':
+ # Annotated tag:
+ klass = AnnotatedTagChange
+ elif rev.type == 'commit':
+ if area == 'tags':
+ # Non-annotated tag:
+ klass = NonAnnotatedTagChange
+ elif area == 'heads':
+ # Branch:
+ klass = BranchChange
+ elif area == 'remotes':
+ # Tracking branch:
+ sys.stderr.write(
+ '*** Push-update of tracking branch %r\n'
+ '*** - incomplete email generated.\n'
+ % (refname,)
+ )
+ klass = OtherReferenceChange
+ else:
+ # Some other reference namespace:
+ sys.stderr.write(
+ '*** Push-update of strange reference %r\n'
+ '*** - incomplete email generated.\n'
+ % (refname,)
+ )
+ klass = OtherReferenceChange
+ else:
+ # Anything else (is there anything else?)
+ sys.stderr.write(
+ '*** Unknown type of update to %r (%s)\n'
+ '*** - incomplete email generated.\n'
+ % (refname, rev.type,)
+ )
+ klass = OtherReferenceChange
+
+ return klass(
+ environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ Change.__init__(self, environment)
+ self.change_type = {
+ (False, True) : 'create',
+ (True, True) : 'update',
+ (True, False) : 'delete',
+ }[bool(old), bool(new)]
+ self.refname = refname
+ self.short_refname = short_refname
+ self.old = old
+ self.new = new
+ self.rev = rev
+ self.msgid = make_msgid()
+ self.diffopts = environment.diffopts
+ self.logopts = environment.logopts
+ self.commitlogopts = environment.commitlogopts
+ self.showlog = environment.refchange_showlog
+
+ def _compute_values(self):
+ values = Change._compute_values(self)
+
+ values['change_type'] = self.change_type
+ values['refname_type'] = self.refname_type
+ values['refname'] = self.refname
+ values['short_refname'] = self.short_refname
+ values['msgid'] = self.msgid
+ values['recipients'] = self.recipients
+ values['oldrev'] = str(self.old)
+ values['oldrev_short'] = self.old.short
+ values['newrev'] = str(self.new)
+ values['newrev_short'] = self.new.short
+
+ if self.old:
+ values['oldrev_type'] = self.old.type
+ if self.new:
+ values['newrev_type'] = self.new.type
+
+ reply_to = self.environment.get_reply_to_refchange(self)
+ if reply_to:
+ values['reply_to'] = reply_to
+
+ return values
+
+ def get_subject(self):
+ template = {
+ 'create' : REF_CREATED_SUBJECT_TEMPLATE,
+ 'update' : REF_UPDATED_SUBJECT_TEMPLATE,
+ 'delete' : REF_DELETED_SUBJECT_TEMPLATE,
+ }[self.change_type]
+ return self.expand(template)
+
+ def generate_email_header(self, **extra_values):
+ if 'subject' not in extra_values:
+ extra_values['subject'] = self.get_subject()
+
+ for line in self.expand_header_lines(
+ REFCHANGE_HEADER_TEMPLATE, **extra_values
+ ):
+ yield line
+
+ def generate_email_intro(self):
+ for line in self.expand_lines(REFCHANGE_INTRO_TEMPLATE):
+ yield line
+
+ def generate_email_body(self, push):
+ """Call the appropriate body-generation routine.
+
+ Call one of generate_create_summary() /
+ generate_update_summary() / generate_delete_summary()."""
+
+ change_summary = {
+ 'create' : self.generate_create_summary,
+ 'delete' : self.generate_delete_summary,
+ 'update' : self.generate_update_summary,
+ }[self.change_type](push)
+ for line in change_summary:
+ yield line
+
+ for line in self.generate_revision_change_summary(push):
+ yield line
+
+ def generate_email_footer(self):
+ return self.expand_lines(FOOTER_TEMPLATE)
+
+ def generate_revision_change_log(self, new_commits_list):
+ if self.showlog:
+ yield '\n'
+ yield 'Detailed log of new commits:\n\n'
+ for line in read_git_lines(
+ ['log', '--no-walk']
+ + self.logopts
+ + new_commits_list
+ + ['--'],
+ keepends=True,
+ ):
+ yield line
+
+ def generate_revision_change_summary(self, push):
+ """Generate a summary of the revisions added/removed by this change."""
+
+ if self.new.commit_sha1 and not self.old.commit_sha1:
+ # A new reference was created. List the new revisions
+ # brought by the new reference (i.e., those revisions that
+ # were not in the repository before this reference
+ # change).
+ sha1s = list(push.get_new_commits(self))
+ sha1s.reverse()
+ tot = len(sha1s)
+ new_revisions = [
+ Revision(self, GitObject(sha1), num=i+1, tot=tot)
+ for (i, sha1) in enumerate(sha1s)
+ ]
+
+ if new_revisions:
+ yield self.expand('This %(refname_type)s includes the following new commits:\n')
+ yield '\n'
+ for r in new_revisions:
+ (sha1, subject) = r.rev.get_summary()
+ yield r.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='new', text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=tot):
+ yield line
+ for line in self.generate_revision_change_log([r.rev.sha1 for r in new_revisions]):
+ yield line
+ else:
+ for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+ yield line
+
+ elif self.new.commit_sha1 and self.old.commit_sha1:
+ # A reference was changed to point at a different commit.
+ # List the revisions that were removed and/or added *from
+ # that reference* by this reference change, along with a
+ # diff between the trees for its old and new values.
+
+ # List of the revisions that were added to the branch by
+ # this update. Note this list can include revisions that
+ # have already had notification emails; we want such
+ # revisions in the summary even though we will not send
+ # new notification emails for them.
+ adds = list(generate_summaries(
+ '--topo-order', '--reverse', '%s..%s'
+ % (self.old.commit_sha1, self.new.commit_sha1,)
+ ))
+
+ # List of the revisions that were removed from the branch
+ # by this update. This will be empty except for
+ # non-fast-forward updates.
+ discards = list(generate_summaries(
+ '%s..%s' % (self.new.commit_sha1, self.old.commit_sha1,)
+ ))
+
+ if adds:
+ new_commits_list = push.get_new_commits(self)
+ else:
+ new_commits_list = []
+ new_commits = CommitSet(new_commits_list)
+
+ if discards:
+ discarded_commits = CommitSet(push.get_discarded_commits(self))
+ else:
+ discarded_commits = CommitSet([])
+
+ if discards and adds:
+ for (sha1, subject) in discards:
+ if sha1 in discarded_commits:
+ action = 'discards'
+ else:
+ action = 'omits'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ for (sha1, subject) in adds:
+ if sha1 in new_commits:
+ action = 'new'
+ else:
+ action = 'adds'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(NON_FF_TEMPLATE):
+ yield line
+
+ elif discards:
+ for (sha1, subject) in discards:
+ if sha1 in discarded_commits:
+ action = 'discards'
+ else:
+ action = 'omits'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(REWIND_ONLY_TEMPLATE):
+ yield line
+
+ elif adds:
+ (sha1, subject) = self.old.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='from',
+ rev_short=sha1, text=subject,
+ )
+ for (sha1, subject) in adds:
+ if sha1 in new_commits:
+ action = 'new'
+ else:
+ action = 'adds'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+
+ yield '\n'
+
+ if new_commits:
+ for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=len(new_commits)):
+ yield line
+ for line in self.generate_revision_change_log(new_commits_list):
+ yield line
+ else:
+ for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+ yield line
+
+ # The diffstat is shown from the old revision to the new
+ # revision. This is to show the truth of what happened in
+ # this change. There's no point showing the stat from the
+ # base to the new revision because the base is effectively a
+ # random revision at this point - the user will be interested
+ # in what this revision changed - including the undoing of
+ # previous revisions in the case of non-fast-forward updates.
+ yield '\n'
+ yield 'Summary of changes:\n'
+ for line in read_git_lines(
+ ['diff-tree']
+ + self.diffopts
+ + ['%s..%s' % (self.old.commit_sha1, self.new.commit_sha1,)],
+ keepends=True,
+ ):
+ yield line
+
+ elif self.old.commit_sha1 and not self.new.commit_sha1:
+ # A reference was deleted. List the revisions that were
+ # removed from the repository by this reference change.
+
+ sha1s = list(push.get_discarded_commits(self))
+ tot = len(sha1s)
+ discarded_revisions = [
+ Revision(self, GitObject(sha1), num=i+1, tot=tot)
+ for (i, sha1) in enumerate(sha1s)
+ ]
+
+ if discarded_revisions:
+ for line in self.expand_lines(DISCARDED_REVISIONS_TEMPLATE):
+ yield line
+ yield '\n'
+ for r in discarded_revisions:
+ (sha1, subject) = r.rev.get_summary()
+ yield r.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='discards', text=subject,
+ )
+ else:
+ for line in self.expand_lines(NO_DISCARDED_REVISIONS_TEMPLATE):
+ yield line
+
+ elif not self.old.commit_sha1 and not self.new.commit_sha1:
+ for line in self.expand_lines(NON_COMMIT_UPDATE_TEMPLATE):
+ yield line
+
+ def generate_create_summary(self, push):
+ """Called for the creation of a reference."""
+
+ # This is a new reference and so oldrev is not valid
+ (sha1, subject) = self.new.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='at',
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+
+ def generate_update_summary(self, push):
+ """Called for the change of a pre-existing branch."""
+
+ return iter([])
+
+ def generate_delete_summary(self, push):
+ """Called for the deletion of any type of reference."""
+
+ (sha1, subject) = self.old.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='was',
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+
+
+class BranchChange(ReferenceChange):
+ refname_type = 'branch'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+
+class AnnotatedTagChange(ReferenceChange):
+ refname_type = 'annotated tag'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_announce_recipients(self)
+ self.show_shortlog = environment.announce_show_shortlog
+
+ ANNOTATED_TAG_FORMAT = (
+ '%(*objectname)\n'
+ '%(*objecttype)\n'
+ '%(taggername)\n'
+ '%(taggerdate)'
+ )
+
+ def describe_tag(self, push):
+ """Describe the new value of an annotated tag."""
+
+ # Use git for-each-ref to pull out the individual fields from
+ # the tag
+ [tagobject, tagtype, tagger, tagged] = read_git_lines(
+ ['for-each-ref', '--format=%s' % (self.ANNOTATED_TAG_FORMAT,), self.refname],
+ )
+
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='tagging',
+ rev_short=tagobject, text='(%s)' % (tagtype,),
+ )
+ if tagtype == 'commit':
+ # If the tagged object is a commit, then we assume this is a
+ # release, and so we calculate which tag this tag is
+ # replacing
+ try:
+ prevtag = read_git_output(['describe', '--abbrev=0', '%s^' % (self.new,)])
+ except CommandError:
+ prevtag = None
+ if prevtag:
+ yield ' replaces %s\n' % (prevtag,)
+ else:
+ prevtag = None
+ yield ' length %s bytes\n' % (read_git_output(['cat-file', '-s', tagobject]),)
+
+ yield ' tagged by %s\n' % (tagger,)
+ yield ' on %s\n' % (tagged,)
+ yield '\n'
+
+ # Show the content of the tag message; this might contain a
+ # change log or release notes so is worth displaying.
+ yield LOGBEGIN
+ contents = list(read_git_lines(['cat-file', 'tag', self.new.sha1], keepends=True))
+ contents = contents[contents.index('\n') + 1:]
+ if contents and contents[-1][-1:] != '\n':
+ contents.append('\n')
+ for line in contents:
+ yield line
+
+ if self.show_shortlog and tagtype == 'commit':
+ # Only commit tags make sense to have rev-list operations
+ # performed on them
+ yield '\n'
+ if prevtag:
+ # Show changes since the previous release
+ revlist = read_git_output(
+ ['rev-list', '--pretty=short', '%s..%s' % (prevtag, self.new,)],
+ keepends=True,
+ )
+ else:
+ # No previous tag, show all the changes since time
+ # began
+ revlist = read_git_output(
+ ['rev-list', '--pretty=short', '%s' % (self.new,)],
+ keepends=True,
+ )
+ for line in read_git_lines(['shortlog'], input=revlist, keepends=True):
+ yield line
+
+ yield LOGEND
+ yield '\n'
+
+ def generate_create_summary(self, push):
+ """Called for the creation of an annotated tag."""
+
+ for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+ yield line
+
+ for line in self.describe_tag(push):
+ yield line
+
+ def generate_update_summary(self, push):
+ """Called for the update of an annotated tag.
+
+ This is probably a rare event and may not even be allowed."""
+
+ for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+ yield line
+
+ for line in self.describe_tag(push):
+ yield line
+
+ def generate_delete_summary(self, push):
+        """Called for the deletion of an annotated tag."""
+
+ for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+ yield line
+
+ yield self.expand(' tag was %(oldrev_short)s\n')
+ yield '\n'
+
+
+class NonAnnotatedTagChange(ReferenceChange):
+ refname_type = 'tag'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+ def generate_create_summary(self, push):
+        """Called for the creation of a non-annotated tag."""
+
+ for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+ yield line
+
+ def generate_update_summary(self, push):
+ """Called when a non-annotated reference is updated."""
+
+ for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+ yield line
+
+ def generate_delete_summary(self, push):
+        """Called for the deletion of a non-annotated tag."""
+
+ for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+ yield line
+
+ for line in ReferenceChange.generate_delete_summary(self, push):
+ yield line
+
+
+class OtherReferenceChange(ReferenceChange):
+ refname_type = 'reference'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ # We use the full refname as short_refname, because otherwise
+ # the full name of the reference would not be obvious from the
+ # text of the email.
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+
+class Mailer(object):
+ """An object that can send emails."""
+
+ def send(self, lines, to_addrs):
+ """Send an email consisting of lines.
+
+ lines must be an iterable over the lines constituting the
+ header and body of the email. to_addrs is a list of recipient
+ addresses (can be needed even if lines already contains a
+ "To:" field). It can be either a string (comma-separated list
+ of email addresses) or a Python list of individual email
+ addresses.
+
+ """
+
+ raise NotImplementedError()
+
+
+class SendMailer(Mailer):
+ """Send emails using 'sendmail -oi -t'."""
+
+ SENDMAIL_CANDIDATES = [
+ '/usr/sbin/sendmail',
+ '/usr/lib/sendmail',
+ ]
+
+ @staticmethod
+ def find_sendmail():
+ for path in SendMailer.SENDMAIL_CANDIDATES:
+ if os.access(path, os.X_OK):
+ return path
+ else:
+ raise ConfigurationException(
+ 'No sendmail executable found. '
+ 'Try setting multimailhook.sendmailCommand.'
+ )
+
+ def __init__(self, command=None, envelopesender=None):
+ """Construct a SendMailer instance.
+
+ command should be the command and arguments used to invoke
+ sendmail, as a list of strings. If an envelopesender is
+ provided, it will also be passed to the command, via '-f
+ envelopesender'."""
+
+ if command:
+ self.command = command[:]
+ else:
+ self.command = [self.find_sendmail(), '-oi', '-t']
+
+ if envelopesender:
+ self.command.extend(['-f', envelopesender])
+
+ def send(self, lines, to_addrs):
+ try:
+ p = subprocess.Popen(self.command, stdin=subprocess.PIPE)
+ except OSError, e:
+ sys.stderr.write(
+ '*** Cannot execute command: %s\n' % ' '.join(self.command)
+ + '*** %s\n' % str(e)
+ + '*** Try setting multimailhook.mailer to "smtp"\n'
+ '*** to send emails without using the sendmail command.\n'
+ )
+ sys.exit(1)
+ try:
+ p.stdin.writelines(lines)
+ except:
+ sys.stderr.write(
+ '*** Error while generating commit email\n'
+ '*** - mail sending aborted.\n'
+ )
+ p.terminate()
+ raise
+ else:
+ p.stdin.close()
+ retcode = p.wait()
+ if retcode:
+ raise CommandError(self.command, retcode)
+
+
+class SMTPMailer(Mailer):
+ """Send emails using Python's smtplib."""
+
+ def __init__(self, envelopesender, smtpserver):
+ if not envelopesender:
+ sys.stderr.write(
+ 'fatal: git_multimail: cannot use SMTPMailer without a sender address.\n'
+ 'please set either multimailhook.envelopeSender or user.email\n'
+ )
+ sys.exit(1)
+ self.envelopesender = envelopesender
+ self.smtpserver = smtpserver
+ try:
+ self.smtp = smtplib.SMTP(self.smtpserver)
+ except Exception, e:
+ sys.stderr.write('*** Error establishing SMTP connection to %s***\n' % self.smtpserver)
+ sys.stderr.write('*** %s\n' % str(e))
+ sys.exit(1)
+
+ def __del__(self):
+ self.smtp.quit()
+
+ def send(self, lines, to_addrs):
+ try:
+ msg = ''.join(lines)
+ # turn comma-separated list into Python list if needed.
+ if isinstance(to_addrs, basestring):
+ to_addrs = [email for (name, email) in getaddresses([to_addrs])]
+ self.smtp.sendmail(self.envelopesender, to_addrs, msg)
+ except Exception, e:
+ sys.stderr.write('*** Error sending email***\n')
+ sys.stderr.write('*** %s\n' % str(e))
+ self.smtp.quit()
+ sys.exit(1)
+
+
+class OutputMailer(Mailer):
+ """Write emails to an output stream, bracketed by lines of '=' characters.
+
+ This is intended for debugging purposes."""
+
+ SEPARATOR = '=' * 75 + '\n'
+
+ def __init__(self, f):
+ self.f = f
+
+ def send(self, lines, to_addrs):
+ self.f.write(self.SEPARATOR)
+ self.f.writelines(lines)
+ self.f.write(self.SEPARATOR)
+
+
+def get_git_dir():
+ """Determine GIT_DIR.
+
+ Determine GIT_DIR either from the GIT_DIR environment variable or
+ from the working directory, using Git's usual rules."""
+
+ try:
+ return read_git_output(['rev-parse', '--git-dir'])
+ except CommandError:
+ sys.stderr.write('fatal: git_multimail: not in a git directory\n')
+ sys.exit(1)
+
+
+class Environment(object):
+ """Describes the environment in which the push is occurring.
+
+ An Environment object encapsulates information about the local
+ environment. For example, it knows how to determine:
+
+ * the name of the repository to which the push occurred
+
+ * what user did the push
+
+ * what users want to be informed about various types of changes.
+
+ An Environment object is expected to have the following methods:
+
+ get_repo_shortname()
+
+ Return a short name for the repository, for display
+ purposes.
+
+ get_repo_path()
+
+ Return the absolute path to the Git repository.
+
+ get_emailprefix()
+
+ Return a string that will be prefixed to every email's
+ subject.
+
+ get_pusher()
+
+ Return the username of the person who pushed the changes.
+ This value is used in the email body to indicate who
+ pushed the change.
+
+ get_pusher_email() (may return None)
+
+ Return the email address of the person who pushed the
+ changes. The value should be a single RFC 2822 email
+ address as a string; e.g., "Joe User <user@example.com>"
+ if available, otherwise "user@example.com". If set, the
+ value is used as the Reply-To address for refchange
+ emails. If it is impossible to determine the pusher's
+ email, this attribute should be set to None (in which case
+ no Reply-To header will be output).
+
+ get_sender()
+
+ Return the address to be used as the 'From' email address
+ in the email envelope.
+
+ get_fromaddr()
+
+ Return the 'From' email address used in the email 'From:'
+ headers. (May be a full RFC 2822 email address like 'Joe
+ User <user@example.com>'.)
+
+ get_administrator()
+
+ Return the name and/or email of the repository
+ administrator. This value is used in the footer as the
+ person to whom requests to be removed from the
+ notification list should be sent. Ideally, it should
+ include a valid email address.
+
+ get_reply_to_refchange()
+ get_reply_to_commit()
+
+ Return the address to use in the email "Reply-To" header,
+ as a string. These can be an RFC 2822 email address, or
+ None to omit the "Reply-To" header.
+ get_reply_to_refchange() is used for refchange emails;
+ get_reply_to_commit() is used for individual commit
+ emails.
+
+ They should also define the following attributes:
+
+ announce_show_shortlog (bool)
+
+ True iff announce emails should include a shortlog.
+
+ refchange_showlog (bool)
+
+ True iff refchanges emails should include a detailed log.
+
+ diffopts (list of strings)
+
+ The options that should be passed to 'git diff' for the
+ summary email. The value should be a list of strings
+ representing words to be passed to the command.
+
+ logopts (list of strings)
+
+ Analogous to diffopts, but contains options passed to
+ 'git log' when generating the detailed log for a set of
+ commits (see refchange_showlog)
+
+ commitlogopts (list of strings)
+
+ The options that should be passed to 'git log' for each
+ commit mail. The value should be a list of strings
+ representing words to be passed to the command.
+
+ """
+
+ REPO_NAME_RE = re.compile(r'^(?P<name>.+?)(?:\.git)$')
+
+ def __init__(self, osenv=None):
+ self.osenv = osenv or os.environ
+ self.announce_show_shortlog = False
+ self.maxcommitemails = 500
+ self.diffopts = ['--stat', '--summary', '--find-copies-harder']
+ self.logopts = []
+ self.refchange_showlog = False
+ self.commitlogopts = ['-C', '--stat', '-p', '--cc']
+
+ self.COMPUTED_KEYS = [
+ 'administrator',
+ 'charset',
+ 'emailprefix',
+ 'fromaddr',
+ 'pusher',
+ 'pusher_email',
+ 'repo_path',
+ 'repo_shortname',
+ 'sender',
+ ]
+
+ self._values = None
+
+ def get_repo_shortname(self):
+ """Use the last part of the repo path, with ".git" stripped off if present."""
+
+ basename = os.path.basename(os.path.abspath(self.get_repo_path()))
+ m = self.REPO_NAME_RE.match(basename)
+ if m:
+ return m.group('name')
+ else:
+ return basename
+
+ def get_pusher(self):
+ raise NotImplementedError()
+
+ def get_pusher_email(self):
+ return None
+
+ def get_administrator(self):
+ return 'the administrator of this repository'
+
+ def get_emailprefix(self):
+ return ''
+
+ def get_repo_path(self):
+ if read_git_output(['rev-parse', '--is-bare-repository']) == 'true':
+ path = get_git_dir()
+ else:
+ path = read_git_output(['rev-parse', '--show-toplevel'])
+ return os.path.abspath(path)
+
+ def get_charset(self):
+ return CHARSET
+
+ def get_values(self):
+ """Return a dictionary {keyword : expansion} for this Environment.
+
+ This method is called by Change._compute_values(). The keys
+ in the returned dictionary are available to be used in any of
+ the templates. The dictionary is created by calling
+ self.get_NAME() for each of the attributes named in
+ COMPUTED_KEYS and recording those that do not return None.
+ The return value is always a new dictionary."""
+
+ if self._values is None:
+ values = {}
+
+ for key in self.COMPUTED_KEYS:
+ value = getattr(self, 'get_%s' % (key,))()
+ if value is not None:
+ values[key] = value
+
+ self._values = values
+
+ return self._values.copy()
+
+ def get_refchange_recipients(self, refchange):
+ """Return the recipients for notifications about refchange.
+
+ Return the list of email addresses to which notifications
+ about the specified ReferenceChange should be sent."""
+
+ raise NotImplementedError()
+
+ def get_announce_recipients(self, annotated_tag_change):
+ """Return the recipients for notifications about annotated_tag_change.
+
+ Return the list of email addresses to which notifications
+ about the specified AnnotatedTagChange should be sent."""
+
+ raise NotImplementedError()
+
+ def get_reply_to_refchange(self, refchange):
+ return self.get_pusher_email()
+
+ def get_revision_recipients(self, revision):
+ """Return the recipients for messages about revision.
+
+ Return the list of email addresses to which notifications
+ about the specified Revision should be sent. This method
+ could be overridden, for example, to take into account the
+ contents of the revision when deciding whom to notify about
+ it. For example, there could be a scheme for users to express
+ interest in particular files or subdirectories, and only
+ receive notification emails for revisions that affect those
+ files."""
+
+ raise NotImplementedError()
+
+ def get_reply_to_commit(self, revision):
+ return revision.author
+
+ def filter_body(self, lines):
+ """Filter the lines intended for an email body.
+
+ lines is an iterable over the lines that would go into the
+ email body. Filter it (e.g., limit the number of lines, the
+ line length, character set, etc.), returning another iterable.
+ See FilterLinesEnvironmentMixin and MaxlinesEnvironmentMixin
+ for classes implementing this functionality."""
+
+ return lines
+
+
+class ConfigEnvironmentMixin(Environment):
+ """A mixin that sets self.config to its constructor's config argument.
+
+ This class's constructor consumes the "config" argument.
+
+ Mixins that need to inspect the config should inherit from this
+ class (1) to make sure that "config" is still in the constructor
+ arguments when its own constructor runs and/or (2) to be sure that
+ self.config is set after construction."""
+
+ def __init__(self, config, **kw):
+ super(ConfigEnvironmentMixin, self).__init__(**kw)
+ self.config = config
+
+
+class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin):
+ """An Environment that reads most of its information from "git config"."""
+
+ def __init__(self, config, **kw):
+ super(ConfigOptionsEnvironmentMixin, self).__init__(
+ config=config, **kw
+ )
+
+ self.announce_show_shortlog = config.get_bool(
+ 'announceshortlog', default=self.announce_show_shortlog
+ )
+
+ self.refchange_showlog = config.get_bool(
+ 'refchangeshowlog', default=self.refchange_showlog
+ )
+
+ maxcommitemails = config.get('maxcommitemails')
+ if maxcommitemails is not None:
+ try:
+ self.maxcommitemails = int(maxcommitemails)
+ except ValueError:
+ sys.stderr.write(
+ '*** Malformed value for multimailhook.maxCommitEmails: %s\n' % maxcommitemails
+ + '*** Expected a number. Ignoring.\n'
+ )
+
+ diffopts = config.get('diffopts')
+ if diffopts is not None:
+ self.diffopts = shlex.split(diffopts)
+
+ logopts = config.get('logopts')
+ if logopts is not None:
+ self.logopts = shlex.split(logopts)
+
+ commitlogopts = config.get('commitlogopts')
+ if commitlogopts is not None:
+ self.commitlogopts = shlex.split(commitlogopts)
+
+ reply_to = config.get('replyTo')
+ self.__reply_to_refchange = config.get('replyToRefchange', default=reply_to)
+ if (
+ self.__reply_to_refchange is not None
+ and self.__reply_to_refchange.lower() == 'author'
+ ):
+ raise ConfigurationException(
+ '"author" is not an allowed setting for replyToRefchange'
+ )
+ self.__reply_to_commit = config.get('replyToCommit', default=reply_to)
+
+ def get_administrator(self):
+ return (
+ self.config.get('administrator')
+ or self.get_sender()
+ or super(ConfigOptionsEnvironmentMixin, self).get_administrator()
+ )
+
+ def get_repo_shortname(self):
+ return (
+ self.config.get('reponame')
+ or super(ConfigOptionsEnvironmentMixin, self).get_repo_shortname()
+ )
+
+ def get_emailprefix(self):
+ emailprefix = self.config.get('emailprefix')
+ if emailprefix and emailprefix.strip():
+ return emailprefix.strip() + ' '
+ else:
+ return '[%s] ' % (self.get_repo_shortname(),)
+
+ def get_sender(self):
+ return self.config.get('envelopesender')
+
+ def get_fromaddr(self):
+ fromaddr = self.config.get('from')
+ if fromaddr:
+ return fromaddr
+ else:
+ config = Config('user')
+ fromname = config.get('name', default='')
+ fromemail = config.get('email', default='')
+ if fromemail:
+ return formataddr([fromname, fromemail])
+ else:
+ return self.get_sender()
+
+ def get_reply_to_refchange(self, refchange):
+ if self.__reply_to_refchange is None:
+ return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_refchange(refchange)
+ elif self.__reply_to_refchange.lower() == 'pusher':
+ return self.get_pusher_email()
+ elif self.__reply_to_refchange.lower() == 'none':
+ return None
+ else:
+ return self.__reply_to_refchange
+
+ def get_reply_to_commit(self, revision):
+ if self.__reply_to_commit is None:
+ return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_commit(revision)
+ elif self.__reply_to_commit.lower() == 'author':
+ return revision.get_author()
+ elif self.__reply_to_commit.lower() == 'pusher':
+ return self.get_pusher_email()
+ elif self.__reply_to_commit.lower() == 'none':
+ return None
+ else:
+ return self.__reply_to_commit
+
+
+class FilterLinesEnvironmentMixin(Environment):
+ """Handle encoding and maximum line length of body lines.
+
+ emailmaxlinelength (int or None)
+
+ The maximum length of any single line in the email body.
+ Longer lines are truncated at that length with ' [...]'
+ appended.
+
+ strict_utf8 (bool)
+
+ If this field is set to True, then the email body text is
+ expected to be UTF-8. Any invalid characters are
+ converted to U+FFFD, the Unicode replacement character
+ (encoded as UTF-8, of course).
+
+ """
+
+ def __init__(self, strict_utf8=True, emailmaxlinelength=500, **kw):
+ super(FilterLinesEnvironmentMixin, self).__init__(**kw)
+ self.__strict_utf8 = strict_utf8
+ self.__emailmaxlinelength = emailmaxlinelength
+
+ def filter_body(self, lines):
+ lines = super(FilterLinesEnvironmentMixin, self).filter_body(lines)
+ if self.__strict_utf8:
+ lines = (line.decode(ENCODING, 'replace') for line in lines)
+ # Limit the line length in Unicode-space to avoid
+ # splitting characters:
+ if self.__emailmaxlinelength:
+ lines = limit_linelength(lines, self.__emailmaxlinelength)
+ lines = (line.encode(ENCODING, 'replace') for line in lines)
+ elif self.__emailmaxlinelength:
+ lines = limit_linelength(lines, self.__emailmaxlinelength)
+
+ return lines
+
+
+class ConfigFilterLinesEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ FilterLinesEnvironmentMixin,
+ ):
+ """Handle encoding and maximum line length based on config."""
+
+ def __init__(self, config, **kw):
+ strict_utf8 = config.get_bool('emailstrictutf8', default=None)
+ if strict_utf8 is not None:
+ kw['strict_utf8'] = strict_utf8
+
+ emailmaxlinelength = config.get('emailmaxlinelength')
+ if emailmaxlinelength is not None:
+ kw['emailmaxlinelength'] = int(emailmaxlinelength)
+
+ super(ConfigFilterLinesEnvironmentMixin, self).__init__(
+ config=config, **kw
+ )
+
+
+class MaxlinesEnvironmentMixin(Environment):
+ """Limit the email body to a specified number of lines."""
+
+ def __init__(self, emailmaxlines, **kw):
+ super(MaxlinesEnvironmentMixin, self).__init__(**kw)
+ self.__emailmaxlines = emailmaxlines
+
+ def filter_body(self, lines):
+ lines = super(MaxlinesEnvironmentMixin, self).filter_body(lines)
+ if self.__emailmaxlines:
+ lines = limit_lines(lines, self.__emailmaxlines)
+ return lines
+
+
+class ConfigMaxlinesEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ MaxlinesEnvironmentMixin,
+ ):
+ """Limit the email body to the number of lines specified in config."""
+
+ def __init__(self, config, **kw):
+ emailmaxlines = int(config.get('emailmaxlines', default='0'))
+ super(ConfigMaxlinesEnvironmentMixin, self).__init__(
+ config=config,
+ emailmaxlines=emailmaxlines,
+ **kw
+ )
+
+
+class FQDNEnvironmentMixin(Environment):
+ """A mixin that sets the host's FQDN to its constructor argument."""
+
+ def __init__(self, fqdn, **kw):
+ super(FQDNEnvironmentMixin, self).__init__(**kw)
+ self.COMPUTED_KEYS += ['fqdn']
+ self.__fqdn = fqdn
+
+ def get_fqdn(self):
+ """Return the fully-qualified domain name for this host.
+
+ Return None if it is unavailable or unwanted."""
+
+ return self.__fqdn
+
+
+class ConfigFQDNEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ FQDNEnvironmentMixin,
+ ):
+ """Read the FQDN from the config."""
+
+ def __init__(self, config, **kw):
+ fqdn = config.get('fqdn')
+ super(ConfigFQDNEnvironmentMixin, self).__init__(
+ config=config,
+ fqdn=fqdn,
+ **kw
+ )
+
+
+class ComputeFQDNEnvironmentMixin(FQDNEnvironmentMixin):
+ """Get the FQDN by calling socket.getfqdn()."""
+
+ def __init__(self, **kw):
+ super(ComputeFQDNEnvironmentMixin, self).__init__(
+ fqdn=socket.getfqdn(),
+ **kw
+ )
+
+
+class PusherDomainEnvironmentMixin(ConfigEnvironmentMixin):
+ """Deduce pusher_email from pusher by appending an emaildomain."""
+
+ def __init__(self, **kw):
+ super(PusherDomainEnvironmentMixin, self).__init__(**kw)
+ self.__emaildomain = self.config.get('emaildomain')
+
+ def get_pusher_email(self):
+ if self.__emaildomain:
+ # Derive the pusher's full email address in the default way:
+ return '%s@%s' % (self.get_pusher(), self.__emaildomain)
+ else:
+ return super(PusherDomainEnvironmentMixin, self).get_pusher_email()
+
+
+class StaticRecipientsEnvironmentMixin(Environment):
+ """Set recipients statically based on constructor parameters."""
+
+ def __init__(
+ self,
+ refchange_recipients, announce_recipients, revision_recipients,
+ **kw
+ ):
+ super(StaticRecipientsEnvironmentMixin, self).__init__(**kw)
+
+ # The recipients for various types of notification emails, as
+ # RFC 2822 email addresses separated by commas (or the empty
+ # string if no recipients are configured). Although there is
+ # a mechanism to choose the recipient lists based on the
+ # actual *contents* of the change being reported, we only
+ # choose based on the *type* of the change. Therefore we can
+ # compute them once and for all:
+ if not (refchange_recipients
+ or announce_recipients
+ or revision_recipients):
+ raise ConfigurationException('No email recipients configured!')
+ self.__refchange_recipients = refchange_recipients
+ self.__announce_recipients = announce_recipients
+ self.__revision_recipients = revision_recipients
+
+ def get_refchange_recipients(self, refchange):
+ return self.__refchange_recipients
+
+ def get_announce_recipients(self, annotated_tag_change):
+ return self.__announce_recipients
+
+ def get_revision_recipients(self, revision):
+ return self.__revision_recipients
+
+
+class ConfigRecipientsEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ StaticRecipientsEnvironmentMixin
+ ):
+ """Determine recipients statically based on config."""
+
+ def __init__(self, config, **kw):
+ super(ConfigRecipientsEnvironmentMixin, self).__init__(
+ config=config,
+ refchange_recipients=self._get_recipients(
+ config, 'refchangelist', 'mailinglist',
+ ),
+ announce_recipients=self._get_recipients(
+ config, 'announcelist', 'refchangelist', 'mailinglist',
+ ),
+ revision_recipients=self._get_recipients(
+ config, 'commitlist', 'mailinglist',
+ ),
+ **kw
+ )
+
+ def _get_recipients(self, config, *names):
+ """Return the recipients for a particular type of message.
+
+ Return the list of email addresses to which a particular type
+ of notification email should be sent, by looking at the config
+ value for "multimailhook.$name" for each of names. Use the
+ value from the first name that is configured. The return
+ value is a (possibly empty) string containing RFC 2822 email
+ addresses separated by commas. If no configuration could be
+ found, return the empty string."""
+
+ for name in names:
+ retval = config.get_recipients(name)
+ if retval is not None:
+ return retval
+ else:
+ return ''
+
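+# For reference, the recipient lists consulted by the mixin above come from
+# the git configuration section used by this hook ("multimailhook" as set up
+# in main() below); e.g. (the address is only a placeholder):
+#
+#     git config multimailhook.mailinglist commits@example.com
+#     git config multimailhook.commitlist commits@example.com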
+
+class ProjectdescEnvironmentMixin(Environment):
+ """Make a "projectdesc" value available for templates.
+
+ By default, it is set to the first line of $GIT_DIR/description
+ (if that file is present and appears to be set meaningfully)."""
+
+ def __init__(self, **kw):
+ super(ProjectdescEnvironmentMixin, self).__init__(**kw)
+ self.COMPUTED_KEYS += ['projectdesc']
+
+ def get_projectdesc(self):
+ """Return a one-line descripition of the project."""
+
+ git_dir = get_git_dir()
+ try:
+ projectdesc = open(os.path.join(git_dir, 'description')).readline().strip()
+ if projectdesc and not projectdesc.startswith('Unnamed repository'):
+ return projectdesc
+ except IOError:
+ pass
+
+ return 'UNNAMED PROJECT'
+
+
+class GenericEnvironmentMixin(Environment):
+ def get_pusher(self):
+ return self.osenv.get('USER', 'unknown user')
+
+
+class GenericEnvironment(
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ComputeFQDNEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ ConfigRecipientsEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ GenericEnvironmentMixin,
+ Environment,
+ ):
+ pass
+
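+# A site-specific environment can be assembled the same way: replace
+# GenericEnvironmentMixin in the composition above with a custom mixin
+# (illustrative sketch only; the environment variable name is a placeholder):
+#
+#     class MyPusherMixin(Environment):
+#         def get_pusher(self):
+#             return self.osenv.get('PUSHER_NAME', 'unknown user')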
+
+class GitoliteEnvironmentMixin(Environment):
+ def get_repo_shortname(self):
+ # The gitolite environment variable $GL_REPO is a pretty good
+ # repo_shortname (though it's probably not as good as a value
+ # the user might have explicitly put in his config).
+ return (
+ self.osenv.get('GL_REPO', None)
+ or super(GitoliteEnvironmentMixin, self).get_repo_shortname()
+ )
+
+ def get_pusher(self):
+ return self.osenv.get('GL_USER', 'unknown user')
+
+
+class IncrementalDateTime(object):
+ """Simple wrapper to give incremental date/times.
+
+ Each call will result in a date/time a second later than the
+ previous call. This can be used to falsify email headers, to
+ increase the likelihood that email clients sort the emails
+ correctly."""
+
+ def __init__(self):
+ self.time = time.time()
+
+ def next(self):
+ formatted = formatdate(self.time, True)
+ self.time += 1
+ return formatted
+
+
+class GitoliteEnvironment(
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ComputeFQDNEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ ConfigRecipientsEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ GitoliteEnvironmentMixin,
+ Environment,
+ ):
+ pass
+
+
+class Push(object):
+ """Represent an entire push (i.e., a group of ReferenceChanges).
+
+ It is easy to figure out what commits were added to a *branch* by
+ a Reference change:
+
+ git rev-list change.old..change.new
+
+ or removed from a *branch*:
+
+ git rev-list change.new..change.old
+
+ But it is not quite so trivial to determine which entirely new
+ commits were added to the *repository* by a push and which old
+ commits were discarded by a push. A big part of the job of this
+ class is to figure out these things, and to make sure that new
+ commits are only detailed once even if they were added to multiple
+ references.
+
+ The first step is to determine the "other" references--those
+ unaffected by the current push. They are computed by
+ Push._compute_other_ref_sha1s() by listing all references then
+ removing any affected by this push.
+
+ The commits contained in the repository before this push were
+
+ git rev-list other1 other2 other3 ... change1.old change2.old ...
+
+ Where "changeN.old" is the old value of one of the references
+ affected by this push.
+
+ The commits contained in the repository after this push are
+
+ git rev-list other1 other2 other3 ... change1.new change2.new ...
+
+ The commits added by this push are the difference between these
+ two sets, which can be written
+
+ git rev-list \
+ ^other1 ^other2 ... \
+ ^change1.old ^change2.old ... \
+ change1.new change2.new ...
+
+ The commits removed by this push can be computed by
+
+ git rev-list \
+ ^other1 ^other2 ... \
+ ^change1.new ^change2.new ... \
+ change1.old change2.old ...
+
+ The last point is that it is possible that other pushes are
+ occurring simultaneously to this one, so reference values can
+ change at any time. It is impossible to eliminate all race
+ conditions, but we reduce the window of time during which problems
+ can occur by translating reference names to SHA1s as soon as
+ possible and working with SHA1s thereafter (because SHA1s are
+ immutable)."""
+
+ # A map {(changeclass, changetype) : integer} specifying the order
+ # that reference changes will be processed if multiple reference
+ # changes are included in a single push. The order is significant
+ # mostly because new commit notifications are threaded together
+ # with the first reference change that includes the commit. The
+ # following order thus causes commits to be grouped with branch
+ # changes (as opposed to tag changes) if possible.
+ SORT_ORDER = dict(
+ (value, i) for (i, value) in enumerate([
+ (BranchChange, 'update'),
+ (BranchChange, 'create'),
+ (AnnotatedTagChange, 'update'),
+ (AnnotatedTagChange, 'create'),
+ (NonAnnotatedTagChange, 'update'),
+ (NonAnnotatedTagChange, 'create'),
+ (BranchChange, 'delete'),
+ (AnnotatedTagChange, 'delete'),
+ (NonAnnotatedTagChange, 'delete'),
+ (OtherReferenceChange, 'update'),
+ (OtherReferenceChange, 'create'),
+ (OtherReferenceChange, 'delete'),
+ ])
+ )
+
+ def __init__(self, changes):
+ self.changes = sorted(changes, key=self._sort_key)
+
+ # The SHA-1s of commits referred to by references unaffected
+ # by this push:
+ other_ref_sha1s = self._compute_other_ref_sha1s()
+
+ self._old_rev_exclusion_spec = self._compute_rev_exclusion_spec(
+ other_ref_sha1s.union(
+ change.old.sha1
+ for change in self.changes
+ if change.old.type in ['commit', 'tag']
+ )
+ )
+ self._new_rev_exclusion_spec = self._compute_rev_exclusion_spec(
+ other_ref_sha1s.union(
+ change.new.sha1
+ for change in self.changes
+ if change.new.type in ['commit', 'tag']
+ )
+ )
+
+ @classmethod
+ def _sort_key(klass, change):
+ return (klass.SORT_ORDER[change.__class__, change.change_type], change.refname,)
+
+ def _compute_other_ref_sha1s(self):
+ """Return the GitObjects referred to by references unaffected by this push."""
+
+ # The refnames being changed by this push:
+ updated_refs = set(
+ change.refname
+ for change in self.changes
+ )
+
+ # The SHA-1s of commits referred to by all references in this
+ # repository *except* updated_refs:
+ sha1s = set()
+ fmt = (
+ '%(objectname) %(objecttype) %(refname)\n'
+ '%(*objectname) %(*objecttype) %(refname)'
+ )
+ for line in read_git_lines(['for-each-ref', '--format=%s' % (fmt,)]):
+ (sha1, type, name) = line.split(' ', 2)
+ if sha1 and type == 'commit' and name not in updated_refs:
+ sha1s.add(sha1)
+
+ return sha1s
+
+ def _compute_rev_exclusion_spec(self, sha1s):
+ """Return an exclusion specification for 'git rev-list'.
+
+ sha1s is an iterable over commit SHA-1s. Return a
+ string that can be passed to the standard input of 'git
+ rev-list --stdin' to exclude all of the commits referred to by
+ sha1s."""
+
+ return ''.join(
+ ['^%s\n' % (sha1,) for sha1 in sorted(sha1s)]
+ )
+
+ def get_new_commits(self, reference_change=None):
+ """Return a list of commits added by this push.
+
+ Return a list of the object names of commits that were added
+ by the part of this push represented by reference_change. If
+ reference_change is None, then return a list of *all* commits
+ added by this push."""
+
+ if not reference_change:
+ new_revs = sorted(
+ change.new.sha1
+ for change in self.changes
+ if change.new
+ )
+ elif not reference_change.new.commit_sha1:
+ return []
+ else:
+ new_revs = [reference_change.new.commit_sha1]
+
+ cmd = ['rev-list', '--stdin'] + new_revs
+ return read_git_lines(cmd, input=self._old_rev_exclusion_spec)
+
+ def get_discarded_commits(self, reference_change):
+ """Return a list of commits discarded by this push.
+
+ Return a list of the object names of commits that were
+ entirely discarded from the repository by the part of this
+ push represented by reference_change."""
+
+ if not reference_change.old.commit_sha1:
+ return []
+ else:
+ old_revs = [reference_change.old.commit_sha1]
+
+ cmd = ['rev-list', '--stdin'] + old_revs
+ return read_git_lines(cmd, input=self._new_rev_exclusion_spec)
+
+ def send_emails(self, mailer, body_filter=None):
+ """Use send all of the notification emails needed for this push.
+
+ Send all of the notification emails (including reference
+ change emails and commit emails) needed for this push. Send
+ the emails using mailer. If body_filter is not None, then use
+ it to filter the lines that are intended for the email
+ body."""
+
+ # The sha1s of commits that were introduced by this push.
+ # They will be removed from this set as they are processed, to
+ # guarantee that one (and only one) email is generated for
+ # each new commit.
+ unhandled_sha1s = set(self.get_new_commits())
+ send_date = IncrementalDateTime()
+ for change in self.changes:
+ # Check if we've got anyone to send to
+ if not change.recipients:
+ sys.stderr.write(
+ '*** no recipients configured so no email will be sent\n'
+ '*** for %r update %s->%s\n'
+ % (change.refname, change.old.sha1, change.new.sha1,)
+ )
+ else:
+ sys.stderr.write('Sending notification emails to: %s\n' % (change.recipients,))
+ extra_values = {'send_date' : send_date.next()}
+ mailer.send(
+ change.generate_email(self, body_filter, extra_values),
+ change.recipients,
+ )
+
+ sha1s = []
+ for sha1 in reversed(list(self.get_new_commits(change))):
+ if sha1 in unhandled_sha1s:
+ sha1s.append(sha1)
+ unhandled_sha1s.remove(sha1)
+
+ max_emails = change.environment.maxcommitemails
+ if max_emails and len(sha1s) > max_emails:
+ sys.stderr.write(
+ '*** Too many new commits (%d), not sending commit emails.\n' % len(sha1s)
+ + '*** Try setting multimailhook.maxCommitEmails to a greater value\n'
+ + '*** Currently, multimailhook.maxCommitEmails=%d\n' % max_emails
+ )
+ return
+
+ for (num, sha1) in enumerate(sha1s):
+ rev = Revision(change, GitObject(sha1), num=num+1, tot=len(sha1s))
+ if rev.recipients:
+ extra_values = {'send_date' : send_date.next()}
+ mailer.send(
+ rev.generate_email(self, body_filter, extra_values),
+ rev.recipients,
+ )
+
+ # Consistency check:
+ if unhandled_sha1s:
+ sys.stderr.write(
+ 'ERROR: No emails were sent for the following new commits:\n'
+ ' %s\n'
+ % ('\n '.join(sorted(unhandled_sha1s)),)
+ )
+
+
+def run_as_post_receive_hook(environment, mailer):
+ changes = []
+ for line in sys.stdin:
+ (oldrev, newrev, refname) = line.strip().split(' ', 2)
+ changes.append(
+ ReferenceChange.create(environment, oldrev, newrev, refname)
+ )
+ push = Push(changes)
+ push.send_emails(mailer, body_filter=environment.filter_body)
+
+
+def run_as_update_hook(environment, mailer, refname, oldrev, newrev):
+ changes = [
+ ReferenceChange.create(
+ environment,
+ read_git_output(['rev-parse', '--verify', oldrev]),
+ read_git_output(['rev-parse', '--verify', newrev]),
+ refname,
+ ),
+ ]
+ push = Push(changes)
+ push.send_emails(mailer, body_filter=environment.filter_body)
+
+
+def choose_mailer(config, environment):
+ mailer = config.get('mailer', default='sendmail')
+
+ if mailer == 'smtp':
+ smtpserver = config.get('smtpserver', default='localhost')
+ mailer = SMTPMailer(
+ envelopesender=(environment.get_sender() or environment.get_fromaddr()),
+ smtpserver=smtpserver,
+ )
+ elif mailer == 'sendmail':
+ command = config.get('sendmailcommand')
+ if command:
+ command = shlex.split(command)
+ mailer = SendMailer(command=command, envelopesender=environment.get_sender())
+ else:
+ sys.stderr.write(
+ 'fatal: multimailhook.mailer is set to an incorrect value: "%s"\n' % mailer
+ + 'please use one of "smtp" or "sendmail".\n'
+ )
+ sys.exit(1)
+ return mailer
+
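+# For reference, the mailer chosen above is selected via the "mailer" key of
+# the configuration section used by this hook ("multimailhook" as set up in
+# main() below); e.g. (the host name is only a placeholder):
+#
+#     git config multimailhook.mailer smtp
+#     git config multimailhook.smtpserver mail.example.com
+#
+# or, to pipe each message to a sendmail-compatible command:
+#
+#     git config multimailhook.mailer sendmail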
+
+KNOWN_ENVIRONMENTS = {
+ 'generic' : GenericEnvironmentMixin,
+ 'gitolite' : GitoliteEnvironmentMixin,
+ }
+
+
+def choose_environment(config, osenv=None, env=None, recipients=None):
+ if not osenv:
+ osenv = os.environ
+
+ environment_mixins = [
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ComputeFQDNEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ ]
+ environment_kw = {
+ 'osenv' : osenv,
+ 'config' : config,
+ }
+
+ if not env:
+ env = config.get('environment')
+
+ if not env:
+ if 'GL_USER' in osenv and 'GL_REPO' in osenv:
+ env = 'gitolite'
+ else:
+ env = 'generic'
+
+ environment_mixins.append(KNOWN_ENVIRONMENTS[env])
+
+ if recipients:
+ environment_mixins.insert(0, StaticRecipientsEnvironmentMixin)
+ environment_kw['refchange_recipients'] = recipients
+ environment_kw['announce_recipients'] = recipients
+ environment_kw['revision_recipients'] = recipients
+ else:
+ environment_mixins.insert(0, ConfigRecipientsEnvironmentMixin)
+
+ environment_klass = type(
+ 'EffectiveEnvironment',
+ tuple(environment_mixins) + (Environment,),
+ {},
+ )
+ return environment_klass(**environment_kw)
+
+
+def main(args):
+ parser = optparse.OptionParser(
+ description=__doc__,
+ usage='%prog [OPTIONS]\n or: %prog [OPTIONS] REFNAME OLDREV NEWREV',
+ )
+
+ parser.add_option(
+ '--environment', '--env', action='store', type='choice',
+ choices=['generic', 'gitolite'], default=None,
+ help=(
+ 'Choose the type of environment that is in use. Default is taken from '
+ 'multimailhook.environment if set; otherwise "generic".'
+ ),
+ )
+ parser.add_option(
+ '--stdout', action='store_true', default=False,
+ help='Output emails to stdout rather than sending them.',
+ )
+ parser.add_option(
+ '--recipients', action='store', default=None,
+ help='Set list of email recipients for all types of emails.',
+ )
+ parser.add_option(
+ '--show-env', action='store_true', default=False,
+ help=(
+ 'Write to stderr the values determined for the environment '
+ '(intended for debugging purposes).'
+ ),
+ )
+
+ (options, args) = parser.parse_args(args)
+
+ config = Config('multimailhook')
+
+ try:
+ environment = choose_environment(
+ config, osenv=os.environ,
+ env=options.environment,
+ recipients=options.recipients,
+ )
+
+ if options.show_env:
+ sys.stderr.write('Environment values:\n')
+ for (k,v) in sorted(environment.get_values().items()):
+ sys.stderr.write(' %s : %r\n' % (k,v))
+ sys.stderr.write('\n')
+
+ if options.stdout:
+ mailer = OutputMailer(sys.stdout)
+ else:
+ mailer = choose_mailer(config, environment)
+
+ # Dual mode: if arguments were specified on the command line, run
+ # like an update hook; otherwise, run as a post-receive hook.
+ if args:
+ if len(args) != 3:
+ parser.error('Need zero or three non-option arguments')
+ (refname, oldrev, newrev) = args
+ run_as_update_hook(environment, mailer, refname, oldrev, newrev)
+ else:
+ run_as_post_receive_hook(environment, mailer)
+ except ConfigurationException, e:
+ sys.exit(str(e))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/contrib/hooks/multimail/migrate-mailhook-config b/contrib/hooks/multimail/migrate-mailhook-config
new file mode 100755
index 0000000000..04eeaac413
--- /dev/null
+++ b/contrib/hooks/multimail/migrate-mailhook-config
@@ -0,0 +1,269 @@
+#! /usr/bin/env python2
+
+"""Migrate a post-receive-email configuration to be usable with git_multimail.py.
+
+See README.migrate-from-post-receive-email for more information.
+
+"""
+
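+# A typical invocation (the paths are only illustrative; run it from inside
+# the repository whose hooks.* settings should be migrated):
+#
+#     cd /path/to/your/repository.git
+#     /path/to/contrib/hooks/multimail/migrate-mailhook-config
+#
+# The --overwrite, --retain and --strict options described in main() below
+# adjust how existing values are treated.
+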
+import sys
+import optparse
+
+from git_multimail import CommandError
+from git_multimail import Config
+from git_multimail import read_output
+
+
+OLD_NAMES = [
+ 'mailinglist',
+ 'announcelist',
+ 'envelopesender',
+ 'emailprefix',
+ 'showrev',
+ 'emailmaxlines',
+ 'diffopts',
+ ]
+
+NEW_NAMES = [
+ 'environment',
+ 'reponame',
+ 'mailinglist',
+ 'refchangelist',
+ 'commitlist',
+ 'announcelist',
+ 'announceshortlog',
+ 'envelopesender',
+ 'administrator',
+ 'emailprefix',
+ 'emailmaxlines',
+ 'diffopts',
+ 'emaildomain',
+ ]
+
+
+INFO = """\
+
+SUCCESS!
+
+Your post-receive-email configuration has been converted to
+git-multimail format. Please see README and
+README.migrate-from-post-receive-email to learn about other
+git-multimail configuration possibilities.
+
+For example, git-multimail has the following new options with no
+equivalent in post-receive-email. You might want to read about them
+to see if they would be useful in your situation:
+
+"""
+
+
+def _check_old_config_exists(old):
+ """Check that at least one old configuration value is set."""
+
+ for name in OLD_NAMES:
+ if old.has_key(name):
+ return True
+
+ return False
+
+
+def _check_new_config_clear(new):
+ """Check that none of the new configuration names are set."""
+
+ retval = True
+ for name in NEW_NAMES:
+ if new.has_key(name):
+ if retval:
+ sys.stderr.write('INFO: The following configuration values already exist:\n\n')
+ sys.stderr.write(' "%s.%s"\n' % (new.section, name))
+ retval = False
+
+ return retval
+
+
+def erase_values(config, names):
+ for name in names:
+ if config.has_key(name):
+ try:
+ sys.stderr.write('...unsetting "%s.%s"\n' % (config.section, name))
+ config.unset_all(name)
+ except CommandError:
+ sys.stderr.write(
+ '\nWARNING: could not unset "%s.%s". '
+ 'Perhaps it is not set at the --local level?\n\n'
+ % (config.section, name)
+ )
+
+
+def is_section_empty(section, local):
+ """Return True iff the specified configuration section is empty.
+
+ Iff local is True, use the --local option when invoking 'git
+ config'."""
+
+ if local:
+ local_option = ['--local']
+ else:
+ local_option = []
+
+ try:
+ read_output(
+ ['git', 'config']
+ + local_option
+ + ['--get-regexp', '^%s\.' % (section,)]
+ )
+ except CommandError, e:
+ if e.retcode == 1:
+ # This means that no settings were found.
+ return True
+ else:
+ raise
+ else:
+ return False
+
+
+def remove_section_if_empty(section):
+ """If the specified configuration section is empty, delete it."""
+
+ try:
+ empty = is_section_empty(section, local=True)
+ except CommandError:
+ # Older versions of git do not support the --local option, so
+ # if the first attempt fails, try without --local.
+ try:
+ empty = is_section_empty(section, local=False)
+ except CommandError:
+ sys.stderr.write(
+ '\nINFO: If configuration section "%s.*" is empty, you might want '
+ 'to delete it.\n\n'
+ % (section,)
+ )
+ return
+
+ if empty:
+ sys.stderr.write('...removing section "%s.*"\n' % (section,))
+ read_output(['git', 'config', '--remove-section', section])
+ else:
+ sys.stderr.write(
+ '\nINFO: Configuration section "%s.*" still has contents. '
+ 'It will not be deleted.\n\n'
+ % (section,)
+ )
+
+
+def migrate_config(strict=False, retain=False, overwrite=False):
+ old = Config('hooks')
+ new = Config('multimailhook')
+ if not _check_old_config_exists(old):
+ sys.exit(
+ 'Your repository has no post-receive-email configuration. '
+ 'Nothing to do.'
+ )
+ if not _check_new_config_clear(new):
+ if overwrite:
+ sys.stderr.write('\nWARNING: Erasing the above values...\n\n')
+ erase_values(new, NEW_NAMES)
+ else:
+ sys.exit(
+ '\nERROR: Refusing to overwrite existing values. Use the --overwrite\n'
+ 'option to continue anyway.'
+ )
+
+ name = 'showrev'
+ if old.has_key(name):
+ msg = 'git-multimail does not support "%s.%s"' % (old.section, name,)
+ if strict:
+ sys.exit(
+ 'ERROR: %s.\n'
+ 'Please unset that value then try again, or run without --strict.'
+ % (msg,)
+ )
+ else:
+ sys.stderr.write('\nWARNING: %s (ignoring).\n\n' % (msg,))
+
+ for name in ['mailinglist', 'announcelist']:
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set_recipients(name, old.get_recipients(name))
+
+ if strict:
+ sys.stderr.write(
+ '...setting "%s.commitlist" to the empty string\n' % (new.section,)
+ )
+ new.set_recipients('commitlist', '')
+ sys.stderr.write(
+ '...setting "%s.announceshortlog" to "true"\n' % (new.section,)
+ )
+ new.set('announceshortlog', 'true')
+
+ for name in ['envelopesender', 'emailmaxlines', 'diffopts']:
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set(name, old.get(name))
+
+ name = 'emailprefix'
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set(name, old.get(name))
+ elif strict:
+ sys.stderr.write(
+ '...setting "%s.%s" to "[SCM]" to preserve old subject lines\n'
+ % (new.section, name)
+ )
+ new.set(name, '[SCM]')
+
+ if not retain:
+ erase_values(old, OLD_NAMES)
+ remove_section_if_empty(old.section)
+
+ sys.stderr.write(INFO)
+ for name in NEW_NAMES:
+ if name not in OLD_NAMES:
+ sys.stderr.write(' "%s.%s"\n' % (new.section, name,))
+ sys.stderr.write('\n')
+
+
+def main(args):
+ parser = optparse.OptionParser(
+ description=__doc__,
+ usage='%prog [OPTIONS]',
+ )
+
+ parser.add_option(
+ '--strict', action='store_true', default=False,
+ help=(
+ 'Slavishly configure git-multimail as closely as possible to '
+ 'the post-receive-email configuration. Default is to turn '
+ 'on some new features that have no equivalent in post-receive-email.'
+ ),
+ )
+ parser.add_option(
+ '--retain', action='store_true', default=False,
+ help=(
+ 'Retain the post-receive-email configuration values. '
+ 'Default is to delete them after the new values are set.'
+ ),
+ )
+ parser.add_option(
+ '--overwrite', action='store_true', default=False,
+ help=(
+ 'Overwrite any existing git-multimail configuration settings. '
+ 'Default is to abort if such settings already exist.'
+ ),
+ )
+
+ (options, args) = parser.parse_args(args)
+
+ if args:
+ parser.error('Unexpected arguments: %s' % (' '.join(args),))
+
+ migrate_config(strict=options.strict, retain=options.retain, overwrite=options.overwrite)
+
+
+main(sys.argv[1:])
diff --git a/contrib/hooks/multimail/post-receive b/contrib/hooks/multimail/post-receive
new file mode 100755
index 0000000000..4d46828ba5
--- /dev/null
+++ b/contrib/hooks/multimail/post-receive
@@ -0,0 +1,90 @@
+#! /usr/bin/env python2
+
+"""Example post-receive hook based on git-multimail.
+
+This script is a simple example of a post-receive hook implemented
+using git_multimail.py as a Python module. It is intended to be
+customized before use; see the comments in the script to help you get
+started.
+
+It is possible to use git_multimail.py itself as a post-receive or
+update hook, configured via git config settings and/or command-line
+parameters. But for more flexibility, it can also be imported as a
+Python module by a custom post-receive script as done here. The
+latter has the following advantages:
+
+* The tool's behavior can be customized using arbitrary Python code,
+ without having to edit git_multimail.py.
+
+* Configuration settings can be read from other sources; for example,
+ user names and email addresses could be read from LDAP or from a
+ database. Or the settings can even be hardcoded in the importing
+ Python script, if this is preferred.
+
+This script is a very basic example of how to use git_multimail.py as
+a module. The comments below explain some of the points at which the
+script's behavior could be changed or customized.
+
+"""
+
+import sys
+import os
+
+# If necessary, add the path to the directory containing
+# git_multimail.py to the Python path as follows. (This is not
+# necessary if git_multimail.py is in the same directory as this
+# script):
+
+#LIBDIR = 'path/to/directory/containing/module'
+#sys.path.insert(0, LIBDIR)
+
+import git_multimail
+
+
+# It is possible to modify the output templates here; e.g.:
+
+#git_multimail.FOOTER_TEMPLATE = """\
+#
+#-- \n\
+#This email was generated by the wonderful git-multimail tool.
+#"""
+
+
+# Specify which "git config" section contains the configuration for
+# git-multimail:
+config = git_multimail.Config('multimailhook')
+
+
+# Select the type of environment:
+environment = git_multimail.GenericEnvironment(config=config)
+#environment = git_multimail.GitoliteEnvironment(config=config)
+
+
+# Choose the method of sending emails based on the git config:
+mailer = git_multimail.choose_mailer(config, environment)
+
+# Alternatively, you may hardcode the mailer using code like one of
+# the following:
+
+# Use "/usr/sbin/sendmail -oi -t" to send emails. The envelopesender
+# argument is optional:
+#mailer = git_multimail.SendMailer(
+# command=['/usr/sbin/sendmail', '-oi', '-t'],
+# envelopesender='git-repo@example.com',
+# )
+
+# Use Python's smtplib to send emails. Both arguments are required.
+#mailer = git_multimail.SMTPMailer(
+# envelopesender='git-repo@example.com',
+# # The smtpserver argument can also include a port number; e.g.,
+# # smtpserver='mail.example.com:25'
+# smtpserver='mail.example.com',
+# )
+
+# OutputMailer is intended only for testing; it writes the emails to
+# the specified file stream.
+#mailer = git_multimail.OutputMailer(sys.stdout)
+
+
+# Read changes from stdin and send notification emails:
+git_multimail.run_as_post_receive_hook(environment, mailer)
diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email
index 2a66063e44..8747b84334 100755
--- a/contrib/hooks/post-receive-email
+++ b/contrib/hooks/post-receive-email
@@ -2,20 +2,28 @@
#
# Copyright (c) 2007 Andy Parkins
#
-# An example hook script to mail out commit update information. This hook
-# sends emails listing new revisions to the repository introduced by the
-# change being reported. The rule is that (for branch updates) each commit
-# will appear on one email and one email only.
+# An example hook script to mail out commit update information.
+#
+# NOTE: This script is no longer under active development. There
+# is another script, git-multimail, which is more capable and
+# configurable and is largely backwards-compatible with this script;
+# please see "contrib/hooks/multimail/". For instructions on how to
+# migrate from post-receive-email to git-multimail, please see
+# "README.migrate-from-post-receive-email" in that directory.
+#
+# This hook sends emails listing new revisions to the repository
+# introduced by the change being reported. The rule is that (for
+# branch updates) each commit will appear on one email and one email
+# only.
#
# This hook is stored in the contrib/hooks directory. Your distribution
# will have put this somewhere standard. You should make this script
# executable then link to it in the repository you would like to use it in.
# For example, on debian the hook is stored in
-# /usr/share/doc/git-core/contrib/hooks/post-receive-email:
+# /usr/share/git-core/contrib/hooks/post-receive-email:
#
-# chmod a+x post-receive-email
# cd /path/to/your/repository.git
-# ln -sf /usr/share/doc/git-core/contrib/hooks/post-receive-email hooks/post-receive
+# ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive
#
# This hook script assumes it is enabled on the central repository of a
# project, with all users pushing only to it and not between each other. It
@@ -23,6 +31,13 @@
# possible for the email to be from someone other than the person doing the
# push.
#
+# To help with debugging and use on pre-v1.5.1 git servers, this script will
+# also obey the interface of hooks/update, taking its arguments on the
+# command line. Unfortunately, hooks/update is called once for each ref.
+# To avoid firing one email per ref, this script just prints its output to
+# the screen when used in this mode. The output can then be redirected if
+# wanted.
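+#
+# For example (the revision arguments are placeholders), using the
+# update-hook argument order:
+#
+#   hooks/post-receive <refname> <oldrev> <newrev>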
+#
# Config
# ------
# hooks.mailinglist
@@ -48,6 +63,16 @@
# "t=%s; printf 'http://.../?id=%%s' \$t; echo;echo; git show -C \$t; echo"
# Be careful if "..." contains things that will be expanded by shell "eval"
# or printf.
+# hooks.emailmaxlines
+# The maximum number of lines that should be included in the generated
+# email body. If not specified, there is no limit.
+# Lines beyond the limit are suppressed and counted, and a final
+# line is added indicating the number of suppressed lines.
+# hooks.diffopts
+# Alternate options for the git diff-tree invocation that shows changes.
+# Default is "--stat --summary --find-copies-harder". Add -p to those
+# options to include a unified diff of changes in addition to the usual
+# summary output.
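+# For example (the values are only illustrative):
+#   git config hooks.emailmaxlines 1000
+#   git config hooks.diffopts "--stat --summary --find-copies-harder -p"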
#
# Notes
# -----
@@ -59,19 +84,10 @@
# ---------------------------- Functions
#
-# Top level email generation function. This decides what type of update
-# this is and calls the appropriate body-generation routine after outputting
-# the common header
-#
-# Note this function doesn't actually generate any email output, that is
-# taken care of by the functions it calls:
-# - generate_email_header
-# - generate_create_XXXX_email
-# - generate_update_XXXX_email
-# - generate_delete_XXXX_email
-# - generate_email_footer
+# Function to prepare for email generation. This decides what type
+# of update this is and whether an email should even be generated.
#
-generate_email()
+prep_for_email()
{
# --- Arguments
oldrev=$(git rev-parse $1)
@@ -140,13 +156,13 @@ generate_email()
short_refname=${refname##refs/remotes/}
echo >&2 "*** Push-update of tracking branch, $refname"
echo >&2 "*** - no email generated."
- exit 0
+ return 1
;;
*)
# Anything else (is there anything else?)
echo >&2 "*** Unknown type of update to $refname ($rev_type)"
echo >&2 "*** - no email generated"
- exit 1
+ return 1
;;
esac
@@ -162,9 +178,32 @@ generate_email()
esac
echo >&2 "*** $config_name is not set so no email will be sent"
echo >&2 "*** for $refname update $oldrev->$newrev"
- exit 0
+ return 1
fi
+ return 0
+}
+
+#
+# Top level email generation function. This calls the appropriate
+# body-generation routine after outputting the common header.
+#
+# Note this function doesn't actually generate any email output, that is
+# taken care of by the functions it calls:
+# - generate_email_header
+# - generate_create_XXXX_email
+# - generate_update_XXXX_email
+# - generate_delete_XXXX_email
+# - generate_email_footer
+#
+# Note also that this function cannot 'exit' from the script; when this
+# function is running (in hook script mode), the send_mail() function
+# is already executing in another process, connected via a pipe, and
+# if this function exits, whatever has been generated to that
+# point will be sent as an email... even if nothing has been generated.
+#
+generate_email()
+{
# Email parameters
# The email subject will contain the best description of the ref
# that we can build from the parameters
@@ -185,7 +224,12 @@ generate_email()
fn_name=atag
;;
esac
- generate_${change_type}_${fn_name}_email
+
+ if [ -z "$maxlines" ]; then
+ generate_${change_type}_${fn_name}_email
+ else
+ generate_${change_type}_${fn_name}_email | limit_lines $maxlines
+ fi
generate_email_footer
}
@@ -196,11 +240,15 @@ generate_email_header()
# Generate header
cat <<-EOF
To: $recipients
- Subject: ${emailprefix}$projectdesc $refname_type, $short_refname, ${change_type}d. $describe
+ Subject: ${emailprefix}$projectdesc $refname_type $short_refname ${change_type}d. $describe
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset=utf-8
+ Content-Transfer-Encoding: 8bit
X-Git-Refname: $refname
X-Git-Reftype: $refname_type
X-Git-Oldrev: $oldrev
X-Git-Newrev: $newrev
+ Auto-Submitted: auto-generated
This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
@@ -315,8 +363,8 @@ generate_update_branch_email()
# "remotes/" will be ignored as well.
# List all of the revisions that were removed by this update, in a
- # fast forward update, this list will be empty, because rev-list O
- # ^N is empty. For a non fast forward, O ^N is the list of removed
+ # fast-forward update, this list will be empty, because rev-list O
+ # ^N is empty. For a non-fast-forward, O ^N is the list of removed
# revisions
fast_forward=""
rev=""
@@ -367,7 +415,7 @@ generate_update_branch_email()
echo " \\"
echo " O -- O -- O ($oldrev)"
echo ""
- echo "The removed revisions are not necessarilly gone - if another reference"
+ echo "The removed revisions are not necessarily gone - if another reference"
echo "still refers to them they will stay in the repository."
rewind_only=1
else
@@ -411,10 +459,10 @@ generate_update_branch_email()
# revision because the base is effectively a random revision at this
# point - the user will be interested in what this revision changed
# - including the undoing of previous revisions in the case of
- # non-fast forward updates.
+ # non-fast-forward updates.
echo ""
echo "Summary of changes:"
- git diff-tree --stat --summary --find-copies-harder $oldrev..$newrev
+ git diff-tree $diffopts $oldrev..$newrev
}
#
@@ -424,8 +472,8 @@ generate_delete_branch_email()
{
echo " was $oldrev"
echo ""
- echo $LOGEND
- git show -s --pretty=oneline $oldrev
+ echo $LOGBEGIN
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
@@ -501,11 +549,11 @@ generate_atag_email()
# performed on them
if [ -n "$prevtag" ]; then
# Show changes since the previous release
- git rev-list --pretty=short "$prevtag..$newrev" | git shortlog
+ git shortlog "$prevtag..$newrev"
else
# No previous tag, show all the changes since time
# began
- git rev-list --pretty=short $newrev | git shortlog
+ git shortlog $newrev
fi
;;
*)
@@ -524,8 +572,8 @@ generate_delete_atag_email()
{
echo " was $oldrev"
echo ""
- echo $LOGEND
- git show -s --pretty=oneline $oldrev
+ echo $LOGBEGIN
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
@@ -571,7 +619,7 @@ generate_general_email()
echo ""
if [ "$newrev_type" = "commit" ]; then
echo $LOGBEGIN
- git show --no-color --root -s --pretty=medium $newrev
+ git diff-tree -s --always --encoding=UTF-8 --pretty=medium $newrev
echo $LOGEND
else
# What can we do here? The tag marks an object that is not
@@ -589,8 +637,8 @@ generate_delete_general_email()
{
echo " was $oldrev"
echo ""
- echo $LOGEND
- git show -s --pretty=oneline $oldrev
+ echo $LOGBEGIN
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
@@ -635,6 +683,24 @@ show_new_revisions()
}
+limit_lines()
+{
+ lines=0
+ skipped=0
+ while IFS="" read -r line; do
+ lines=$((lines + 1))
+ if [ $lines -gt $1 ]; then
+ skipped=$((skipped + 1))
+ else
+ printf "%s\n" "$line"
+ fi
+ done
+ if [ $skipped -ne 0 ]; then
+ echo "... $skipped lines suppressed ..."
+ fi
+}
+
+
send_mail()
{
if [ -n "$envelopesender" ]; then
@@ -659,7 +725,7 @@ if [ -z "$GIT_DIR" ]; then
exit 1
fi
-projectdesc=$(sed -ne '1p' "$GIT_DIR/description")
+projectdesc=$(sed -ne '1p' "$GIT_DIR/description" 2>/dev/null)
# Check if the description is unchanged from it's default, and shorten it to
# a more manageable length if it is
if expr "$projectdesc" : "Unnamed repository.*$" >/dev/null
@@ -672,6 +738,9 @@ announcerecipients=$(git config hooks.announcelist)
envelopesender=$(git config hooks.envelopesender)
emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
custom_showrev=$(git config hooks.showrev)
+maxlines=$(git config hooks.emailmaxlines)
+diffopts=$(git config hooks.diffopts)
+: ${diffopts:="--stat --summary --find-copies-harder"}
# --- Main loop
# Allow dual mode: run from the command line just like the update hook, or
@@ -680,10 +749,11 @@ if [ -n "$1" -a -n "$2" -a -n "$3" ]; then
# Output to the terminal in command line mode - if someone wanted to
# resend an email; they could redirect the output to sendmail
# themselves
- PAGER= generate_email $2 $3 $1
+ prep_for_email $2 $3 $1 && PAGER= generate_email
else
while read oldrev newrev refname
do
- generate_email $oldrev $newrev $refname | send_mail
+ prep_for_email $oldrev $newrev $refname || continue
+ generate_email $maxlines | send_mail
done
fi
diff --git a/contrib/hooks/pre-auto-gc-battery b/contrib/hooks/pre-auto-gc-battery
index 1f914c94aa..9d0c2d1990 100644..100755
--- a/contrib/hooks/pre-auto-gc-battery
+++ b/contrib/hooks/pre-auto-gc-battery
@@ -13,7 +13,6 @@
# For example, if the hook is stored in
# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
#
-# chmod a+x pre-auto-gc-battery
# cd /path/to/your/repository.git
# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
# hooks/pre-auto-gc
diff --git a/contrib/hooks/setgitperms.perl b/contrib/hooks/setgitperms.perl
index a577ad095f..2770a1b1d2 100644..100755
--- a/contrib/hooks/setgitperms.perl
+++ b/contrib/hooks/setgitperms.perl
@@ -24,7 +24,7 @@ use File::Find;
use File::Basename;
my $usage =
-"Usage: setgitperms.perl [OPTION]... <--read|--write>
+"usage: setgitperms.perl [OPTION]... <--read|--write>
This program uses a file `.gitmeta` to store/restore permissions and uid/gid
info for all files/dirs tracked by git in the repository.
diff --git a/contrib/hooks/update-paranoid b/contrib/hooks/update-paranoid
index d18b317b2f..d18b317b2f 100644..100755
--- a/contrib/hooks/update-paranoid
+++ b/contrib/hooks/update-paranoid
diff --git a/contrib/mw-to-git/.gitignore b/contrib/mw-to-git/.gitignore
new file mode 100644
index 0000000000..ae545b013d
--- /dev/null
+++ b/contrib/mw-to-git/.gitignore
@@ -0,0 +1,2 @@
+git-remote-mediawiki
+git-mw
diff --git a/contrib/mw-to-git/.perlcriticrc b/contrib/mw-to-git/.perlcriticrc
new file mode 100644
index 0000000000..5a9955d757
--- /dev/null
+++ b/contrib/mw-to-git/.perlcriticrc
@@ -0,0 +1,28 @@
+# These 3 rules demand adding the s, m and x flags to *every* regexp. This is
+# overkill and would be harmful to readability.
+[-RegularExpressions::RequireExtendedFormatting]
+[-RegularExpressions::RequireDotMatchAnything]
+[-RegularExpressions::RequireLineBoundaryMatching]
+
+# This rule says that builtin functions should not be called with parentheses
+# e.g.: (taken from CPAN's documentation)
+# open($handle, '>', $filename); #not ok
+# open $handle, '>', $filename; #ok
+# Applying such a rule would mean modifying a huge number of lines for a
+# question of style.
+[-CodeLayout::ProhibitParensWithBuiltins]
+
+# This rule states that each system call should have its return value checked.
+# The problem is that it includes the print call. Checking every print call's
+# return value would be harmful to the code readability.
+# This configuration keeps all default functions but print.
+[InputOutput::RequireCheckedSyscalls]
+functions = open say close
+
+# This rule demands adding a dependency on the Readonly module. This is not
+# desired.
+[-ValuesAndExpressions::ProhibitConstantPragma]
+
+# This rule is not really useful (rather a question of style) and produces many
+# warnings among the code.
+[-ValuesAndExpressions::ProhibitNoisyQuotes]
diff --git a/contrib/mw-to-git/Git/Mediawiki.pm b/contrib/mw-to-git/Git/Mediawiki.pm
new file mode 100644
index 0000000000..d13c4dfa7d
--- /dev/null
+++ b/contrib/mw-to-git/Git/Mediawiki.pm
@@ -0,0 +1,100 @@
+package Git::Mediawiki;
+
+use 5.008;
+use strict;
+use Git;
+
+BEGIN {
+
+our ($VERSION, @ISA, @EXPORT, @EXPORT_OK);
+
+# Totally unstable API.
+$VERSION = '0.01';
+
+require Exporter;
+
+@ISA = qw(Exporter);
+
+@EXPORT = ();
+
+# Methods which can be called as standalone functions as well:
+@EXPORT_OK = qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK HTTP_CODE_PAGE_NOT_FOUND);
+}
+
+# Mediawiki filenames can contain forward slashes. This constant decides what to replace them with.
+use constant SLASH_REPLACEMENT => '%2F';
+
+# Used to test for empty strings
+use constant EMPTY => q{};
+
+# HTTP codes
+use constant HTTP_CODE_OK => 200;
+use constant HTTP_CODE_PAGE_NOT_FOUND => 404;
+
+sub clean_filename {
+ my $filename = shift;
+ $filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. looks like URL-encoding,
+ # but with _ added to prevent MediaWiki from thinking this is
+ # an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+ # If we use the URI escape before this point,
+ # we should unescape here, before anything else.
+
+ return $filename;
+}
+
+sub smudge_filename {
+ my $filename = shift;
+ $filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
+ return $filename;
+}
+
+sub connect_maybe {
+ my $wiki = shift;
+ if ($wiki) {
+ return $wiki;
+ }
+
+ my $remote_name = shift;
+ my $remote_url = shift;
+ my ($wiki_login, $wiki_password, $wiki_domain);
+
+ $wiki_login = Git::config("remote.${remote_name}.mwLogin");
+ $wiki_password = Git::config("remote.${remote_name}.mwPassword");
+ $wiki_domain = Git::config("remote.${remote_name}.mwDomain");
+
+ $wiki = MediaWiki::API->new;
+ $wiki->{config}->{api_url} = "${remote_url}/api.php";
+ if ($wiki_login) {
+ my %credential = (
+ 'url' => $remote_url,
+ 'username' => $wiki_login,
+ 'password' => $wiki_password
+ );
+ Git::credential(\%credential);
+ my $request = {lgname => $credential{username},
+ lgpassword => $credential{password},
+ lgdomain => $wiki_domain};
+ if ($wiki->login($request)) {
+ Git::credential(\%credential, 'approve');
+ print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
+ } else {
+ print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${remote_url}\n);
+ print {*STDERR} ' (error ' .
+ $wiki->{error}->{code} . ': ' .
+ $wiki->{error}->{details} . ")\n";
+ Git::credential(\%credential, 'reject');
+ exit 1;
+ }
+ }
+
+ return $wiki;
+}
+
+1; # Famous last words
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
new file mode 100644
index 0000000000..a4b6f7a2cd
--- /dev/null
+++ b/contrib/mw-to-git/Makefile
@@ -0,0 +1,57 @@
+#
+# Copyright (C) 2013
+# Matthieu Moy <Matthieu.Moy@imag.fr>
+#
+# To build and test:
+#
+# make
+# bin-wrapper/git mw preview Some_page.mw
+# bin-wrapper/git clone mediawiki::http://example.com/wiki/
+#
+# To install, run Git's toplevel 'make install' then run:
+#
+# make install
+
+GIT_MEDIAWIKI_PM=Git/Mediawiki.pm
+SCRIPT_PERL=git-remote-mediawiki.perl
+SCRIPT_PERL+=git-mw.perl
+GIT_ROOT_DIR=../..
+HERE=contrib/mw-to-git/
+
+INSTALL = install
+
+SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+INSTLIBDIR=$(shell $(MAKE) -C $(GIT_ROOT_DIR)/perl \
+ -s --no-print-directory instlibdir)
+DESTDIR_SQ = $(subst ','\'',$(DESTDIR))
+INSTLIBDIR_SQ = $(subst ','\'',$(INSTLIBDIR))
+
+all: build
+
+test: all
+ $(MAKE) -C t
+
+check: perlcritic test
+
+install_pm:
+ $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/Git'
+ $(INSTALL) -m 644 $(GIT_MEDIAWIKI_PM) \
+ '$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/$(GIT_MEDIAWIKI_PM)'
+
+build:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ build-perl-script
+
+install: install_pm
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ install-perl-script
+
+clean:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ clean-perl-script
+
+perlcritic:
+ perlcritic -5 $(SCRIPT_PERL)
+ -perlcritic -2 $(SCRIPT_PERL)
+
+.PHONY: all test check install_pm install clean perlcritic
diff --git a/contrib/mw-to-git/bin-wrapper/git b/contrib/mw-to-git/bin-wrapper/git
new file mode 100755
index 0000000000..6663ae57e8
--- /dev/null
+++ b/contrib/mw-to-git/bin-wrapper/git
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# git executable wrapper script for Git-Mediawiki to run tests without
+# installing all the scripts and perl packages.
+
+GIT_ROOT_DIR=../../..
+GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd ${GIT_ROOT_DIR} && pwd)
+
+GITPERLLIB="$GIT_EXEC_PATH"'/contrib/mw-to-git'"${GITPERLLIB:+:$GITPERLLIB}"
+PATH="$GIT_EXEC_PATH"'/contrib/mw-to-git:'"$PATH"
+
+export GITPERLLIB PATH
+
+exec "${GIT_EXEC_PATH}/bin-wrappers/git" "$@"
diff --git a/contrib/mw-to-git/git-mw.perl b/contrib/mw-to-git/git-mw.perl
new file mode 100755
index 0000000000..28df3ee321
--- /dev/null
+++ b/contrib/mw-to-git/git-mw.perl
@@ -0,0 +1,368 @@
+#!/usr/bin/perl
+
+# Copyright (C) 2013
+# Benoit Person <benoit.person@ensimag.imag.fr>
+# Celestin Matte <celestin.matte@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Set of tools for git repo with a mediawiki remote.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use URI::URL qw(url);
+use LWP::UserAgent;
+use HTML::TreeBuilder;
+
+use Git;
+use MediaWiki::API;
+use Git::Mediawiki qw(clean_filename connect_maybe
+ EMPTY HTTP_CODE_PAGE_NOT_FOUND);
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+# Global parameters
+my $verbose = 0;
+sub v_print {
+ if ($verbose) {
+ return print {*STDERR} @_;
+ }
+ return;
+}
+
+# Preview parameters
+my $file_name = EMPTY;
+my $remote_name = EMPTY;
+my $preview_file_name = EMPTY;
+my $autoload = 0;
+sub file {
+ $file_name = shift;
+ return $file_name;
+}
+
+my %commands = (
+ 'help' =>
+ [\&help, {}, \&help],
+ 'preview' =>
+ [\&preview, {
+ '<>' => \&file,
+ 'output|o=s' => \$preview_file_name,
+ 'remote|r=s' => \$remote_name,
+ 'autoload|a' => \$autoload
+ }, \&preview_help]
+);
+
+# Search for sub-command
+my $cmd = $commands{'help'};
+for (0..@ARGV-1) {
+ if (defined $commands{$ARGV[$_]}) {
+ $cmd = $commands{$ARGV[$_]};
+ splice @ARGV, $_, 1;
+ last;
+ }
+};
+GetOptions( %{$cmd->[1]},
+ 'help|h' => \&{$cmd->[2]},
+ 'verbose|v' => \$verbose);
+
+# Launch command
+&{$cmd->[0]};
+
+############################# Preview Functions ################################
+
+sub preview_help {
+ print {*STDOUT} <<'END';
+USAGE: git mw preview [--remote|-r <remote name>] [--autoload|-a]
+ [--output|-o <output filename>] [--verbose|-v]
+ <blob> | <filename>
+
+DESCRIPTION:
+Preview is a utility to preview local content of a mediawiki repo as if it
+were pushed to the remote.
+
+For that, preview searches for the remote name of the current branch's
+upstream if --remote is not set. If that remote is not found or if it
+is not a mediawiki, it lists all configured mediawiki remotes and asks
+you to re-run your command with the --remote option set properly.
+
+Then, it searches for a file named 'filename'. If it is not found in
+the current directory, it assumes it is a blob.
+
+The content retrieved from the file (or the blob) is then parsed by
+the remote mediawiki and combined with a template retrieved from the
+mediawiki.
+
+Finally, preview saves the HTML result in a file, and loads it in your
+default web browser if the option --autoload is present.
+
+OPTIONS:
+ -r <remote name>, --remote <remote name>
+ If the remote is a mediawiki, the template and the parse engine
+ used for the preview will be those of that remote.
+ If not, a list of valid remotes will be shown.
+
+ -a, --autoload
+ Try to load the HTML output in a new tab (or new window) of your
+ default web browser.
+
+ -o <output filename>, --output <output filename>
+ Change the HTML output filename. Default filename is based on the
+ input filename with its extension replaced by '.html'.
+
+ -v, --verbose
+ Show more information on what's going on under the hood.
+END
+ exit;
+}
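+
+# A typical invocation, as a sketch only (the remote name 'origin' and the
+# file name 'Some_page.mw' are placeholders, not values used by this code):
+#   git mw preview --remote origin --autoload Some_page.mw
+# which writes Some_page.html and opens it in the default browser.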
+
+sub preview {
+ my $wiki;
+ my ($remote_url, $wiki_page_name);
+ my ($new_content, $template);
+ my $file_content;
+
+ if ($file_name eq EMPTY) {
+ die "Missing file argument, see `git mw help`\n";
+ }
+
+ v_print("### Selecting remote\n");
+ if ($remote_name eq EMPTY) {
+ $remote_name = find_upstream_remote_name();
+ if ($remote_name) {
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ }
+
+ if (! $remote_url) {
+ my @valid_remotes = find_mediawiki_remotes();
+
+			if (!@valid_remotes) {
+				print {*STDERR} "No mediawiki remote in this repo.\n";
+ exit 1;
+ } else {
+ my $remotes_list = join("\n\t", @valid_remotes);
+ print {*STDERR} <<"MESSAGE";
+There are multiple mediawiki remotes, which of:
+ ${remotes_list}
+do you want? Use the -r option to specify the remote.
+MESSAGE
+ }
+
+ exit 1;
+ }
+ } else {
+ if (!is_valid_remote($remote_name)) {
+ die "${remote_name} is not a remote\n";
+ }
+
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ if (! $remote_url) {
+ die "${remote_name} is not a mediawiki remote\n";
+ }
+ }
+ v_print("selected remote:\n\tname: ${remote_name}\n\turl: ${remote_url}\n");
+
+ $wiki = connect_maybe($wiki, $remote_name, $remote_url);
+
+ # Read file content
+ if (! -e $file_name) {
+ $file_content = git_cmd_try {
+ Git::command('cat-file', 'blob', $file_name); }
+ "%s failed w/ code %d";
+
+ if ($file_name =~ /(.+):(.+)/) {
+ $file_name = $2;
+ }
+ } else {
+ open my $read_fh, "<", $file_name
+ or die "could not open ${file_name}: $!\n";
+ $file_content = do { local $/ = undef; <$read_fh> };
+ close $read_fh
+ or die "unable to close: $!\n";
+ }
+
+ v_print("### Retrieving template\n");
+ ($wiki_page_name = clean_filename($file_name)) =~ s/\.[^.]+$//;
+ $template = get_template($remote_url, $wiki_page_name);
+
+ v_print("### Parsing local content\n");
+ $new_content = $wiki->api({
+ action => 'parse',
+ text => $file_content,
+ title => $wiki_page_name
+ }, {
+ skip_encoding => 1
+ }) or die "No response from remote mediawiki\n";
+ $new_content = $new_content->{'parse'}->{'text'}->{'*'};
+
+ v_print("### Merging contents\n");
+ if ($preview_file_name eq EMPTY) {
+ ($preview_file_name = $file_name) =~ s/\.[^.]+$/.html/;
+ }
+ open(my $save_fh, '>:encoding(UTF-8)', $preview_file_name)
+ or die "Could not open: $!\n";
+ print {$save_fh} merge_contents($template, $new_content, $remote_url);
+ close($save_fh)
+ or die "Could not close: $!\n";
+
+ v_print("### Results\n");
+ if ($autoload) {
+ v_print("Launching browser w/ file: ${preview_file_name}");
+ system('git', 'web--browse', $preview_file_name);
+ } else {
+ print {*STDERR} "Preview file saved as: ${preview_file_name}\n";
+ }
+
+ exit;
+}
+
+# uses global scope variable: $remote_name
+sub merge_contents {
+ my $template = shift;
+ my $content = shift;
+ my $remote_url = shift;
+ my ($content_tree, $html_tree, $mw_content_text);
+ my $template_content_id = 'bodyContent';
+
+ $html_tree = HTML::TreeBuilder->new;
+ $html_tree->parse($template);
+
+ $content_tree = HTML::TreeBuilder->new;
+ $content_tree->parse($content);
+
+ $template_content_id = Git::config("remote.${remote_name}.mwIDcontent")
+ || $template_content_id;
+ v_print("Using '${template_content_id}' as the content ID\n");
+
+ $mw_content_text = $html_tree->look_down('id', $template_content_id);
+ if (!defined $mw_content_text) {
+ print {*STDERR} <<"CONFIG";
+Could not combine the new content with the template. You might want to
+configure `remote.${remote_name}.mwIDcontent` in your config:
+ git config --add remote.${remote_name}.mwIDcontent <id>
+and re-run the command afterward.
+CONFIG
+ exit 1;
+ }
+ $mw_content_text->delete_content();
+ $mw_content_text->push_content($content_tree);
+
+ make_links_absolute($html_tree, $remote_url);
+
+ return $html_tree->as_HTML;
+}
+
+sub make_links_absolute {
+ my $html_tree = shift;
+ my $remote_url = shift;
+ for (@{ $html_tree->extract_links() }) {
+ my ($link, $element, $attr) = @{ $_ };
+ my $url = url($link)->canonical;
+ if ($url !~ /#/) {
+ $element->attr($attr, URI->new_abs($url, $remote_url));
+ }
+ }
+ return $html_tree;
+}
+
+sub is_valid_remote {
+	my $remote = shift;
+	my @remotes = git_cmd_try {
+		Git::command('remote') }
+		"%s failed w/ code %d";
+	my $found_remote = 0;
+	foreach my $existing (@remotes) {
+		if ($existing eq $remote) {
+			$found_remote = 1;
+			last;
+		}
+	}
+	return $found_remote;
+}
+
+sub find_mediawiki_remotes {
+ my @remotes = git_cmd_try {
+ Git::command('remote'); }
+ "%s failed w/ code %d";
+ my $remote_url;
+ my @valid_remotes = ();
+ foreach my $remote (@remotes) {
+ $remote_url = mediawiki_remote_url_maybe($remote);
+ if ($remote_url) {
+ push(@valid_remotes, $remote);
+ }
+ }
+ return @valid_remotes;
+}
+
+sub find_upstream_remote_name {
+ my $current_branch = git_cmd_try {
+ Git::command_oneline('symbolic-ref', '--short', 'HEAD') }
+ "%s failed w/ code %d";
+ return Git::config("branch.${current_branch}.remote");
+}
+
+sub mediawiki_remote_url_maybe {
+ my $remote = shift;
+
+ # Find remote url
+ my $remote_url = Git::config("remote.${remote}.url");
+ if ($remote_url =~ s/mediawiki::(.*)/$1/) {
+ return url($remote_url)->canonical;
+ }
+
+ return;
+}
+
+sub get_template {
+ my $url = shift;
+ my $page_name = shift;
+ my ($req, $res, $code, $url_after);
+
+ $req = LWP::UserAgent->new;
+ if ($verbose) {
+ $req->show_progress(1);
+ }
+
+ $res = $req->get("${url}/index.php?title=${page_name}");
+ if (!$res->is_success) {
+ $code = $res->code;
+ $url_after = $res->request()->uri(); # resolve all redirections
+ if ($code == HTTP_CODE_PAGE_NOT_FOUND) {
+ if ($verbose) {
+ print {*STDERR} <<"WARNING";
+Warning: Failed to retrieve '$page_name'. Create it on the mediawiki if you want
+all the links to work properly.
+Trying to use the mediawiki homepage as a fallback template ...
+WARNING
+ }
+
+ # LWP automatically redirects GET request
+ $res = $req->get("${url}/index.php");
+			if (!$res->is_success) {
+				$code = $res->code;
+				$url_after = $res->request()->uri(); # resolve all redirections
+				die "Failed to get homepage @ ${url_after} w/ code ${code}\n";
+			}
+ } else {
+ die "Failed to get '${page_name}' @ ${url_after} w/ code ${code}\n";
+ }
+ }
+
+ return $res->decoded_content;
+}
+
+############################## Help Functions ##################################
+
+sub help {
+ print {*STDOUT} <<'END';
+usage: git mw <command> <args>
+
+git mw commands are:
+ help Display help information about git mw
+ preview Parse and render local file into HTML
+END
+ exit;
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
new file mode 100755
index 0000000000..8dd74a9a40
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -0,0 +1,1338 @@
+#! /usr/bin/perl
+
+# Copyright (C) 2011
+# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
+# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
+# Claire Fousse <claire.fousse@ensimag.imag.fr>
+# David Amouyal <david.amouyal@ensimag.imag.fr>
+# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
+# License: GPL v2 or later
+
+# Gateway between Git and MediaWiki.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use MediaWiki::API;
+use Git;
+use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK);
+use DateTime::Format::ISO8601;
+use warnings;
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+use URI::Escape;
+
+# It's not always possible to delete pages (may require some
+# privileges). Deleted pages are replaced with this content.
+use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+
+# It's not possible to create empty pages. New empty files in Git are
+# sent with this content instead.
+use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+
+# used to reflect file creation or deletion in diff.
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
+
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
+# Number of pages taken into account at once in subroutine get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked media files to get at once in get_linked_mediafiles
+# The query is split into small batches because of the MW API limit on
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
+my $remotename = $ARGV[0];
+my $url = $ARGV[1];
+
+# Accept both a space-separated list and multiple config entries.
+# Page names containing spaces must be written with underscores anyway,
+# since the list is split on spaces.
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
+chomp(@tracked_pages);
+
+# Just like @tracked_pages, but for MediaWiki categories.
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
+chomp(@tracked_categories);
+
+# Import media files on pull
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq 'true');
+
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq 'false');
+
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
+# Note: mwPassword is discouraged. Use the credential system instead.
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
+chomp($wiki_login);
+chomp($wiki_passwd);
+chomp($wiki_domain);
+
+# Import only last revisions (both for clone and fetch)
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
+chomp($shallow_import);
+$shallow_import = ($shallow_import eq 'true');
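+
+# Illustrative setup for the options above (the remote name 'origin' and
+# the page/category names are placeholders, not values used by this
+# script):
+#   git config --add remote.origin.pages "Main_Page Another_Page"
+#   git config --add remote.origin.categories Foo
+#   git config remote.origin.mediaimport true
+#   git config remote.origin.shallow true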
+
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when we have a wiki with lots of pages and we fetch
+# the revisions quite often, so that they concern only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revisions
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+ $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
+}
+chomp($fetch_strategy);
+if (!$fetch_strategy) {
+ $fetch_strategy = 'by_page';
+}
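+
+# For example (the remote name 'origin' here is only a placeholder), the
+# strategy can be chosen per remote or globally:
+#   git config remote.origin.fetchStrategy by_rev
+#   git config mediawiki.fetchStrategy by_page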
+
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
+# Dumb push: don't update notes and mediawiki ref to reflect the last push.
+#
+# Configurable with mediawiki.dumbPush, or per-remote with
+# remote.<remotename>.dumbPush.
+#
+# This means the user will have to re-import the just-pushed
+# revisions. On the other hand, this means that the Git revisions
+# corresponding to MediaWiki revisions are all imported from the wiki,
+# regardless of whether they were initially created in Git or from the
+# web interface, hence all users will get the same history (i.e. if
+# the push from Git to MediaWiki loses some information, everybody
+# will get the history with information lost). If the import is
+# deterministic, this means everybody gets the same sha1 for each
+# MediaWiki revision.
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+ $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
+}
+chomp($dumb_push);
+$dumb_push = ($dumb_push eq 'true');
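+
+# Sketch of enabling dumb push for a hypothetical remote 'origin', or
+# globally:
+#   git config remote.origin.dumbPush true
+#   git config mediawiki.dumbPush true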
+
+my $wiki_name = $url;
+$wiki_name =~ s{[^/]*://}{};
+# If the URL is like http://user:password@example.com/, we clearly don't
+# want the password in $wiki_name. While we're at it, also remove the
+# user name and '@' sign, to avoid authors like MWUser@HTTPUser@host.com
+$wiki_name =~ s/^.*@//;
+
+# Commands parser
+while (<STDIN>) {
+ chomp;
+
+ if (!parse_command($_)) {
+ last;
+ }
+
+ BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
+ # command is fully processed.
+}
+
+########################## Functions ##############################
+
+## error handling
+sub exit_error_usage {
+	die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+	    "parameters.\n" .
+	    "You may get this error because you attempted to run the git-remote-mediawiki\n" .
+	    "module directly.\n" .
+	    "This module is meant to be used this way:\n" .
+	    "\tgit clone mediawiki::<URL of a mediawiki>\n" .
+	    "Then, use git commit, push and pull as with any other git repository.\n";
+}
+
+sub parse_command {
+ my ($line) = @_;
+ my @cmd = split(/ /, $line);
+ if (!defined $cmd[0]) {
+ return 0;
+ }
+ if ($cmd[0] eq 'capabilities') {
+ die("Too many arguments for capabilities\n")
+ if (defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq 'list') {
+ die("Too many arguments for list\n") if (defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq 'import') {
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+ die("Too many arguments for option\n")
+ if (defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq 'push') {
+ mw_push($cmd[1]);
+ } else {
+ print {*STDERR} "Unknown command. Aborting...\n";
+ return 0;
+ }
+ return 1;
+}
+
+# MediaWiki API instance, created lazily.
+my $mediawiki;
+
+sub fatal_mw_error {
+ my $action = shift;
+ print STDERR "fatal: could not $action.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ if ($url =~ /^https/) {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+ print STDERR "fatal: and the SSL certificate is correct.\n";
+ } else {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ }
+ print STDERR "fatal: (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
+}
+
+## Functions for listing pages on the remote wiki
+sub get_mw_tracked_pages {
+ my $pages = shift;
+ get_mw_page_list(\@tracked_pages, $pages);
+ return;
+}
+
+sub get_mw_page_list {
+ my $page_list = shift;
+ my $pages = shift;
+ my @some_pages = @{$page_list};
+ while (@some_pages) {
+ my $last_page = SLICE_SIZE;
+ if ($#some_pages < $last_page) {
+ $last_page = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last_page];
+ get_mw_first_pages(\@slice, $pages);
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
+ }
+ return;
+}
+
+sub get_mw_tracked_categories {
+ my $pages = shift;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:${category}";
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': '
+ . $mediawiki->{error}->{details} . "\n";
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+ return;
+}
+
+sub get_mw_all_pages {
+ my $pages = shift;
+ # No user-provided list, get the list of pages from the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ fatal_mw_error("get the list of wiki pages");
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ return;
+}
+
+# queries the wiki for a set of pages. Meant to be used within a loop
+# querying the wiki for slices of page list.
+sub get_mw_first_pages {
+ my $some_pages = shift;
+ my @some_pages = @{$some_pages};
+
+ my $pages = shift;
+
+ # pattern 'page1|page2|...' required by the API
+ my $titles = join('|', @some_pages);
+
+ my $mw_pages = $mediawiki->api({
+ action => 'query',
+ titles => $titles,
+ });
+ if (!defined($mw_pages)) {
+ fatal_mw_error("query the list of wiki pages");
+ }
+ while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
+ if ($id < 0) {
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
+ } else {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+ return;
+}
+
+# Get the list of pages to be fetched according to configuration.
+sub get_mw_pages {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ print {*STDERR} "Listing pages on remote wiki...\n";
+
+ my %pages; # hash on page titles to avoid duplicates
+ my $user_defined;
+ if (@tracked_pages) {
+ $user_defined = 1;
+ # The user provided a list of pages titles, but we
+ # still need to query the API to get the page IDs.
+ get_mw_tracked_pages(\%pages);
+ }
+ if (@tracked_categories) {
+ $user_defined = 1;
+ get_mw_tracked_categories(\%pages);
+ }
+ if (!$user_defined) {
+ get_mw_all_pages(\%pages);
+ }
+ if ($import_media) {
+ print {*STDERR} "Getting media files for selected pages...\n";
+ if ($user_defined) {
+ get_linked_mediafiles(\%pages);
+ } else {
+ get_all_mediafiles(\%pages);
+ }
+ }
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
+ return %pages;
+}
+
+# usage: $out = run_git("command args");
+# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
+sub run_git {
+ my $args = shift;
+ my $encoding = (shift || 'encoding(UTF-8)');
+ open(my $git, "-|:${encoding}", "git ${args}")
+ or die "Unable to fork: $!\n";
+ my $res = do {
+ local $/ = undef;
+ <$git>
+ };
+ close($git);
+
+ return $res;
+}
+
+
+sub get_all_mediafiles {
+ my $pages = shift;
+	# Get the list of all pages for media files from the API;
+	# they are in a different namespace, and only one namespace
+	# can be queried at a time.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ apnamespace => get_mw_namespace_id('File'),
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ return;
+}
+
+sub get_linked_mediafiles {
+ my $pages = shift;
+ my @titles = map { $_->{title} } values(%{$pages});
+
+ my $batch = BATCH_SIZE;
+ while (@titles) {
+ if ($#titles < $batch) {
+ $batch = $#titles;
+ }
+ my @slice = @titles[0..$batch];
+
+ # pattern 'page1|page2|...' required by the API
+ my $mw_titles = join('|', @slice);
+
+ # Media files could be included or linked from
+ # a page, get all related
+ my $query = {
+ action => 'query',
+ prop => 'links|images',
+ titles => $mw_titles,
+ plnamespace => get_mw_namespace_id('File'),
+ pllimit => 'max'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+ my @media_titles;
+ if (defined($page->{links})) {
+ my @link_titles
+ = map { $_->{title} } @{$page->{links}};
+ push(@media_titles, @link_titles);
+ }
+ if (defined($page->{images})) {
+ my @image_titles
+ = map { $_->{title} } @{$page->{images}};
+ push(@media_titles, @image_titles);
+ }
+ if (@media_titles) {
+ get_mw_page_list(\@media_titles, $pages);
+ }
+ }
+
+ @titles = @titles[($batch+1)..$#titles];
+ }
+ return;
+}
+
+sub get_mw_mediafile_for_page_revision {
+ # Name of the file on Wiki, with the prefix.
+ my $filename = shift;
+ my $timestamp = shift;
+ my %mediafile;
+
+	# Check whether a media file with the given timestamp exists on
+	# MediaWiki. If so, download the file.
+ my $query = {
+ action => 'query',
+ prop => 'imageinfo',
+ titles => "File:${filename}",
+ iistart => $timestamp,
+ iiend => $timestamp,
+ iiprop => 'timestamp|archivename|url',
+ iilimit => 1
+ };
+ my $result = $mediawiki->api($query);
+
+ my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+ # If not defined it means there is no revision of the file for
+ # given timestamp.
+ if (defined($file->{imageinfo})) {
+ $mediafile{title} = $filename;
+
+ my $fileinfo = pop(@{$file->{imageinfo}});
+ $mediafile{timestamp} = $fileinfo->{timestamp};
+ # Mediawiki::API's download function doesn't support https URLs
+ # and can't download old versions of files.
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ $mediafile{content} = download_mw_mediafile($fileinfo->{url});
+ }
+ return %mediafile;
+}
+
+sub download_mw_mediafile {
+ my $download_url = shift;
+
+ my $response = $mediawiki->{ua}->get($download_url);
+ if ($response->code == HTTP_CODE_OK) {
+ # It is tempting to return
+ # $response->decoded_content({charset => "none"}), but
+ # when doing so, utf8::downgrade($content) fails with
+ # "Wide character in subroutine entry".
+ $response->decode();
+ return $response->content();
+ } else {
+		print {*STDERR} "Error downloading mediafile from:\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
+ exit 1;
+ }
+}
+
+sub get_last_local_revision {
+ # Get note regarding last mediawiki revision
+ my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
+ my @note_info = split(/ /, $note);
+
+ my $lastrevision_number;
+ if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+ print {*STDERR} 'No previous mediawiki revision found';
+ $lastrevision_number = 0;
+ } else {
+		# Notes are formatted: mediawiki_revision: #number
+ $lastrevision_number = $note_info[1];
+ chomp($lastrevision_number);
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
+ }
+ return $lastrevision_number;
+}
+
+# Get the last remote revision without taking into account which pages are
+# tracked or not. This function makes a single request to the wiki, thus
+# avoiding a loop over all tracked pages. This is useful for the
+# fetch-by-rev option.
+sub get_last_global_remote_rev {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ my $query = {
+ action => 'query',
+ list => 'recentchanges',
+ prop => 'revisions',
+ rclimit => '1',
+ rcdir => 'older',
+ };
+ my $result = $mediawiki->api($query);
+ return $result->{query}->{recentchanges}[0]->{revid};
+}
+
+# Get the last remote revision concerning the tracked pages and the tracked
+# categories.
+sub get_last_remote_revision {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
+ my $max_rev_num = 0;
+
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
+
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids|timestamp',
+ pageids => $id,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
+
+ $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
+
+ $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
+ }
+
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
+ return $max_rev_num;
+}
+
+# Clean content before sending it to MediaWiki
+sub mediawiki_clean {
+ my $string = shift;
+ my $page_created = shift;
+	# MediaWiki does not allow trailing whitespace at the end of a page,
+	# and pages end with a single \n. This function right-trims the string
+	# and appends a \n at the end to follow this rule.
+ $string =~ s/\s+$//;
+ if ($string eq EMPTY && $page_created) {
+ # Creating empty pages is forbidden.
+ $string = EMPTY_CONTENT;
+ }
+ return $string."\n";
+}
+
+# Filter applied to MediaWiki data before adding it to Git
+sub mediawiki_smudge {
+ my $string = shift;
+ if ($string eq EMPTY_CONTENT) {
+ $string = EMPTY;
+ }
+	# This \n is important. This is due to mediawiki's way of handling end of file.
+ return "${string}\n";
+}
+
+sub literal_data {
+ my ($content) = @_;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ return;
+}
+
+sub literal_data_raw {
+ # Output possibly binary content.
+ my ($content) = @_;
+ # Avoid confusion between size in bytes and in characters
+ utf8::downgrade($content);
+ binmode STDOUT, ':raw';
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ binmode STDOUT, ':encoding(UTF-8)';
+ return;
+}
+
+sub mw_capabilities {
+ # Revisions are imported to the private namespace
+ # refs/mediawiki/$remotename/ by the helper and fetched into
+ # refs/remotes/$remotename later by fetch.
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ if ($dumb_push) {
+ print {*STDOUT} "no-private-update\n";
+ }
+ print {*STDOUT} "\n";
+ return;
+}
+
+sub mw_list {
+	# MediaWiki does not have branches; we consider a single branch,
+	# arbitrarily called master, with HEAD pointing to it.
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
+ return;
+}
+
+sub mw_option {
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
+ return;
+}
+
+sub fetch_mw_revisions_for_page {
+ my $page = shift;
+ my $id = shift;
+ my $fetch_from = shift;
+ my @page_revs = ();
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids',
+ rvdir => 'newer',
+ rvstartid => $fetch_from,
+ rvlimit => 500,
+ pageids => $id,
+
+ # Let MediaWiki know that we support the latest API.
+ continue => '',
+ };
+
+ my $revnum = 0;
+ # Get 500 revisions at a time due to the mediawiki api limit
+ while (1) {
+ my $result = $mediawiki->api($query);
+
+ # Parse each of those 500 revisions
+ foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
+ my $page_rev_ids;
+ $page_rev_ids->{pageid} = $page->{pageid};
+ $page_rev_ids->{revid} = $revision->{revid};
+ push(@page_revs, $page_rev_ids);
+ $revnum++;
+ }
+
+ if ($result->{'query-continue'}) { # For legacy APIs
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ } elsif ($result->{continue}) { # For newer APIs
+ $query->{rvstartid} = $result->{continue}->{rvcontinue};
+ $query->{continue} = $result->{continue}->{continue};
+ } else {
+ last;
+ }
+ }
+ if ($shallow_import && @page_revs) {
+ print {*STDERR} " Found 1 revision (shallow import).\n";
+ @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
+ return $page_revs[0];
+ }
+ print {*STDERR} " Found ${revnum} revision(s).\n";
+ return @page_revs;
+}
+
+sub fetch_mw_revisions {
+ my $pages = shift; my @pages = @{$pages};
+ my $fetch_from = shift;
+
+ my @revisions = ();
+ my $n = 1;
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
+ $n++;
+ my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
+ @revisions = (@page_revs, @revisions);
+ }
+
+ return ($n, @revisions);
+}
+
+sub fe_escape_path {
+ my $path = shift;
+ $path =~ s/\\/\\\\/g;
+ $path =~ s/"/\\"/g;
+ $path =~ s/\n/\\n/g;
+ return qq("${path}");
+}
+
+sub import_file_revision {
+ my $commit = shift;
+ my %commit = %{$commit};
+ my $full_import = shift;
+ my $n = shift;
+ my $mediafile = shift;
+ my %mediafile;
+ if ($mediafile) {
+ %mediafile = %{$mediafile};
+ }
+
+ my $title = $commit{title};
+ my $comment = $commit{comment};
+ my $content = $commit{content};
+ my $author = $commit{author};
+ my $date = $commit{date};
+
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data($comment);
+
+ # If it's not a clone, we need to know where to start from
+ if (!$full_import && $n == 1) {
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
+ }
+ if ($content ne DELETED_CONTENT) {
+ print {*STDOUT} 'M 644 inline ' .
+ fe_escape_path("${title}.mw") . "\n";
+ literal_data($content);
+ if (%mediafile) {
+ print {*STDOUT} 'M 644 inline '
+ . fe_escape_path($mediafile{title}) . "\n";
+ literal_data_raw($mediafile{content});
+ }
+ print {*STDOUT} "\n\n";
+ } else {
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
+ }
+
+ # mediawiki revision number in the git note
+ if ($full_import && $n == 1) {
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
+ }
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data('Note added by git-mediawiki during import');
+ if (!$full_import && $n == 1) {
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
+ }
+ print {*STDOUT} "N inline :${n}\n";
+ literal_data("mediawiki_revision: $commit{mw_revision}");
+ print {*STDOUT} "\n\n";
+ return;
+}
+
+# parse a sequence of
+# <cmd> <arg1>
+# <cmd> <arg2>
+# \n
+# (like batch sequence of import and sequence of push statements)
+sub get_more_refs {
+ my $cmd = shift;
+ my @refs;
+ while (1) {
+ my $line = <STDIN>;
+ if ($line =~ /^$cmd (.*)$/) {
+ push(@refs, $1);
+ } elsif ($line eq "\n") {
+ return @refs;
+ } else {
+			die("Invalid command in a '$cmd' batch: $line\n");
+ }
+ }
+ return;
+}
+
+sub mw_import {
+ # multiple import commands can follow each other.
+ my @refs = (shift, get_more_refs('import'));
+ foreach my $ref (@refs) {
+ mw_import_ref($ref);
+ }
+ print {*STDOUT} "done\n";
+ return;
+}
+
+sub mw_import_ref {
+ my $ref = shift;
+ # The remote helper will call "import HEAD" and
+ # "import refs/heads/master".
+ # Since HEAD is a symbolic ref to master (by convention,
+ # followed by the output of the command "list" that we gave),
+ # we don't need to do anything in this case.
+ if ($ref eq 'HEAD') {
+ return;
+ }
+
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ print {*STDERR} "Searching revisions...\n";
+ my $last_local = get_last_local_revision();
+ my $fetch_from = $last_local + 1;
+ if ($fetch_from == 1) {
+ print {*STDERR} ", fetching from beginning.\n";
+ } else {
+ print {*STDERR} ", fetching from here.\n";
+ }
+
+ my $n = 0;
+ if ($fetch_strategy eq 'by_rev') {
+ print {*STDERR} "Fetching & writing export data by revs...\n";
+ $n = mw_import_ref_by_revs($fetch_from);
+ } elsif ($fetch_strategy eq 'by_page') {
+ print {*STDERR} "Fetching & writing export data by pages...\n";
+ $n = mw_import_ref_by_pages($fetch_from);
+ } else {
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
+ exit 1;
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
+ # Something has to be done remote-helper side. If nothing is done, an error is
+ # thrown saying that HEAD is referring to unknown object 0000000000000000000
+ # and the clone fails.
+ }
+ return;
+}
+
+sub mw_import_ref_by_pages {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
+ my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
+
+ @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+ my @revision_ids = map { $_->{revid} } @revisions;
+
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+
+ my $last_remote = get_last_global_remote_rev();
+ my @revision_ids = $fetch_from..$last_remote;
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+# Import revisions given in second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page titles)
+# will be imported.
+sub mw_import_revids {
+ my $fetch_from = shift;
+ my $revision_ids = shift;
+ my $pages = shift;
+
+ my $n = 0;
+ my $n_actual = 0;
+	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
+
+ foreach my $pagerevid (@{$revision_ids}) {
+ # Count page even if we skip it, since we display
+ # $n/$total and $total includes skipped pages.
+ $n++;
+
+ # fetch the content of the pages
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'content|timestamp|comment|user|ids',
+ revids => $pagerevid,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ if (!$result) {
+ die "Failed to retrieve modified page for revision $pagerevid\n";
+ }
+
+ if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+ # The revision id does not exist on the remote wiki.
+ next;
+ }
+
+ if (!defined($result->{query}->{pages})) {
+ die "Invalid revision ${pagerevid}.\n";
+ }
+
+ my @result_pages = values(%{$result->{query}->{pages}});
+ my $result_page = $result_pages[0];
+ my $rev = $result_pages[0]->{revisions}->[0];
+
+ my $page_title = $result_page->{title};
+
+ if (!exists($pages->{$page_title})) {
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+ ": Skipping revision #$rev->{revid} of ${page_title}\n";
+ next;
+ }
+
+ $n_actual++;
+
+ my %commit;
+ $commit{author} = $rev->{user} || 'Anonymous';
+ $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+ $commit{title} = smudge_filename($page_title);
+ $commit{mw_revision} = $rev->{revid};
+ $commit{content} = mediawiki_smudge($rev->{'*'});
+
+ if (!defined($rev->{timestamp})) {
+ $last_timestamp++;
+ } else {
+ $last_timestamp = $rev->{timestamp};
+ }
+ $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
+
+		# Differentiate between regular pages and media files.
+ my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+ my %mediafile;
+ if ($namespace) {
+ my $id = get_mw_namespace_id($namespace);
+ if ($id && $id == get_mw_namespace_id('File')) {
+ %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ }
+ }
+		# If this is a revision of the media page for a new version of
+		# a file, do one common commit for both the file and the media
+		# page. Otherwise, commit only that page.
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
+ import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
+ }
+
+ return $n_actual;
+}
+
+sub error_non_fast_forward {
+ my $advice = run_git('config --bool advice.pushNonFastForward');
+ chomp($advice);
+ if ($advice ne 'false') {
+ # Native git-push would show this after the summary.
+ # We can't ask it to display it cleanly, so print it
+ # ourselves before.
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ }
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
+ return 0;
+}
+
+sub mw_upload_file {
+ my $complete_file_name = shift;
+ my $new_sha1 = shift;
+ my $extension = shift;
+ my $file_deleted = shift;
+ my $summary = shift;
+ my $newrevid;
+ my $path = "File:${complete_file_name}";
+ my %hashFiles = get_allowed_file_extensions();
+ if (!exists($hashFiles{$extension})) {
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
+ return $newrevid;
+ }
+	# Deleting and uploading a file requires a privileged user
+ if ($file_deleted) {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+ my $query = {
+ action => 'delete',
+ title => $path,
+ reason => $summary
+ };
+ if (!$mediawiki->edit($query)) {
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ exit 1;
+ }
+ } else {
+ # Don't let perl try to interpret file content as UTF-8 => use "raw"
+ my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+ if ($content ne EMPTY) {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+ $mediawiki->{config}->{upload_url} =
+ "${url}/index.php/Special:Upload";
+ $mediawiki->edit({
+ action => 'upload',
+ filename => $complete_file_name,
+ comment => $summary,
+ file => [undef,
+ $complete_file_name,
+ Content => $content],
+ ignorewarnings => 1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mediawiki->{error}->{code} . ':'
+ . $mediawiki->{error}->{details} . "\n";
+ my $last_file_page = $mediawiki->get_page({title => $path});
+ $newrevid = $last_file_page->{revid};
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
+ } else {
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
+ }
+ }
+ return $newrevid;
+}
+
+sub mw_push_file {
+ my $diff_info = shift;
+ # $diff_info contains a string in this format:
+ # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
+ my @diff_info_split = split(/[ \t]/, $diff_info);
+
+ # Filename, including .mw extension
+ my $complete_file_name = shift;
+ # Commit message
+ my $summary = shift;
+ # MediaWiki revision number. Keep the previous one by default,
+ # in case there's no edit to perform.
+ my $oldrevid = shift;
+ my $newrevid;
+
+ if ($summary eq EMPTY_MESSAGE) {
+ $summary = EMPTY;
+ }
+
+ my $new_sha1 = $diff_info_split[3];
+ my $old_sha1 = $diff_info_split[2];
+ my $page_created = ($old_sha1 eq NULL_SHA1);
+ my $page_deleted = ($new_sha1 eq NULL_SHA1);
+ $complete_file_name = clean_filename($complete_file_name);
+
+ my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
+ if (!defined($extension)) {
+ $extension = EMPTY;
+ }
+ if ($extension eq 'mw') {
+ my $ns = get_mw_namespace_id_for_page($complete_file_name);
+ if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+ return ($oldrevid, 'ok');
+ }
+ my $file_content;
+ if ($page_deleted) {
+ # Deleting a page usually requires
+ # special privileges. A common
+ # convention is to replace the page
+ # with this content instead:
+ $file_content = DELETED_CONTENT;
+ } else {
+ $file_content = run_git("cat-file blob ${new_sha1}");
+ }
+
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ my $result = $mediawiki->edit( {
+ action => 'edit',
+ summary => $summary,
+ title => $title,
+ basetimestamp => $basetimestamps{$oldrevid},
+ text => mediawiki_clean($file_content, $page_created),
+ }, {
+		skip_encoding => 1 # Helps with names containing accented characters
+ });
+ if (!$result) {
+ if ($mediawiki->{error}->{code} == 3) {
+ # edit conflicts, considered as non-fast-forward
+ print {*STDERR} 'Warning: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
+ ".\n";
+ return ($oldrevid, 'non-fast-forward');
+ } else {
+ # Other errors. Shouldn't happen => just die()
+ die 'Fatal: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
+ }
+ }
+ $newrevid = $result->{edit}->{newrevid};
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
+ } elsif ($export_media) {
+ $newrevid = mw_upload_file($complete_file_name, $new_sha1,
+ $extension, $page_deleted,
+ $summary);
+ } else {
+ print {*STDERR} "Ignoring media file ${title}\n";
+ }
+ $newrevid = ($newrevid or $oldrevid);
+ return ($newrevid, 'ok');
+}
+
+sub mw_push {
+ # multiple push statements can follow each other
+ my @refsspecs = (shift, get_more_refs('push'));
+ my $pushed;
+ for my $refspec (@refsspecs) {
+ my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
+ if ($force) {
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
+ }
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
+ next;
+ }
+ if ($remote ne 'refs/heads/master') {
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
+ next;
+ }
+ if (mw_push_revision($local, $remote)) {
+ $pushed = 1;
+ }
+ }
+
+ # Notify Git that the push is done
+ print {*STDOUT} "\n";
+
+ if ($pushed && $dumb_push) {
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
+ }
+ return;
+}
+
+sub mw_push_revision {
+ my $local = shift;
+ my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
+ my $last_local_revid = get_last_local_revision();
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
+ my $last_remote_revid = get_last_remote_revision();
+ my $mw_revision = $last_remote_revid;
+
+ # Get sha1 of commit pointed by local HEAD
+ my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+ chomp($HEAD_sha1);
+ # Get sha1 of commit pointed by remotes/$remotename/master
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
+ chomp($remoteorigin_sha1);
+
+ if ($last_local_revid > 0 &&
+ $last_local_revid < $last_remote_revid) {
+ return error_non_fast_forward($remote);
+ }
+
+ if ($HEAD_sha1 eq $remoteorigin_sha1) {
+ # nothing to push
+ return 0;
+ }
+
+ # Get every commit in between HEAD and refs/remotes/origin/master,
+ # including HEAD and refs/remotes/origin/master
+ my @commit_pairs = ();
+ if ($last_local_revid > 0) {
+ my $parsed_sha1 = $remoteorigin_sha1;
+ # Find a path from last MediaWiki commit to pushed commit
+ print {*STDERR} "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
+ my %local_ancestry;
+ foreach my $line (@local_ancestry) {
+ if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(/ /, $parents)) {
+ $local_ancestry{$parent} = $child;
+ }
+			} elsif ($line !~ /^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: ${line}\n";
+ }
+ }
+ while ($parsed_sha1 ne $HEAD_sha1) {
+ my $child = $local_ancestry{$parsed_sha1};
+ if (!$child) {
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
+ return error_non_fast_forward($remote);
+ }
+ push(@commit_pairs, [$parsed_sha1, $child]);
+ $parsed_sha1 = $child;
+ }
+ } else {
+ # No remote mediawiki revision. Export the whole
+ # history (linearized with --first-parent)
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children ${local}");
+ my @history = split(/\n/, $history);
+ @history = @history[1..$#history];
+ foreach my $line (reverse @history) {
+ my @commit_info_split = split(/[ \n]/, $line);
+ push(@commit_pairs, \@commit_info_split);
+ }
+ }
+
+ foreach my $commit_info_split (@commit_pairs) {
+ my $sha1_child = @{$commit_info_split}[0];
+ my $sha1_commit = @{$commit_info_split}[1];
+ my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
+ # TODO: we could detect rename, and encode them with a #redirect on the wiki.
+ # TODO: for now, it's just a delete+add
+ my @diff_info_list = split(/\0/, $diff_infos);
+ # Keep the subject line of the commit message as mediawiki comment for the revision
+ my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
+ chomp($commit_msg);
+ # Push every blob
+ while (@diff_info_list) {
+ my $status;
+ # git diff-tree -z gives an output like
+ # <metadata>\0<filename1>\0
+ # <metadata>\0<filename2>\0
+ # and we've split on \0.
+ my $info = shift(@diff_info_list);
+ my $file = shift(@diff_info_list);
+ ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+ if ($status eq 'non-fast-forward') {
+ # we may already have sent part of the
+ # commit to MediaWiki, but it's too
+ # late to cancel it. Stop the push in
+ # the middle, but still give an
+ # accurate error message.
+ return error_non_fast_forward($remote);
+ }
+ if ($status ne 'ok') {
+ die("Unknown error from mw_push_file()\n");
+ }
+ }
+ if (!$dumb_push) {
+ run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
+ }
+ }
+
+ print {*STDOUT} "ok ${remote}\n";
+ return 1;
+}
+
+sub get_allowed_file_extensions {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'fileextensions'
+ };
+ my $result = $mediawiki->api($query);
+ my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+ my %hashFile = map { $_ => 1 } @file_extensions;
+
+ return %hashFile;
+}
+
+# In memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# Ex.: "File", "Project".
+sub get_mw_namespace_id {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
+ my $name = shift;
+
+ if (!exists $namespace_id{$name}) {
+		# Check the configuration file to see whether the record for
+		# that namespace is already cached. Namespaces are stored in
+		# the form "Name_of_namespace:Id_namespace", e.g. "File:6".
+ my @temp = split(/\n/,
+ run_git("config --get-all remote.${remotename}.namespaceCache"));
+ chomp(@temp);
+ foreach my $ns (@temp) {
+ my ($n, $id) = split(/:/, $ns);
+ if ($id eq 'notANameSpace') {
+ $namespace_id{$n} = {is_namespace => 0};
+ } else {
+ $namespace_id{$n} = {is_namespace => 1, id => $id};
+ }
+ $cached_mw_namespace_id{$n} = 1;
+ }
+ }
+
+ if (!exists $namespace_id{$name}) {
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
+ # NS not found => get namespace id from MW and store it in
+ # configuration file.
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'namespaces'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+ if (defined($ns->{id}) && defined($ns->{canonical})) {
+ $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
+ if ($ns->{'*'}) {
+ # alias (e.g. french Fichier: as alias for canonical File:)
+ $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
+ }
+ }
+ }
+ }
+
+ my $ns = $namespace_id{$name};
+ my $id;
+
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
+ $ns = {is_namespace => 0};
+ $namespace_id{$name} = $ns;
+ }
+
+ if ($ns->{is_namespace}) {
+ $id = $ns->{id};
+ }
+
+	# Store "notANameSpace" as a special value for nonexistent namespaces
+ my $store_id = ($id || 'notANameSpace');
+
+ # Store explicitly requested namespaces on disk
+ if (!exists $cached_mw_namespace_id{$name}) {
+ run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
+ $cached_mw_namespace_id{$name} = 1;
+ }
+ return $id;
+}
+
+sub get_mw_namespace_id_for_page {
+	my $page = shift;
+	if ($page =~ /^([^:]*):/) {
+		return get_mw_namespace_id($1);
+	} else {
+		return;
+	}
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt
new file mode 100644
index 0000000000..23b7ef9f62
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.txt
@@ -0,0 +1,7 @@
+Git-Mediawiki is a project which aims to create a gateway
+between Git and MediaWiki, allowing Git users to push and pull
+content to and from MediaWiki just as they would with a regular
+Git repository, thanks to remote helpers.
+
+For more information, visit the wiki at
+https://github.com/moy/Git-Mediawiki/wiki
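+
+As a quick illustration (the wiki URL below is just an example), a
+typical round trip looks like:
+
+  git clone mediawiki::http://example.com/wiki/ mywiki
+  cd mywiki
+  # edit Some_page.mw, then:
+  git add Some_page.mw
+  git commit -m "Improve Some page"
+  git push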
diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore
new file mode 100644
index 0000000000..a7a40b4964
--- /dev/null
+++ b/contrib/mw-to-git/t/.gitignore
@@ -0,0 +1,4 @@
+WEB/
+wiki/
+trash directory.t*/
+test-results/
diff --git a/contrib/mw-to-git/t/Makefile b/contrib/mw-to-git/t/Makefile
new file mode 100644
index 0000000000..f422203fa0
--- /dev/null
+++ b/contrib/mw-to-git/t/Makefile
@@ -0,0 +1,31 @@
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+ @echo 'Run "$(MAKE) test" to launch test scripts'
+ @echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+ @for t in $(T); do \
+ echo "$$t"; \
+ "./$$t" || exit 1; \
+ done
+
+clean:
+ $(RM) -r 'trash directory'.*
diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README
new file mode 100644
index 0000000000..03f6ee5d72
--- /dev/null
+++ b/contrib/mw-to-git/t/README
@@ -0,0 +1,124 @@
+Tests for Mediawiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with git installed on it.
+
+Prerequisites
+-------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the test suite
+easily, the environment does not require root privileges (except to
+install the required packages if needed). It starts a web server
+instance under the user's account (using lighttpd greatly helps with
+that), and does not need a separate database daemon (thanks to the use
+of SQLite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. Even if you already have one, it is still
+strongly recommended to install one with the provided script. Here is
+how to do it:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+   installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+
+a. Run all tests:
+To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+To run a given test <test_name>, run ./<test_name> from the
+contrib/mw-to-git/t directory.
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful to test its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files.
+
+** `test_check_wiki_precond`:
+Check if the tests must be skipped or not. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content into the
+directory given as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, such as a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a git directory and a wiki
+one created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check if the given directory contains a given number of files.
+
+** `wiki_page_exists`:
+Tests if a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, except if the test re-uses the same wiki
+(and history) as the previous test.
+
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards given by git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the function `wiki_reset`.
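+
+As a rough, illustrative skeleton (the test description, page name and
+the $WIKI_URL variable are assumptions here; check the existing
+t936*.sh scripts and test-gitmw-lib.sh for the exact boilerplate and
+helper arguments), a new test could look like:
+
+  #!/bin/sh
+  test_description='example: clone a wiki containing one page'
+
+  . ./test-gitmw-lib.sh
+  . $TEST_DIRECTORY/test-lib.sh
+
+  test_check_wiki_precond
+
+  test_expect_success 'cloning retrieves the page' '
+          wiki_reset &&
+          wiki_edit_page Foo "some content" &&
+          git clone mediawiki::$WIKI_URL mw_dir &&
+          test_path_is_file mw_dir/Foo.mw
+  '
+
+  test_done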
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
new file mode 100755
index 0000000000..c215213c4b
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+ WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+ echo "usage: "
+	echo "  ./install-wiki.sh <install | delete | start | stop | --help>"
+ echo " install | -i : Install a wiki on your computer."
+ echo " delete | -d : Delete the wiki and all its pages and "
+ echo " content."
+ echo " start | -s : Start the previously configured lighttpd daemon"
+ echo " stop : Stop lighttpd daemon."
+}
+
+
+# Argument: install, delete, start, stop, --help | -h
+case "$1" in
+ "install" | "-i")
+ wiki_install
+ exit 0
+ ;;
+ "delete" | "-d")
+ wiki_delete
+ exit 0
+ ;;
+ "start" | "-s")
+ start_lighttpd
+ exit
+ ;;
+ "stop")
+ stop_lighttpd
+ exit
+ ;;
+ "--help" | "-h")
+ usage
+ exit 0
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+esac
diff --git a/contrib/mw-to-git/t/install-wiki/.gitignore b/contrib/mw-to-git/t/install-wiki/.gitignore
new file mode 100644
index 0000000000..b5a2a4408c
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/.gitignore
@@ -0,0 +1 @@
+wikidb.sqlite
diff --git a/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
new file mode 100644
index 0000000000..745e47e881
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
@@ -0,0 +1,129 @@
+<?php
+# This file was automatically generated by the MediaWiki 1.19.0
+# installer. If you make manual changes, please keep track in case you
+# need to recreate them later.
+#
+# See includes/DefaultSettings.php for all configurable settings
+# and their default values, but don't forget to make changes in _this_
+# file, not there.
+#
+# Further documentation for configuration settings may be found at:
+# http://www.mediawiki.org/wiki/Manual:Configuration_settings
+
+# Protect against web entry
+if ( !defined( 'MEDIAWIKI' ) ) {
+ exit;
+}
+
+## Uncomment this to disable output compression
+# $wgDisableOutputCompression = true;
+
+$wgSitename = "Git-MediaWiki-Test";
+$wgMetaNamespace = "Git-MediaWiki-Test";
+
+## The URL base path to the directory containing the wiki;
+## defaults for all runtime URL paths are based off of this.
+## For more information on customizing the URLs please see:
+## http://www.mediawiki.org/wiki/Manual:Short_URL
+$wgScriptPath = "@WG_SCRIPT_PATH@";
+$wgScriptExtension = ".php";
+
+## The protocol and server name to use in fully-qualified URLs
+$wgServer = "@WG_SERVER@";
+
+## The relative URL path to the skins directory
+$wgStylePath = "$wgScriptPath/skins";
+
+## The relative URL path to the logo. Make sure you change this from the default,
+## or else you'll overwrite your logo when you upgrade!
+$wgLogo = "$wgStylePath/common/images/wiki.png";
+
+## UPO means: this is also a user preference option
+
+$wgEnableEmail = true;
+$wgEnableUserEmail = true; # UPO
+
+$wgEmergencyContact = "apache@localhost";
+$wgPasswordSender = "apache@localhost";
+
+$wgEnotifUserTalk = false; # UPO
+$wgEnotifWatchlist = false; # UPO
+$wgEmailAuthentication = true;
+
+## Database settings
+$wgDBtype = "sqlite";
+$wgDBserver = "";
+$wgDBname = "@WG_SQLITE_DATAFILE@";
+$wgDBuser = "";
+$wgDBpassword = "";
+
+# SQLite-specific settings
+$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
+
+
+## Shared memory settings
+$wgMainCacheType = CACHE_NONE;
+$wgMemCachedServers = array();
+
+## To enable image uploads, make sure the 'images' directory
+## is writable, then set this to true:
+$wgEnableUploads = true;
+$wgUseImageMagick = true;
+$wgImageMagickConvertCommand ="@CONVERT@";
+$wgFileExtensions[] = 'txt';
+
+# InstantCommons allows wiki to use images from http://commons.wikimedia.org
+$wgUseInstantCommons = false;
+
+## If you use ImageMagick (or any other shell command) on a
+## Linux server, this will need to be set to the name of an
+## available UTF-8 locale
+$wgShellLocale = "en_US.utf8";
+
+## If you want to use image uploads under safe mode,
+## create the directories images/archive, images/thumb and
+## images/temp, and make them all writable. Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publicly accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
diff --git a/contrib/mw-to-git/t/install-wiki/db_install.php b/contrib/mw-to-git/t/install-wiki/db_install.php
new file mode 100644
index 0000000000..0f3f4e018a
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/db_install.php
@@ -0,0 +1,120 @@
+<?php
+/**
+ * This script generates a SQLite database for a MediaWiki version 1.19.0.
+ * You must specify the name of the database (argument 1), the login of
+ * the admin (argument 2), its password (argument 3), the folder where the
+ * database file is located (absolute path in argument 4) and the web
+ * server port (argument 5).
+ * It is used by the script install-wiki.sh to ease the installation
+ * of a MediaWiki.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in its web browser. This script simulates this
+ * behavior through the functions <get> and <submit>.
+ *
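+ * A hypothetical invocation, matching the call made by create_db in
+ * test-gitmw-lib.sh (the argument values below are illustrative only):
+ *   php db_install.php wikidb WikiAdmin AdminPass /tmp/mw-test 8080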
+ */
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+ $curl = curl_init();
+ $page_name_add = "";
+ if ($page_name != "") {
+ $page_name_add = '?page='.$page_name;
+ }
+ $url = $GLOBALS['url'].$page_name_add;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_HEADER, true);
+ curl_setopt($curl, CURLOPT_URL, $url);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+ $curl = curl_init();
+ $datapost = 'submit-continue=Continue+%E2%86%92';
+ if ($option != "") {
+ $datapost = $option.'&'.$datapost;
+ }
+ $url = $GLOBALS['url'].'?page='.$page_name;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_URL, $url);
+ curl_setopt($curl, CURLOPT_POST, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return "$page";
+}
+
+/*
+ * Here starts this script: it simulates the behavior of a user
+ * submitting forms to generate the database file.
+ * Note that this simulation was made for MediaWiki version 1.19.0;
+ * we cannot assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+ $page, $matches)) {
+ echo "Unexpected content for page downloaded:\n";
+ echo "$page";
+ die;
+};
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
new file mode 100644
index 0000000000..9da2dc5ff0
--- /dev/null
+++ b/contrib/mw-to-git/t/push-pull-tests.sh
@@ -0,0 +1,144 @@
+test_push_pull () {
+
+ test_expect_success 'Git pull works after adding a new wiki page' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_editpage Foo "page created after the git clone" false &&
+
+ (
+ cd mw_dir_1 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+ '
+
+ test_expect_success 'Git pull works after editing a wiki page' '
+ wiki_reset &&
+
+ wiki_editpage Foo "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage Foo "new line added on the wiki" true &&
+
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+ '
+
+ test_expect_success 'git pull works on conflict handled by auto-merge' '
+ wiki_reset &&
+
+ wiki_editpage Foo "1 init
+3
+5
+ " false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+ wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+ " false &&
+
+ (
+ cd mw_dir_3 &&
+ echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+ git commit -am "conflicting change on foo" &&
+ git pull &&
+ git push
+ )
+ '
+
+ test_expect_success 'Git push works after adding a file .mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ wiki_getallpage ref_page_4 &&
+ (
+ cd mw_dir_4 &&
+ test_path_is_missing Foo.mw &&
+ touch Foo.mw &&
+ echo "hello world" >>Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Foo" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_4 &&
+ test_diff_directories mw_dir_4 ref_page_4
+ '
+
+ test_expect_success 'Git push works after editing a file .mw' '
+ wiki_reset &&
+ wiki_editpage "Foo" "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+
+ (
+ cd mw_dir_5 &&
+ echo "new line added in the file Foo.mw" >>Foo.mw &&
+ git commit -am "edit file Foo.mw" &&
+ git push
+ ) &&
+
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5
+ '
+
+ test_expect_failure 'Git push works after deleting a file' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+
+ (
+ cd mw_dir_6 &&
+ git rm Foo.mw &&
+ git commit -am "page Foo.mw deleted" &&
+ git push
+ ) &&
+
+ test_must_fail wiki_page_exist Foo
+ '
+
+ test_expect_success 'Merge conflict expected and solving it' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_editpage Foo "1 conflict
+3 wiki
+4" false &&
+
+ (
+ cd mw_dir_7 &&
+ echo "1 conflict
+2 git
+4" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "conflict created" &&
+ test_must_fail git pull &&
+ "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
+ git commit -am "merge conflict solved" &&
+ git push
+ )
+ '
+
+ test_expect_failure 'git pull works after deleting a wiki page' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+
+ wiki_delete_page Foo &&
+ (
+ cd mw_dir_8 &&
+ git pull &&
+ test_path_is_missing Foo.mw
+ )
+ '
+}
diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
new file mode 100755
index 0000000000..22f069db48
--- /dev/null
+++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
@@ -0,0 +1,257 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+
+test_description='Test the Git Mediawiki remote helper: git clone'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone creates the expected git log with one file' '
+ wiki_reset &&
+ wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ (
+ cd mw_dir_1 &&
+ git log --format=%s HEAD^..HEAD >log.tmp
+ ) &&
+ echo "this must be the same" >msg.tmp &&
+ diff -b mw_dir_1/log.tmp msg.tmp
+'
+
+
+test_expect_success 'Git clone creates the expected git log with multiple files' '
+ wiki_reset &&
+ wiki_editpage daddy "this is not important" false -s="this must be the same" &&
+ wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
+ wiki_editpage daddy "neither is this" true -s="same same same" &&
+ wiki_editpage dj "dont care" false -s="identical" &&
+ wiki_editpage dj "dont care either" true -s="identical too" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ (
+ cd mw_dir_2 &&
+ git log --format=%s Daddy.mw >logDaddy.tmp &&
+ git log --format=%s Dj.mw >logDj.tmp
+ ) &&
+ echo "same same same" >msgDaddy.tmp &&
+ echo "this must also be the same" >>msgDaddy.tmp &&
+ echo "this must be the same" >>msgDaddy.tmp &&
+ echo "identical too" >msgDj.tmp &&
+ echo "identical" >>msgDj.tmp &&
+ diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
+ diff -b mw_dir_2/logDj.tmp msgDj.tmp
+'
+
+
+test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ test_contains_N_files mw_dir_3 1 &&
+ test_path_is_file mw_dir_3/Main_Page.mw
+'
+
+test_expect_success 'Git clone does not fetch a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "this page must be deleted before the clone" false &&
+ wiki_delete_page foo &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 1 &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ test_path_is_missing mw_dir_4/Foo.mw
+'
+
+test_expect_success 'Git clone works with page added' '
+ wiki_reset &&
+ wiki_editpage foo " I will be cloned" false &&
+ wiki_editpage bar "I will be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5 &&
+ wiki_delete_page foo &&
+ wiki_delete_page bar
+'
+
+test_expect_success 'Git clone works with an edited page ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will be edited" \
+ false -s "first edition of page foo"&&
+ wiki_editpage foo "this page has been edited and must be on the clone " true &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ test_path_is_file mw_dir_6/Foo.mw &&
+ test_path_is_file mw_dir_6/Main_Page.mw &&
+ wiki_getallpage mw_dir_6/page_ref_6 &&
+ test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
+ (
+ cd mw_dir_6 &&
+ git log --format=%s HEAD^ Foo.mw > ../Foo.log
+ ) &&
+ echo "first edition of page foo" > FooExpect.log &&
+ diff FooExpect.log Foo.log
+'
+
+
+test_expect_success 'Git clone works with several pages and some deleted ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will not be deleted" false &&
+ wiki_editpage bar "I must not be erased" false &&
+ wiki_editpage namnam "I will not be there at the end" false &&
+ wiki_editpage nyancat "nyan nyan nyan delete me" false &&
+ wiki_delete_page namnam &&
+ wiki_delete_page nyancat &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ test_path_is_file mw_dir_7/Foo.mw &&
+ test_path_is_file mw_dir_7/Bar.mw &&
+ test_path_is_missing mw_dir_7/Namnam.mw &&
+ test_path_is_missing mw_dir_7/Nyancat.mw &&
+ wiki_getallpage mw_dir_7/page_ref_7 &&
+ test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned ' '
+ wiki_reset &&
+ wiki_editpage foo "I will not be cloned" false &&
+ wiki_editpage bar "Do not clone me" false &&
+ wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
+ wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+ git clone -c remote.origin.pages=namnam \
+ mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_contains_N_files mw_dir_8 1 &&
+ test_path_is_file mw_dir_8/Namnam.mw &&
+ test_path_is_missing mw_dir_8/Main_Page.mw &&
+ (
+ cd mw_dir_8 &&
+ echo "this log must stay" >msg.tmp &&
+ git log --format=%s >log.tmp &&
+ diff -b msg.tmp log.tmp
+ ) &&
+ wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific page cloned ' '
+ wiki_reset &&
+ wiki_editpage foo "I will be there" false &&
+ wiki_editpage bar "I will not disapear" false &&
+ wiki_editpage namnam "I be erased" false &&
+ wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+ wiki_delete_page namnam &&
+ git clone -c remote.origin.pages="foo bar nyancat namnam" \
+ mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ test_contains_N_files mw_dir_9 3 &&
+ test_path_is_missing mw_dir_9/Namnam.mw &&
+ test_path_is_file mw_dir_9/Foo.mw &&
+ test_path_is_file mw_dir_9/Nyancat.mw &&
+ test_path_is_file mw_dir_9/Bar.mw &&
+ wiki_check_content mw_dir_9/Foo.mw Foo &&
+ wiki_check_content mw_dir_9/Bar.mw Bar &&
+ wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+ wiki_reset &&
+ wiki_editpage foo "foo 1" false &&
+ wiki_editpage bar "bar 1" false &&
+ wiki_editpage dummy "dummy 1" false &&
+ wiki_editpage cloned_1 "cloned_1 1" false &&
+ wiki_editpage cloned_2 "cloned_2 2" false &&
+ wiki_editpage cloned_3 "cloned_3 3" false &&
+ mkdir -p ref_page_10 &&
+ wiki_getpage cloned_1 ref_page_10 &&
+ wiki_getpage cloned_2 ref_page_10 &&
+ wiki_getpage cloned_3 ref_page_10 &&
+ git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+ mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, should be cloned" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ test_contains_N_files mw_dir_11 4 &&
+ test_path_is_file mw_dir_11/Nyan.mw &&
+ test_path_is_file mw_dir_11/Foo.mw &&
+ test_path_is_file mw_dir_11/Bar.mw &&
+ test_path_is_file mw_dir_11/Main_Page.mw &&
+ (
+ cd mw_dir_11 &&
+ test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Foo.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+ ) &&
+ wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_11/Foo.mw Foo &&
+ wiki_check_content mw_dir_11/Bar.mw Bar &&
+ wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option with a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, will be deleted" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ wiki_delete_page foo &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ test_contains_N_files mw_dir_12 3 &&
+ test_path_is_file mw_dir_12/Nyan.mw &&
+ test_path_is_missing mw_dir_12/Foo.mw &&
+ test_path_is_file mw_dir_12/Bar.mw &&
+ test_path_is_file mw_dir_12/Main_Page.mw &&
+ (
+ cd mw_dir_12 &&
+ test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+ ) &&
+ wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_12/Bar.mw Bar &&
+ wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+ wiki_reset &&
+ wiki_editpage Foo "I will be cloned" false -c=Category &&
+ wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+ wiki_editpage Dummy "I will not come" false &&
+ wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+ git clone -c remote.origin.categories="Category" \
+ mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ wiki_getallpage ref_page_13 Category &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resistance to modification of category on wiki for clone' '
+ wiki_reset &&
+ wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+ wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+ wiki_editpage Normalone "this page wont be modified and will be on git" false -c=Catone &&
+ wiki_editpage Notconsidered "this page will not appear on local" false &&
+ wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+ wiki_editpage Tobeedited "this page have been modified" true -c=Catone &&
+ wiki_delete_page Tobedeleted &&
+ git clone -c remote.origin.categories="Catone" \
+ mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ wiki_getallpage ref_page_14 Catone &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
new file mode 100755
index 0000000000..9ea201459b
--- /dev/null
+++ b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
new file mode 100755
index 0000000000..6b0dbdac4d
--- /dev/null
+++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -0,0 +1,347 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test git-mediawiki with special characters in filenames'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone works for a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage féé "This page must be délétéd before clone" false &&
+ wiki_editpage kèè "This page must be deleted before clone" false &&
+ wiki_editpage hàà "This page must be deleted before clone" false &&
+ wiki_editpage kîî "This page must be deleted before clone" false &&
+ wiki_editpage foo "This page must be deleted before clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+'
+
+
+test_expect_success 'Git pull works with a wiki with accents in the pages names' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage éàîôû "This page must be pulled" false &&
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+'
+
+
+test_expect_success 'Cloning a chosen page works with accents' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=kîî \
+ mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ wiki_check_content mw_dir_3/Kîî.mw Kîî &&
+ test_path_is_file mw_dir_3/Kîî.mw &&
+ rm -rf mw_dir_3
+'
+
+
+test_expect_success 'The shallow option works with accents' '
+ wiki_reset &&
+ wiki_editpage néoà "1st revision, should not be cloned" false &&
+ wiki_editpage néoà "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 2 &&
+ test_path_is_file mw_dir_4/Néoà.mw &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ (
+ cd mw_dir_4 &&
+ test $(git log --oneline Néoà.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+ ) &&
+ wiki_check_content mw_dir_4/Néoà.mw Néoà &&
+ wiki_check_content mw_dir_4/Main_Page.mw Main_Page
+'
+
+
+test_expect_success 'Cloning works when page name first letter has an accent' '
+ wiki_reset &&
+ wiki_editpage îî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=îî \
+ mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ test_path_is_file mw_dir_5/Îî.mw &&
+ wiki_check_content mw_dir_5/Îî.mw Îî
+'
+
+
+test_expect_success 'Git push works with a wiki with accents' '
+ wiki_reset &&
+ wiki_editpage féé "lots of accents : éèàÖ" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ (
+ cd mw_dir_6 &&
+ echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
+ git add Pîkächû.mw &&
+ git commit -m "A new page appears" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_6 &&
+ test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+ wiki_reset &&
+ wiki_editpage "é à î" "this page must be délété before the clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_getallpage ref_page_7 &&
+ test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_path_is_file mw_dir_8/File_\$_foo.mw &&
+ wiki_getallpage ref_page_8 &&
+ test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+
+test_expect_success 'character $ in file name (git -> mw) ' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ (
+ cd mw_dir_9 &&
+ echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+ git add . &&
+ git commit -am "file File_\$_foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_9 &&
+ test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ (
+ cd mw_dir_10 &&
+ echo "my new file foo" >foo.mw &&
+ echo "my new file Foo... Finger crossed" >Foo.mw &&
+ git add . &&
+ git commit -am "file foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+ wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/{char_1 &&
+ test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+ wiki_editpage Foo:Bar content false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+ (
+ cd mw_dir_11 &&
+ echo content >NotANameSpace:Page.mw &&
+ git add NotANameSpace:Page.mw &&
+ git commit -m "add page with colon" &&
+ git push
+ ) &&
+ wiki_page_exist NotANameSpace:Page
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+ wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
+ (
+ cd mw_dir_12 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_12/Char\{_1.mw &&
+ test_path_is_file mw_dir_12/Char\[_2.mw &&
+ wiki_getallpage ref_page_12 &&
+ mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+ mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+ test_diff_directories mw_dir_12 ref_page_12
+'
+
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ (
+ cd mw_dir_13 &&
+ echo "my new file {char_1" >\{char_1.mw &&
+ echo "my new file [char_2" >\[char_2.mw &&
+ git add . &&
+ git commit -am "committing some exotic file name..." &&
+ git push &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_13 &&
+ test_path_is_file ref_page_13/{char_1.mw &&
+ test_path_is_file ref_page_13/[char_2.mw &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formatting for file name from git to mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ (
+ cd mw_dir_14 &&
+ echo "my new file char{_1" >Char\{_1.mw &&
+ echo "my new file char[_2" >Char\[_2.mw &&
+ git add . &&
+ git commit -m "committing some exotic file name..." &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_14 &&
+ mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+ mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+ wiki_reset &&
+ wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+ wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+ (
+ cd mw_dir_16 &&
+ git add %2Ffo%2Fo.mw &&
+ git commit -m " %2Ffo%2Fo added" &&
+ git push
+ ) &&
+ wiki_page_exist \/fo\/o &&
+ wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+
+'
+
+
+test_expect_success 'git clone with \' '
+ wiki_reset &&
+ wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_path_is_file mw_dir_17/\\ko\\o.mw &&
+ wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+ echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+ (
+ cd mw_dir_18 &&
+ git add \\ko\\o.mw &&
+ git commit -m " \\ko\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\ko\\o &&
+ wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+
+'
+
+test_expect_success 'git clone with \ in format control' '
+ wiki_reset &&
+ wiki_editpage \\no\\o "this is not important" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+ test_path_is_file mw_dir_19/\\no\\o.mw &&
+ wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+ echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+ (
+ cd mw_dir_20 &&
+ git add \\fo\\o.mw &&
+ git commit -m " \\fo\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\fo\\o &&
+ wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage \"file\"_\\_foo "expect to be called \"file\"_\\_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_21 &&
+ test_path_is_file mw_dir_21/\"file\"_\\_foo.mw &&
+ wiki_getallpage ref_page_21 &&
+ test_diff_directories mw_dir_21 ref_page_21
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (git -> mw) ' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_22 &&
+ (
+ cd mw_dir_22 &&
+ echo "this file is called \"file\"_\\_foo.mw" >\"file\"_\\_foo &&
+ git add . &&
+ git commit -am "file \"file\"_\\_foo" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_22 &&
+ test_diff_directories mw_dir_22 ref_page_22
+'
+
+
+test_done
diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
new file mode 100755
index 0000000000..3ff3a09567
--- /dev/null
+++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
@@ -0,0 +1,217 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_git_reimport () {
+ git -c remote.origin.dumbPush=true push &&
+ git -c remote.origin.mediaImport=true pull --rebase
+}
+
+# Don't bother with permissions, be administrator by default
+test_expect_success 'setup config' '
+ git config --global remote.origin.mwLogin WikiAdmin &&
+ git config --global remote.origin.mwPassword AdminPass &&
+ test_might_fail git config --global --unset remote.origin.mediaImport
+'
+
+test_expect_success 'git push can upload media (File:) files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file" &&
+ git push &&
+ "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file with binary content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
+ (cd mw_dir_clone && git checkout HEAD^) &&
+ (cd mw_dir && git checkout HEAD^) &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
+'
+
+test_expect_success 'git push can upload media (File:) files containing valid UTF-8' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ "$PERL_PATH" -e "print STDOUT \"UTF-8 content: éèàéê€.\";" >Bar.txt &&
+ git add Bar.txt &&
+ git commit -m "add a text file with UTF-8 content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files containing valid UTF-8' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Bar.txt mw_dir/Bar.txt
+'
+
+test_expect_success 'git push & pull work with locally renamed media files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a file" &&
+ git mv Foo.txt Bar.txt &&
+ git commit -m "Rename a file" &&
+ test_git_reimport &&
+ echo "A File" >expect &&
+ test_cmp expect Bar.txt &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push can propagate local page deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ test_path_is_missing Foo.mw &&
+ echo "hello world" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Add the page Foo" &&
+ git push &&
+ rm -f Foo.mw &&
+ git commit -am "Delete the page Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.mw
+ )
+'
+
+test_expect_success 'git push can propagate local media file deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+# test failure: the file is correctly uploaded, and then deleted but
+# as no page link to it, the import (which looks at page revisions)
+# doesn't notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it doesn't appear.
+test_expect_failure 'git pull correctly imports media file deletion when no page link to it' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git push &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >foo.forbidden &&
+ git add foo.forbidden &&
+ git commit -m "add a file" &&
+ git push 2>actual &&
+ test_i18ngrep "foo.forbidden is not a permitted file" actual
+ )
+'
+
+test_expect_success 'setup a repository with media files' '
+ wiki_reset &&
+ wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+ echo "File content" >File.txt &&
+ wiki_upload_file File.txt &&
+ echo "Another file content" >AnotherFile.txt &&
+ wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+ git clone -c remote.origin.pages=testpage \
+ -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_when_finished "rm -rf mw_dir_15" &&
+ test_contains_N_files mw_dir_15 3 &&
+ test_path_is_file mw_dir_15/Testpage.mw &&
+ test_path_is_file mw_dir_15/File:File.txt.mw &&
+ test_path_is_file mw_dir_15/File.txt &&
+ test_path_is_missing mw_dir_15/Main_Page.mw &&
+ test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+ test_path_is_missing mw_dir_15/AnotherFile.txt &&
+ wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+ test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+ test_when_finished "rm -rf mw_dir_16" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ test_contains_N_files mw_dir_16 1 &&
+ test_path_is_file mw_dir_16/Testpage.mw &&
+ test_path_is_missing mw_dir_16/File:File.txt.mw &&
+ test_path_is_missing mw_dir_16/File.txt &&
+ test_path_is_missing mw_dir_16/Main_Page.mw &&
+ wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+ test_when_finished "rm -fr mw_dir_17" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_contains_N_files mw_dir_17 1 &&
+ test_path_is_file mw_dir_17/Testpage.mw &&
+ test_path_is_missing mw_dir_17/File:File.txt.mw &&
+ test_path_is_missing mw_dir_17/File.txt &&
+ test_path_is_missing mw_dir_17/Main_Page.mw &&
+ wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
new file mode 100755
index 0000000000..5c22457a0b
--- /dev/null
+++ b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+ git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/t9365-continuing-queries.sh b/contrib/mw-to-git/t/t9365-continuing-queries.sh
new file mode 100755
index 0000000000..016454749f
--- /dev/null
+++ b/contrib/mw-to-git/t/t9365-continuing-queries.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: queries w/ more than 500 results'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'creating page w/ >500 revisions' '
+ wiki_reset &&
+ for i in $(test_seq 501)
+ do
+ echo "creating revision $i" &&
+ wiki_editpage foo "revision $i<br/>" true
+ done
+'
+
+test_expect_success 'cloning page w/ >500 revisions' '
+ git clone mediawiki::'"$WIKI_URL"' mw_dir
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
new file mode 100755
index 0000000000..6546294f15
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -0,0 +1,432 @@
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these ones
+#
+
+. ./test.config
+
+WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+if test "$LIGHTTPD" = "false" ; then
+ PORT=80
+else
+ WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+wiki_upload_file () {
+ "$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+ "$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+ "$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+ "$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+ die_with_status 1 "$@"
+}
+
+die_with_status () {
+ status=$1
+ shift
+ echo >&2 "$*"
+ exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+ if ! test_have_prereq PERL
+ then
+ skip_all='skipping gateway git-mw tests, perl not available'
+ test_done
+ fi
+
+ GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
+ PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
+
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+ then
+ skip_all='skipping gateway git-mw tests, no mediawiki found'
+ test_done
+ fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with diff
+# and error out if they do not match. The comparison does not look
+# into .git in the process.
+# Warning: the first argument MUST be the directory containing the git data.
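+# Example: test_diff_directories mw_dir_1 ref_page_1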
+test_diff_directories () {
+ rm -rf "$1_tmp"
+ mkdir -p "$1_tmp"
+ cp "$1"/*.mw "$1_tmp"
+ diff -r -b "$1_tmp" "$2"
+}
+
+# test_contains_N_files <dir> <N>
+#
+# Check that <dir> contains exactly <N> files.
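+# Example: test_contains_N_files mw_dir_3 1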
+test_contains_N_files () {
+ if test $(ls -- "$1" | wc -l) -ne "$2"; then
+ echo "directory $1 should contain $2 files"
+ echo "it contains these files:"
+ ls "$1"
+ false
+ fi
+}
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compares the contents of the file <file_name> and the wiki page
+# <page_name> and exits with error 1 if they do not match.
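+# Example: wiki_check_content mw_dir_8/Namnam.mw Namnam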
+wiki_check_content () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$2" wiki_tmp
+ # replacement of forbidden character in file name
+ page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+ diff -b "$1" wiki_tmp/"$page_name".mw
+ if test $? -ne 0
+ then
+ rm -rf wiki_tmp
+ error "ERROR: file $2 not found on wiki"
+ fi
+ rm -rf wiki_tmp
+}
+
+# wiki_page_exist <page_name>
+#
+# Check the existence of the page <page_name> on the wiki and exit
+# with an error if it is absent.
+wiki_page_exist () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$1" wiki_tmp
+ page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+ if test -f wiki_tmp/"$page_name".mw ; then
+ rm -rf wiki_tmp
+ else
+ rm -rf wiki_tmp
+ error "test failed: file $1 not found on wiki"
+ fi
+}
+
+# wiki_getallpagename
+#
+# Fetch the name of each page on the wiki.
+wiki_getallpagename () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the name of each page belonging to <category> on the wiki.
+wiki_getallpagecategory () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, then wiki_getallpage fetches only the pages
+# included in <category>.
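+# Examples:
+#   wiki_getallpage ref_page_1
+#   wiki_getallpage ref_page_13 Category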
+wiki_getallpage () {
+ if test -z "$2";
+ then
+ wiki_getallpagename
+ else
+ wiki_getallpagecategory "$2"
+ fi
+ mkdir -p "$1"
+ while read -r line; do
+ wiki_getpage "$line" $1;
+ done < all.txt
+}
+
+# ================= Install part =================
+
+error () {
+ echo "$@" >&2
+ exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+ mkdir -p $WEB
+ mkdir -p $WEB_TMP
+ mkdir -p $WEB_WWW
+ cat > $WEB/lighttpd.conf <<EOF
+ server.document-root = "$CURR_DIR/$WEB_WWW"
+ server.port = $PORT
+ server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+ server.modules = (
+ "mod_rewrite",
+ "mod_redirect",
+ "mod_access",
+ "mod_accesslog",
+ "mod_fastcgi"
+ )
+
+ index-file.names = ("index.php" , "index.html")
+
+ mimetype.assign = (
+ ".pdf" => "application/pdf",
+ ".sig" => "application/pgp-signature",
+ ".spl" => "application/futuresplash",
+ ".class" => "application/octet-stream",
+ ".ps" => "application/postscript",
+ ".torrent" => "application/x-bittorrent",
+ ".dvi" => "application/x-dvi",
+ ".gz" => "application/x-gzip",
+ ".pac" => "application/x-ns-proxy-autoconfig",
+ ".swf" => "application/x-shockwave-flash",
+ ".tar.gz" => "application/x-tgz",
+ ".tgz" => "application/x-tgz",
+ ".tar" => "application/x-tar",
+ ".zip" => "application/zip",
+ ".mp3" => "audio/mpeg",
+ ".m3u" => "audio/x-mpegurl",
+ ".wma" => "audio/x-ms-wma",
+ ".wax" => "audio/x-ms-wax",
+ ".ogg" => "application/ogg",
+ ".wav" => "audio/x-wav",
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".xbm" => "image/x-xbitmap",
+ ".xpm" => "image/x-xpixmap",
+ ".xwd" => "image/x-xwindowdump",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".js" => "text/javascript",
+ ".asc" => "text/plain",
+ ".c" => "text/plain",
+ ".cpp" => "text/plain",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".mpeg" => "video/mpeg",
+ ".mpg" => "video/mpeg",
+ ".mov" => "video/quicktime",
+ ".qt" => "video/quicktime",
+ ".avi" => "video/x-msvideo",
+ ".asf" => "video/x-ms-asf",
+ ".asx" => "video/x-ms-asf",
+ ".wmv" => "video/x-ms-wmv",
+ ".bz2" => "application/x-bzip",
+ ".tbz" => "application/x-bzip-compressed-tar",
+ ".tar.bz2" => "application/x-bzip-compressed-tar",
+ "" => "text/plain"
+ )
+
+ fastcgi.server = ( ".php" =>
+ ("localhost" =>
+ ( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+ "bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+ )
+ )
+ )
+EOF
+
+ cat > $WEB/php.ini <<EOF
+ session.save_path ='$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon. On restart, the configuration files are rewritten.
+start_lighttpd () {
+ if test -f "$WEB_TMP/pid"; then
+ echo "Instance already running. Restarting..."
+ stop_lighttpd
+ fi
+ config_lighttpd
+ "$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+ if test $? -ne 0 ; then
+ echo "Could not execute http deamon lighttpd"
+ exit 1
+ fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon.
+stop_lighttpd () {
+ test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+}
+
+# Create the SQLite database of the MediaWiki. If the database file already
+# exists, it will be deleted.
+# This function should be run from the directory where $FILES_FOLDER is
+# located.
+create_db () {
+ rm -f "$TMP/$DB_FILE"
+
+ echo "Generating the SQLite database file. It can take some time ..."
+ # Run the php script to generate the SQLite database file
+ # with cURL calls.
+ php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+ "$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+ if [ ! -f "$TMP/$DB_FILE" ] ; then
+ error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+ fi
+
+ # Copy the generated database file into the directory the
+ # user indicated.
+ cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+ error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+ if test $LIGHTTPD = "true" ; then
+ start_lighttpd
+ fi
+
+ SERVER_ADDR=$SERVER_ADDR:$PORT
+ # In this part, we change directory to $TMP in order to download,
+ # unpack and copy the files of MediaWiki
+ (
+ mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+ error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+ Please create it and launch the script again."
+ fi
+
+ # Fetch MediaWiki's archive if not already present in the TMP directory
+ MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
+ cd "$TMP"
+ if [ ! -f $MW_FILENAME ] ; then
+ echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
+ wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
+ error "Unable to download "\
+ "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
+ "$MW_FILENAME. "\
+ "Please fix your connection and launch the script again."
+ echo "$MW_FILENAME downloaded in $(pwd). "\
+ "You can delete it later if you want."
+ else
+ echo "Reusing existing $MW_FILENAME downloaded in $(pwd)."
+ fi
+ archive_abs_path=$(pwd)/$MW_FILENAME
+ cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+ error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+ tar xzf "$archive_abs_path" --strip-components=1 ||
+ error "Unable to extract WikiMedia's files from $archive_abs_path to "\
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ ) || exit 1
+
+ create_db
+
+ # Copy the generic LocalSettings.php in the web server's directory
+ # And modify parameters according to the ones set at the top
+ # of this script.
+ # Note that LocalSettings.php is never modified.
+ if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+ error "Can't find $FILES_FOLDER/LocalSettings.php " \
+ "in the current folder. "\
+ "Please run the script inside its folder."
+ fi
+ cp "$FILES_FOLDER/LocalSettings.php" \
+ "$FILES_FOLDER/LocalSettings-tmp.php" ||
+ error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
+ "to $FILES_FOLDER/LocalSettings-tmp.php"
+
+ # Parse and set the LocalSettings file of the user according to the
+ # CONFIGURATION VARIABLES section at the beginning of this script
+ file_swap="$FILES_FOLDER/LocalSettings-swap.php"
+ sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATADIR@,$TMP," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+
+ mv "$FILES_FOLDER/LocalSettings-tmp.php" \
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
+ error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
+ "in $WIKI_DIR_INST/$WIKI_DIR_NAME"
+ echo "File $FILES_FOLDER/LocalSettings.php is set in" \
+ " $WIKI_DIR_INST/$WIKI_DIR_NAME"
+
+ echo "Your wiki has been installed. You can check it at
+ http://$SERVER_ADDR/$WIKI_DIR_NAME"
+}
+
+# Reset the database of the wiki and the password of the admin
+#
+# Warning: This function must be called only in a subdirectory of t/ directory
+wiki_reset () {
+ # Copy initial database of the wiki
+ if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
+ error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
+ fi
+ cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
+ error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
+ echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
+}
+
+# Delete the wiki created in the web server's directory and all its content
+# saved in the database.
+wiki_delete () {
+ if test $LIGHTTPD = "true"; then
+ stop_lighttpd
+ rm -fr "$WEB"
+ else
+ # Delete the wiki's directory.
+ rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
+ error "Wiki's directory $WIKI_DIR_INST/" \
+ "$WIKI_DIR_NAME could not be deleted"
+ # Delete the wiki's SQLite database.
+ rm -f "$TMP/$DB_FILE" ||
+ error "Database $TMP/$DB_FILE could not be deleted."
+ fi
+
+ # Delete the wiki's SQLite database
+ rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
+ rm -f "$FILES_FOLDER/$DB_FILE"
+ rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
+}
diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl
new file mode 100755
index 0000000000..0ff76259fa
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw.pl
@@ -0,0 +1,225 @@
+#!/usr/bin/perl -w -s
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Usage:
+# ./test-gitmw.pl <command> [argument]*
+# Execute in terminal using the name of the function to call as first
+# parameter, and the function's arguments as following parameters
+#
+# Example:
+# ./test-gitmw.pl "get_page" foo .
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+# "get_page"
+# "delete_page"
+# "edit_page"
+# "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+#Parsing of the config file
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+ chomp;
+ s/#.*//;
+ s/^\s+//;
+ s/\s+$//;
+ next unless length;
+ my ($key, $value) = split (/\s*=\s*/,$_, 2);
+ $config{$key} = $value;
+ last if ($key eq 'LIGHTTPD' and $value eq 'false');
+ last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in to the mediawiki referenced by the global variable $mw with
+# user <name> and password <password>.
+sub wiki_login {
+ $mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
+ || die "getpage: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch the page <wiki_page> from the wiki referenced by the global
+# variable $mw and copy its content into the directory <dest_path>.
+sub wiki_getpage {
+ my $pagename = $_[0];
+ my $destdir = $_[1];
+
+ my $page = $mw->get_page( { title => $pagename } );
+ if (!defined($page)) {
+ die "getpage: wiki does not exist";
+ }
+
+ my $content = $page->{'*'};
+ if (!defined($content)) {
+ die "getpage: page does not exist";
+ }
+
+ $pagename=$page->{'title'};
+ # Replace spaces by underscore in the page name
+ $pagename =~ s/ /_/g;
+ $pagename =~ s/\//%2F/g;
+ open(my $file, ">$destdir/$pagename.mw");
+ print $file "$content";
+ close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# delete the page with name <page_name> from the wiki referenced
+# in the global variable $mw
+sub wiki_delete_page {
+ my $pagename = $_[0];
+
+ my $exist=$mw->get_page({title => $pagename});
+
+ if (defined($exist->{'*'})){
+ $mw->edit({ action => 'delete',
+ title => $pagename})
+ || die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+ } else {
+ die "no page with such name found: $pagename\n";
+ }
+}
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit the page named <wiki_page> with content <wiki_content> on the wiki
+# referenced by the global variable $mw.
+# If <wiki_append> == true, append <wiki_content> to the current content
+# of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
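+#
+# Example (hypothetical page, category and summary values):
+#   ./test-gitmw.pl edit_page Foo "Hello world" true -c=Example -s="add greeting"
+# appends "Hello world" to the page "Foo", files it under the category
+# "Example" and records "add greeting" as the edit summary.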
+sub wiki_editpage {
+ my $wiki_page = $_[0];
+ my $wiki_content = $_[1];
+ my $wiki_append = $_[2];
+ my $summary = "";
+ my ($summ, $cat) = ();
+ GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+ my $append = 0;
+ if (defined($wiki_append) && $wiki_append eq 'true') {
+ $append=1;
+ }
+
+ my $previous_text ="";
+
+ if ($append) {
+ my $ref = $mw->get_page( { title => $wiki_page } );
+ $previous_text = $ref->{'*'};
+ }
+
+ my $text = $wiki_content;
+ if (defined($previous_text)) {
+ $text="$previous_text$text";
+ }
+
+ # Optionally, add this page to a category.
+ if (defined($cat)) {
+ my $category_name="[[Category:$cat]]";
+ $text="$text\n $category_name";
+ }
+ if(defined($summ)){
+ $summary=$summ;
+ }
+
+ $mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
+
+# wiki_getallpagename [<category>]
+#
+# Fetch the names of all pages of the wiki referenced by the global
+# variable $mw and write them to the file all.txt, one name per line.
+# If the argument <category> is defined, only the pages belonging to
+# <category> are fetched.
+sub wiki_getallpagename {
+ # fetch the pages of the wiki
+ if (defined($_[0])) {
+ my $mw_pages = $mw->list ( { action => 'query',
+ list => 'categorymembers',
+ cmtitle => "Category:$_[0]",
+ cmnamespace => 0,
+ cmlimit => 500 },
+ )
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+
+ } else {
+ my $mw_pages = $mw->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ })
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+ }
+}
+
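+# wiki_upload_file <file_name>
+#
+# Upload the local file <file_name> to the wiki referenced by the global
+# variable $mw.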
+sub wiki_upload_file {
+ my $file_name = $_[0];
+ my $resultat = $mw->edit ( {
+ action => 'upload',
+ filename => $file_name,
+ comment => 'upload a file',
+ file => [ $file_name ],
+ ignorewarnings=>1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+ upload_file wiki_upload_file
+ get_page wiki_getpage
+ delete_page wiki_delete_page
+ edit_page wiki_editpage
+ getallpagename wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
new file mode 100644
index 0000000000..5ba0684162
--- /dev/null
+++ b/contrib/mw-to-git/t/test.config
@@ -0,0 +1,37 @@
+# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# The wiki's SQLite database, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.
+# tested versions: 1.19.X -> 1.21.1
+MW_VERSION_MAJOR=1.21
+MW_VERSION_MINOR=1
+FILES_FOLDER=install-wiki
+DB_INSTALL_SCRIPT=db_install.php
diff --git a/contrib/p4import/README b/contrib/p4import/README
deleted file mode 100644
index b9892b6793..0000000000
--- a/contrib/p4import/README
+++ /dev/null
@@ -1 +0,0 @@
-Please see contrib/fast-import/git-p4 for a better Perforce importer.
diff --git a/contrib/p4import/git-p4import.py b/contrib/p4import/git-p4import.py
deleted file mode 100644
index 0f3d97b67e..0000000000
--- a/contrib/p4import/git-p4import.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/python
-#
-# This tool is copyright (c) 2006, Sean Estabrooks.
-# It is released under the Gnu Public License, version 2.
-#
-# Import Perforce branches into Git repositories.
-# Checking out the files is done by calling the standard p4
-# client which you must have properly configured yourself
-#
-
-import marshal
-import os
-import sys
-import time
-import getopt
-
-from signal import signal, \
- SIGPIPE, SIGINT, SIG_DFL, \
- default_int_handler
-
-signal(SIGPIPE, SIG_DFL)
-s = signal(SIGINT, SIG_DFL)
-if s != default_int_handler:
- signal(SIGINT, s)
-
-def die(msg, *args):
- for a in args:
- msg = "%s %s" % (msg, a)
- print "git-p4import fatal error:", msg
- sys.exit(1)
-
-def usage():
- print "USAGE: git-p4import [-q|-v] [--authors=<file>] [-t <timezone>] [//p4repo/path <branch>]"
- sys.exit(1)
-
-verbosity = 1
-logfile = "/dev/null"
-ignore_warnings = False
-stitch = 0
-tagall = True
-
-def report(level, msg, *args):
- global verbosity
- global logfile
- for a in args:
- msg = "%s %s" % (msg, a)
- fd = open(logfile, "a")
- fd.writelines(msg)
- fd.close()
- if level <= verbosity:
- print msg
-
-class p4_command:
- def __init__(self, _repopath):
- try:
- global logfile
- self.userlist = {}
- if _repopath[-1] == '/':
- self.repopath = _repopath[:-1]
- else:
- self.repopath = _repopath
- if self.repopath[-4:] != "/...":
- self.repopath= "%s/..." % self.repopath
- f=os.popen('p4 -V 2>>%s'%logfile, 'rb')
- a = f.readlines()
- if f.close():
- raise
- except:
- die("Could not find the \"p4\" command")
-
- def p4(self, cmd, *args):
- global logfile
- cmd = "%s %s" % (cmd, ' '.join(args))
- report(2, "P4:", cmd)
- f=os.popen('p4 -G %s 2>>%s' % (cmd,logfile), 'rb')
- list = []
- while 1:
- try:
- list.append(marshal.load(f))
- except EOFError:
- break
- self.ret = f.close()
- return list
-
- def sync(self, id, force=False, trick=False, test=False):
- if force:
- ret = self.p4("sync -f %s@%s"%(self.repopath, id))[0]
- elif trick:
- ret = self.p4("sync -k %s@%s"%(self.repopath, id))[0]
- elif test:
- ret = self.p4("sync -n %s@%s"%(self.repopath, id))[0]
- else:
- ret = self.p4("sync %s@%s"%(self.repopath, id))[0]
- if ret['code'] == "error":
- data = ret['data'].upper()
- if data.find('VIEW') > 0:
- die("Perforce reports %s is not in client view"% self.repopath)
- elif data.find('UP-TO-DATE') < 0:
- die("Could not sync files from perforce", self.repopath)
-
- def changes(self, since=0):
- try:
- list = []
- for rec in self.p4("changes %s@%s,#head" % (self.repopath, since+1)):
- list.append(rec['change'])
- list.reverse()
- return list
- except:
- return []
-
- def authors(self, filename):
- f=open(filename)
- for l in f.readlines():
- self.userlist[l[:l.find('=')].rstrip()] = \
- (l[l.find('=')+1:l.find('<')].rstrip(),l[l.find('<')+1:l.find('>')])
- f.close()
- for f,e in self.userlist.items():
- report(2, f, ":", e[0], " <", e[1], ">")
-
- def _get_user(self, id):
- if not self.userlist.has_key(id):
- try:
- user = self.p4("users", id)[0]
- self.userlist[id] = (user['FullName'], user['Email'])
- except:
- self.userlist[id] = (id, "")
- return self.userlist[id]
-
- def _format_date(self, ticks):
- symbol='+'
- name = time.tzname[0]
- offset = time.timezone
- if ticks[8]:
- name = time.tzname[1]
- offset = time.altzone
- if offset < 0:
- offset *= -1
- symbol = '-'
- localo = "%s%02d%02d %s" % (symbol, offset / 3600, offset % 3600, name)
- tickso = time.strftime("%a %b %d %H:%M:%S %Y", ticks)
- return "%s %s" % (tickso, localo)
-
- def where(self):
- try:
- return self.p4("where %s" % self.repopath)[-1]['path']
- except:
- return ""
-
- def describe(self, num):
- desc = self.p4("describe -s", num)[0]
- self.msg = desc['desc']
- self.author, self.email = self._get_user(desc['user'])
- self.date = self._format_date(time.localtime(long(desc['time'])))
- return self
-
-class git_command:
- def __init__(self):
- try:
- self.version = self.git("--version")[0][12:].rstrip()
- except:
- die("Could not find the \"git\" command")
- try:
- self.gitdir = self.get_single("rev-parse --git-dir")
- report(2, "gdir:", self.gitdir)
- except:
- die("Not a git repository... did you forget to \"git init\" ?")
- try:
- self.cdup = self.get_single("rev-parse --show-cdup")
- if self.cdup != "":
- os.chdir(self.cdup)
- self.topdir = os.getcwd()
- report(2, "topdir:", self.topdir)
- except:
- die("Could not find top git directory")
-
- def git(self, cmd):
- global logfile
- report(2, "GIT:", cmd)
- f=os.popen('git %s 2>>%s' % (cmd,logfile), 'rb')
- r=f.readlines()
- self.ret = f.close()
- return r
-
- def get_single(self, cmd):
- return self.git(cmd)[0].rstrip()
-
- def current_branch(self):
- try:
- testit = self.git("rev-parse --verify HEAD")[0]
- return self.git("symbolic-ref HEAD")[0][11:].rstrip()
- except:
- return None
-
- def get_config(self, variable):
- try:
- return self.git("config --get %s" % variable)[0].rstrip()
- except:
- return None
-
- def set_config(self, variable, value):
- try:
- self.git("config %s %s"%(variable, value) )
- except:
- die("Could not set %s to " % variable, value)
-
- def make_tag(self, name, head):
- self.git("tag -f %s %s"%(name,head))
-
- def top_change(self, branch):
- try:
- a=self.get_single("name-rev --tags refs/heads/%s" % branch)
- loc = a.find(' tags/') + 6
- if a[loc:loc+3] != "p4/":
- raise
- return int(a[loc+3:][:-2])
- except:
- return 0
-
- def update_index(self):
- self.git("ls-files -m -d -o -z | git update-index --add --remove -z --stdin")
-
- def checkout(self, branch):
- self.git("checkout %s" % branch)
-
- def repoint_head(self, branch):
- self.git("symbolic-ref HEAD refs/heads/%s" % branch)
-
- def remove_files(self):
- self.git("ls-files | xargs rm")
-
- def clean_directories(self):
- self.git("clean -d")
-
- def fresh_branch(self, branch):
- report(1, "Creating new branch", branch)
- self.git("ls-files | xargs rm")
- os.remove(".git/index")
- self.repoint_head(branch)
- self.git("clean -d")
-
- def basedir(self):
- return self.topdir
-
- def commit(self, author, email, date, msg, id):
- self.update_index()
- fd=open(".msg", "w")
- fd.writelines(msg)
- fd.close()
- try:
- current = self.get_single("rev-parse --verify HEAD")
- head = "-p HEAD"
- except:
- current = ""
- head = ""
- tree = self.get_single("write-tree")
- for r,l in [('DATE',date),('NAME',author),('EMAIL',email)]:
- os.environ['GIT_AUTHOR_%s'%r] = l
- os.environ['GIT_COMMITTER_%s'%r] = l
- commit = self.get_single("commit-tree %s %s < .msg" % (tree,head))
- os.remove(".msg")
- self.make_tag("p4/%s"%id, commit)
- self.git("update-ref HEAD %s %s" % (commit, current) )
-
-try:
- opts, args = getopt.getopt(sys.argv[1:], "qhvt:",
- ["authors=","help","stitch=","timezone=","log=","ignore","notags"])
-except getopt.GetoptError:
- usage()
-
-for o, a in opts:
- if o == "-q":
- verbosity = 0
- if o == "-v":
- verbosity += 1
- if o in ("--log"):
- logfile = a
- if o in ("--notags"):
- tagall = False
- if o in ("-h", "--help"):
- usage()
- if o in ("--ignore"):
- ignore_warnings = True
-
-git = git_command()
-branch=git.current_branch()
-
-for o, a in opts:
- if o in ("-t", "--timezone"):
- git.set_config("perforce.timezone", a)
- if o in ("--stitch"):
- git.set_config("perforce.%s.path" % branch, a)
- stitch = 1
-
-if len(args) == 2:
- branch = args[1]
- git.checkout(branch)
- if branch == git.current_branch():
- die("Branch %s already exists!" % branch)
- report(1, "Setting perforce to ", args[0])
- git.set_config("perforce.%s.path" % branch, args[0])
-elif len(args) != 0:
- die("You must specify the perforce //depot/path and git branch")
-
-p4path = git.get_config("perforce.%s.path" % branch)
-if p4path == None:
- die("Do not know Perforce //depot/path for git branch", branch)
-
-p4 = p4_command(p4path)
-
-for o, a in opts:
- if o in ("-a", "--authors"):
- p4.authors(a)
-
-localdir = git.basedir()
-if p4.where()[:len(localdir)] != localdir:
- report(1, "**WARNING** Appears p4 client is misconfigured")
- report(1, " for sync from %s to %s" % (p4.repopath, localdir))
- if ignore_warnings != True:
- die("Reconfigure or use \"--ignore\" on command line")
-
-if stitch == 0:
- top = git.top_change(branch)
-else:
- top = 0
-changes = p4.changes(top)
-count = len(changes)
-if count == 0:
- report(1, "Already up to date...")
- sys.exit(0)
-
-ptz = git.get_config("perforce.timezone")
-if ptz:
- report(1, "Setting timezone to", ptz)
- os.environ['TZ'] = ptz
- time.tzset()
-
-if stitch == 1:
- git.remove_files()
- git.clean_directories()
- p4.sync(changes[0], force=True)
-elif top == 0 and branch != git.current_branch():
- p4.sync(changes[0], test=True)
- report(1, "Creating new initial commit");
- git.fresh_branch(branch)
- p4.sync(changes[0], force=True)
-else:
- p4.sync(changes[0], trick=True)
-
-report(1, "processing %s changes from p4 (%s) to git (%s)" % (count, p4.repopath, branch))
-for id in changes:
- report(1, "Importing changeset", id)
- change = p4.describe(id)
- p4.sync(id)
- if tagall :
- git.commit(change.author, change.email, change.date, change.msg, id)
- else:
- git.commit(change.author, change.email, change.date, change.msg, "import")
- if stitch == 1:
- git.clean_directories()
- stitch = 0
diff --git a/contrib/p4import/git-p4import.txt b/contrib/p4import/git-p4import.txt
deleted file mode 100644
index 9967587fe6..0000000000
--- a/contrib/p4import/git-p4import.txt
+++ /dev/null
@@ -1,167 +0,0 @@
-git-p4import(1)
-===============
-
-NAME
-----
-git-p4import - Import a Perforce repository into git
-
-
-SYNOPSIS
---------
-[verse]
-`git-p4import` [-q|-v] [--notags] [--authors <file>] [-t <timezone>]
- <//p4repo/path> <branch>
-`git-p4import` --stitch <//p4repo/path>
-`git-p4import`
-
-
-DESCRIPTION
------------
-Import a Perforce repository into an existing git repository. When
-a <//p4repo/path> and <branch> are specified a new branch with the
-given name will be created and the initial import will begin.
-
-Once the initial import is complete you can do an incremental import
-of new commits from the Perforce repository. You do this by checking
-out the appropriate git branch and then running `git-p4import` without
-any options.
-
-The standard p4 client is used to communicate with the Perforce
-repository; it must be configured correctly in order for `git-p4import`
-to operate (see below).
-
-
-OPTIONS
--------
--q::
- Do not display any progress information.
-
--v::
- Give extra progress information.
-
-\--authors::
- Specify an authors file containing a mapping of Perforce user
- ids to full names and email addresses (see Notes below).
-
-\--notags::
- Do not create a tag for each imported commit.
-
-\--stitch::
- Import the contents of the given perforce branch into the
- currently checked out git branch.
-
-\--log::
- Store debugging information in the specified file.
-
--t::
- Specify that the remote repository is in the specified timezone.
- Timezone must be in the format "US/Pacific" or "Europe/London"
- etc. You only need to specify this once, it will be saved in
- the git config file for the repository.
-
-<//p4repo/path>::
- The Perforce path that will be imported into the specified branch.
-
-<branch>::
- The new branch that will be created to hold the Perforce imports.
-
-
-P4 Client
----------
-You must make the `p4` client command available in your $PATH and
-configure it to communicate with the target Perforce repository.
-Typically this means you must set the "$P4PORT" and "$P4CLIENT"
-environment variables.
-
-You must also configure a `p4` client "view" which maps the Perforce
-branch into the top level of your git repository, for example:
-
-------------
-Client: myhost
-
-Root: /home/sean/import
-
-Options: noallwrite clobber nocompress unlocked modtime rmdir
-
-View:
- //public/jam/... //myhost/jam/...
-------------
-
-With the above `p4` client setup, you could import the "jam"
-perforce branch into a branch named "jammy", like so:
-
-------------
-$ mkdir -p /home/sean/import/jam
-$ cd /home/sean/import/jam
-$ git init
-$ git p4import //public/jam jammy
-------------
-
-
-Multiple Branches
------------------
-Note that by creating multiple "views" you can use `git-p4import`
-to import additional branches into the same git repository.
-However, the `p4` client has a limitation in that it silently
-ignores all but the last "view" that maps into the same local
-directory. So the following will *not* work:
-
-------------
-View:
- //public/jam/... //myhost/jam/...
- //public/other/... //myhost/jam/...
- //public/guest/... //myhost/jam/...
-------------
-
-If you want more than one Perforce branch to be imported into the
-same directory you must employ a workaround. A simple option is
-to adjust your `p4` client before each import to only include a
-single view.
-
-Another option is to create multiple symlinks locally which all
-point to the same directory in your git repository and then use
-one per "view" instead of listing the actual directory.
-
-
-Tags
-----
-A git tag of the form p4/xx is created for every change imported from
-the Perforce repository where xx is the Perforce changeset number.
-Therefore after the import you can use git to access any commit by its
-Perforce number, e.g. git show p4/327.
-
-The tag associated with the HEAD commit is also how `git-p4import`
-determines if there are new changes to incrementally import from the
-Perforce repository.
-
-If you import from a repository with many thousands of changes
-you will have an equal number of p4/xxxx git tags. Git tags can
-be expensive in terms of disk space and repository operations.
-If you don't need to perform further incremental imports, you
-may delete the tags.
-
-
-Notes
------
-You can interrupt the import (e.g. ctrl-c) at any time and restart it
-without worry.
-
-Author information is automatically determined by querying the
-Perforce "users" table using the id associated with each change.
-However, if you want to manually supply these mappings you can do
-so with the "--authors" option. It accepts a file containing a list
-of mappings with each line containing one mapping in the format:
-
-------------
- perforce_id = Full Name <email@address.com>
-------------
-
-
-Author
-------
-Written by Sean Estabrooks <seanlkml@sympatico.ca>
-
-
-GIT
----
-Part of the gitlink:git[7] suite
diff --git a/contrib/patches/docbook-xsl-manpages-charmap.patch b/contrib/patches/docbook-xsl-manpages-charmap.patch
deleted file mode 100644
index f2b08b4f4a..0000000000
--- a/contrib/patches/docbook-xsl-manpages-charmap.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-From: Ismail Dönmez <ismail@pardus.org.tr>
-
-Trying to build the documentation with docbook-xsl 1.73 may result in
-the following error. This patch fixes it.
-
-$ xmlto -m callouts.xsl man git-add.xml
-runtime error: file
-file:///usr/share/sgml/docbook/xsl-stylesheets-1.73.0/manpages/other.xsl line
-129 element call-template
-The called template 'read-character-map' was not found.
-
---- docbook-xsl-1.73.0/manpages/docbook.xsl.manpages-charmap 2007-07-23 16:24:23.000000000 +0100
-+++ docbook-xsl-1.73.0/manpages/docbook.xsl 2007-07-23 16:25:16.000000000 +0100
-@@ -37,6 +37,7 @@
- <xsl:include href="lists.xsl"/>
- <xsl:include href="endnotes.xsl"/>
- <xsl:include href="table.xsl"/>
-+ <xsl:include href="../common/charmap.xsl"/>
-
- <!-- * we rename the following just to avoid using params with "man" -->
- <!-- * prefixes in the table.xsl stylesheet (because that stylesheet -->
diff --git a/contrib/persistent-https/LICENSE b/contrib/persistent-https/LICENSE
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ b/contrib/persistent-https/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/persistent-https/Makefile b/contrib/persistent-https/Makefile
new file mode 100644
index 0000000000..92baa3beee
--- /dev/null
+++ b/contrib/persistent-https/Makefile
@@ -0,0 +1,38 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+BUILD_LABEL=$(shell date +"%s")
+TAR_OUT=$(shell go env GOOS)_$(shell go env GOARCH).tar.gz
+
+all: git-remote-persistent-https git-remote-persistent-https--proxy \
+ git-remote-persistent-http
+
+git-remote-persistent-https--proxy: git-remote-persistent-https
+ ln -f -s git-remote-persistent-https git-remote-persistent-https--proxy
+
+git-remote-persistent-http: git-remote-persistent-https
+ ln -f -s git-remote-persistent-https git-remote-persistent-http
+
+git-remote-persistent-https:
+ go build -o git-remote-persistent-https \
+ -ldflags "-X main._BUILD_EMBED_LABEL $(BUILD_LABEL)"
+
+clean:
+ rm -f git-remote-persistent-http* *.tar.gz
+
+tar: clean all
+ @chmod 555 git-remote-persistent-https
+ @tar -czf $(TAR_OUT) git-remote-persistent-http* README LICENSE
+ @echo
+ @echo "Created $(TAR_OUT)"
diff --git a/contrib/persistent-https/README b/contrib/persistent-https/README
new file mode 100644
index 0000000000..f784dd2e66
--- /dev/null
+++ b/contrib/persistent-https/README
@@ -0,0 +1,62 @@
+git-remote-persistent-https
+
+The git-remote-persistent-https binary speeds up SSL operations
+by running a daemon job (git-remote-persistent-https--proxy) that
+keeps a connection open to a server.
+
+
+PRE-BUILT BINARIES
+
+Darwin amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/darwin_amd64.tar.gz
+
+Linux amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/linux_amd64.tar.gz
+
+
+INSTALLING
+
+Move all of the git-remote-persistent-http* binaries to a directory
+in PATH.
+
+
+USAGE
+
+HTTPS requests can be delegated to the proxy by using the
+"persistent-https" scheme, e.g.
+
+git clone persistent-https://kernel.googlesource.com/pub/scm/git/git
+
+Likewise, .gitconfig can be updated as follows to rewrite https urls
+to use persistent-https:
+
+[url "persistent-https"]
+ insteadof = https
+[url "persistent-http"]
+ insteadof = http
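+
+The same rewrite rules can also be set from the command line, e.g.
+(this is equivalent to the snippet above):
+
+git config --global url."persistent-https".insteadOf https
+git config --global url."persistent-http".insteadOf http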
+
+
+#####################################################################
+# BUILDING FROM SOURCE
+#####################################################################
+
+LOCATION
+
+The source is available in the contrib/persistent-https directory of
+the Git source repository. The Git source repository is available at
+git://git.kernel.org/pub/scm/git/git.git/
+https://kernel.googlesource.com/pub/scm/git/git
+
+
+PREREQUISITES
+
+The code is written in Go (http://golang.org/) and the Go compiler is
+required. Currently, the compiler must be built and installed from tip
+of source, in order to include a fix in the reverse http proxy:
+http://code.google.com/p/go/source/detail?r=a615b796570a2cd8591884767a7d67ede74f6648
+
+
+BUILDING
+
+Run "make" to build the binaries. See the section on
+INSTALLING above.
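+
+For example (a sketch; the destination directory is only an illustration):
+
+cd contrib/persistent-https
+make
+cp git-remote-persistent-http* $HOME/bin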
diff --git a/contrib/persistent-https/client.go b/contrib/persistent-https/client.go
new file mode 100644
index 0000000000..71125b5832
--- /dev/null
+++ b/contrib/persistent-https/client.go
@@ -0,0 +1,189 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "net"
+ "net/url"
+ "os"
+ "os/exec"
+ "strings"
+ "syscall"
+ "time"
+)
+
+type Client struct {
+ ProxyBin string
+ Args []string
+
+ insecure bool
+}
+
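+// Run resolves the remote-helper arguments, connects to (or starts) the
+// proxy, and runs "git remote-http" with http.proxy pointing at the
+// proxy's address.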
+func (c *Client) Run() error {
+ if err := c.resolveArgs(); err != nil {
+ return fmt.Errorf("resolveArgs() got error: %v", err)
+ }
+
+ // Connect to the proxy.
+ uconn, hconn, addr, err := c.connect()
+ if err != nil {
+ return fmt.Errorf("connect() got error: %v", err)
+ }
+ // Keep the unix socket connection open for the duration of the request.
+ defer uconn.Close()
+ // Keep a connection to the HTTP server open, so no other user can
+ // bind on the same address so long as the process is running.
+ defer hconn.Close()
+
+ // Start the git-remote-http subprocess.
+ cargs := []string{"-c", fmt.Sprintf("http.proxy=%v", addr), "remote-http"}
+ cargs = append(cargs, c.Args...)
+ cmd := exec.Command("git", cargs...)
+
+ for _, v := range os.Environ() {
+ if !strings.HasPrefix(v, "GIT_PERSISTENT_HTTPS_SECURE=") {
+ cmd.Env = append(cmd.Env, v)
+ }
+ }
+ // Set the GIT_PERSISTENT_HTTPS_SECURE environment variable when
+ // the proxy is using a SSL connection. This allows credential helpers
+ // to identify secure proxy connections, despite being passed an HTTP
+ // scheme.
+ if !c.insecure {
+ cmd.Env = append(cmd.Env, "GIT_PERSISTENT_HTTPS_SECURE=1")
+ }
+
+ cmd.Stdin = os.Stdin
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ if eerr, ok := err.(*exec.ExitError); ok {
+ if stat, ok := eerr.ProcessState.Sys().(syscall.WaitStatus); ok && stat.ExitStatus() != 0 {
+ os.Exit(stat.ExitStatus())
+ }
+ }
+ return fmt.Errorf("git-remote-http subprocess got error: %v", err)
+ }
+ return nil
+}
+
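+// connect dials the proxy's unix socket (starting the proxy if nothing is
+// listening), reads the proxy address, opens a TCP connection to it, and
+// re-reads the address to verify the proxy has not changed ownership.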
+func (c *Client) connect() (uconn net.Conn, hconn net.Conn, addr string, err error) {
+ uconn, err = DefaultSocket.Dial()
+ if err != nil {
+ if e, ok := err.(*net.OpError); ok && (os.IsNotExist(e.Err) || e.Err == syscall.ECONNREFUSED) {
+ if err = c.startProxy(); err == nil {
+ uconn, err = DefaultSocket.Dial()
+ }
+ }
+ if err != nil {
+ return
+ }
+ }
+
+ if addr, err = c.readAddr(uconn); err != nil {
+ return
+ }
+
+ // Open a tcp connection to the proxy.
+ if hconn, err = net.Dial("tcp", addr); err != nil {
+ return
+ }
+
+ // Verify the address hasn't changed ownership.
+ var addr2 string
+ if addr2, err = c.readAddr(uconn); err != nil {
+ return
+ } else if addr != addr2 {
+ err = fmt.Errorf("address changed after connect. got %q, want %q", addr2, addr)
+ return
+ }
+ return
+}
+
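+// readAddr reads the newline-separated HTTPS and HTTP proxy addresses from
+// the unix socket, acknowledges them, and returns the one matching the
+// requested scheme.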
+func (c *Client) readAddr(conn net.Conn) (string, error) {
+ conn.SetDeadline(time.Now().Add(5 * time.Second))
+ data := make([]byte, 100)
+ n, err := conn.Read(data)
+ if err != nil {
+ return "", fmt.Errorf("error reading unix socket: %v", err)
+ } else if n == 0 {
+ return "", errors.New("empty data response")
+ }
+ conn.Write([]byte{1}) // Ack
+
+ var addr string
+ if addrs := strings.Split(string(data[:n]), "\n"); len(addrs) != 2 {
+ return "", fmt.Errorf("got %q, wanted 2 addresses", data[:n])
+ } else if c.insecure {
+ addr = addrs[1]
+ } else {
+ addr = addrs[0]
+ }
+ return addr, nil
+}
+
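+// startProxy launches the proxy binary in its own process group and waits
+// up to five seconds for it to report "OK" on stdout.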
+func (c *Client) startProxy() error {
+ cmd := exec.Command(c.ProxyBin)
+ cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
+ stdout, err := cmd.StdoutPipe()
+ if err != nil {
+ return err
+ }
+ defer stdout.Close()
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+ result := make(chan error)
+ go func() {
+ bytes, _, err := bufio.NewReader(stdout).ReadLine()
+ if line := string(bytes); err == nil && line != "OK" {
+ err = fmt.Errorf("proxy returned %q, want \"OK\"", line)
+ }
+ result <- err
+ }()
+ select {
+ case err := <-result:
+ return err
+ case <-time.After(5 * time.Second):
+ return errors.New("timeout waiting for proxy to start")
+ }
+ panic("not reachable")
+}
+
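+// resolveArgs validates the remote-helper arguments and rewrites the
+// persistent-https/persistent-http URL scheme to plain http for the
+// git-remote-http subprocess.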
+func (c *Client) resolveArgs() error {
+ if nargs := len(c.Args); nargs == 0 {
+ return errors.New("remote needed")
+ } else if nargs > 2 {
+ return fmt.Errorf("want at most 2 args, got %v", c.Args)
+ }
+
+ // Rewrite the url scheme to be http.
+ idx := len(c.Args) - 1
+ rawurl := c.Args[idx]
+ rurl, err := url.Parse(rawurl)
+ if err != nil {
+ return fmt.Errorf("invalid remote: %v", err)
+ }
+ c.insecure = rurl.Scheme == "persistent-http"
+ rurl.Scheme = "http"
+ c.Args[idx] = rurl.String()
+ if idx != 0 && c.Args[0] == rawurl {
+ c.Args[0] = c.Args[idx]
+ }
+ return nil
+}
diff --git a/contrib/persistent-https/main.go b/contrib/persistent-https/main.go
new file mode 100644
index 0000000000..fd1b107743
--- /dev/null
+++ b/contrib/persistent-https/main.go
@@ -0,0 +1,82 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The git-remote-persistent-https binary speeds up SSL operations by running
+// a daemon job that keeps a connection open to a Git server. It ensures the
+// git-remote-persistent-https--proxy daemon is running and delegates
+// execution to the git-remote-http binary with http.proxy set to the
+// daemon's proxy address.
+// A unix socket is used to authenticate the proxy and discover the
+// HTTP address. Note, both the client and proxy are included in the same
+// binary.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "log"
+ "os"
+ "strings"
+ "time"
+)
+
+var (
+ forceProxy = flag.Bool("proxy", false, "Whether to start the binary in proxy mode")
+ proxyBin = flag.String("proxy_bin", "git-remote-persistent-https--proxy", "Path to the proxy binary")
+ printLabel = flag.Bool("print_label", false, "Prints the build label for the binary")
+
+ // Variable that should be defined through the -X linker flag.
+ _BUILD_EMBED_LABEL string
+)
+
+const (
+ defaultMaxIdleDuration = 24 * time.Hour
+ defaultPollUpdateInterval = 15 * time.Minute
+)
+
+func main() {
+ flag.Parse()
+ if *printLabel {
+ // Short circuit execution to print the build label
+ fmt.Println(buildLabel())
+ return
+ }
+
+ var err error
+ if *forceProxy || strings.HasSuffix(os.Args[0], "--proxy") {
+ log.SetPrefix("git-remote-persistent-https--proxy: ")
+ proxy := &Proxy{
+ BuildLabel: buildLabel(),
+ MaxIdleDuration: defaultMaxIdleDuration,
+ PollUpdateInterval: defaultPollUpdateInterval,
+ }
+ err = proxy.Run()
+ } else {
+ log.SetPrefix("git-remote-persistent-https: ")
+ client := &Client{
+ ProxyBin: *proxyBin,
+ Args: flag.Args(),
+ }
+ err = client.Run()
+ }
+ if err != nil {
+ log.Fatalln(err)
+ }
+}
+
+func buildLabel() string {
+ if _BUILD_EMBED_LABEL == "" {
+ log.Println(`unlabeled build; build with "make" to label`)
+ }
+ return _BUILD_EMBED_LABEL
+}
diff --git a/contrib/persistent-https/proxy.go b/contrib/persistent-https/proxy.go
new file mode 100644
index 0000000000..bb0cdba386
--- /dev/null
+++ b/contrib/persistent-https/proxy.go
@@ -0,0 +1,190 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "net/http/httputil"
+ "os"
+ "os/exec"
+ "os/signal"
+ "sync"
+ "syscall"
+ "time"
+)
+
+type Proxy struct {
+ BuildLabel string
+ MaxIdleDuration time.Duration
+ PollUpdateInterval time.Duration
+
+ ul net.Listener
+ httpAddr string
+ httpsAddr string
+}
+
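+// Run starts HTTP and HTTPS reverse proxies on loopback ports, publishes
+// their addresses over the unix socket, and serves until stopped by a
+// signal, an idle timeout, or an updated binary.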
+func (p *Proxy) Run() error {
+ hl, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return fmt.Errorf("http listen failed: %v", err)
+ }
+ defer hl.Close()
+
+ hsl, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return fmt.Errorf("https listen failed: %v", err)
+ }
+ defer hsl.Close()
+
+ p.ul, err = DefaultSocket.Listen()
+ if err != nil {
+ c, derr := DefaultSocket.Dial()
+ if derr == nil {
+ c.Close()
+ fmt.Println("OK\nA proxy is already running... exiting")
+ return nil
+ } else if e, ok := derr.(*net.OpError); ok && e.Err == syscall.ECONNREFUSED {
+ // Nothing is listening on the socket, unlink it and try again.
+ syscall.Unlink(DefaultSocket.Path())
+ p.ul, err = DefaultSocket.Listen()
+ }
+ if err != nil {
+ return fmt.Errorf("unix listen failed on %v: %v", DefaultSocket.Path(), err)
+ }
+ }
+ defer p.ul.Close()
+ go p.closeOnSignal()
+ go p.closeOnUpdate()
+
+ p.httpAddr = hl.Addr().String()
+ p.httpsAddr = hsl.Addr().String()
+ fmt.Printf("OK\nListening on unix socket=%v http=%v https=%v\n",
+ p.ul.Addr(), p.httpAddr, p.httpsAddr)
+
+ result := make(chan error, 2)
+ go p.serveUnix(result)
+ go func() {
+ result <- http.Serve(hl, &httputil.ReverseProxy{
+ FlushInterval: 500 * time.Millisecond,
+ Director: func(r *http.Request) {},
+ })
+ }()
+ go func() {
+ result <- http.Serve(hsl, &httputil.ReverseProxy{
+ FlushInterval: 500 * time.Millisecond,
+ Director: func(r *http.Request) {
+ r.URL.Scheme = "https"
+ },
+ })
+ }()
+ return <-result
+}
+
+type socketContext struct {
+ sync.WaitGroup
+ mutex sync.Mutex
+ last time.Time
+}
+
+func (sc *socketContext) Done() {
+ sc.mutex.Lock()
+ defer sc.mutex.Unlock()
+ sc.last = time.Now()
+ sc.WaitGroup.Done()
+}
+
+func (p *Proxy) serveUnix(result chan<- error) {
+ sockCtx := &socketContext{}
+ go p.closeOnIdle(sockCtx)
+
+ var err error
+ for {
+ var uconn net.Conn
+ uconn, err = p.ul.Accept()
+ if err != nil {
+ err = fmt.Errorf("accept failed: %v", err)
+ break
+ }
+ sockCtx.Add(1)
+ go p.handleUnixConn(sockCtx, uconn)
+ }
+ sockCtx.Wait()
+ result <- err
+}
+
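+// handleUnixConn sends the HTTPS and HTTP proxy addresses to a client twice
+// (so the client can detect an ownership change), waiting for an ack after
+// each send, then keeps the connection open until the client disconnects.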
+func (p *Proxy) handleUnixConn(sockCtx *socketContext, uconn net.Conn) {
+ defer sockCtx.Done()
+ defer uconn.Close()
+ data := []byte(fmt.Sprintf("%v\n%v", p.httpsAddr, p.httpAddr))
+ uconn.SetDeadline(time.Now().Add(5 * time.Second))
+ for i := 0; i < 2; i++ {
+ if n, err := uconn.Write(data); err != nil {
+ log.Printf("error sending http addresses: %+v\n", err)
+ return
+ } else if n != len(data) {
+ log.Printf("sent %d data bytes, wanted %d\n", n, len(data))
+ return
+ }
+ if _, err := uconn.Read([]byte{0, 0, 0, 0}); err != nil {
+ log.Printf("error waiting for Ack: %+v\n", err)
+ return
+ }
+ }
+ // Wait without a deadline for the client to finish via EOF
+ uconn.SetDeadline(time.Time{})
+ uconn.Read([]byte{0, 0, 0, 0})
+}
+
+func (p *Proxy) closeOnIdle(sockCtx *socketContext) {
+ for d := p.MaxIdleDuration; d > 0; {
+ time.Sleep(d)
+ sockCtx.Wait()
+ sockCtx.mutex.Lock()
+ if d = sockCtx.last.Add(p.MaxIdleDuration).Sub(time.Now()); d <= 0 {
+ log.Println("graceful shutdown from idle timeout")
+ p.ul.Close()
+ }
+ sockCtx.mutex.Unlock()
+ }
+}
+
+func (p *Proxy) closeOnUpdate() {
+ for {
+ time.Sleep(p.PollUpdateInterval)
+ if out, err := exec.Command(os.Args[0], "--print_label").Output(); err != nil {
+ log.Printf("error polling for updated binary: %v\n", err)
+ } else if s := string(out[:len(out)-1]); p.BuildLabel != s {
+ log.Printf("graceful shutdown from updated binary: %q --> %q\n", p.BuildLabel, s)
+ p.ul.Close()
+ break
+ }
+ }
+}
+
+func (p *Proxy) closeOnSignal() {
+ ch := make(chan os.Signal, 10)
+ signal.Notify(ch, os.Interrupt, os.Kill, os.Signal(syscall.SIGTERM), os.Signal(syscall.SIGHUP))
+ sig := <-ch
+ p.ul.Close()
+ switch sig {
+ case os.Signal(syscall.SIGHUP):
+ log.Printf("graceful shutdown from signal: %v\n", sig)
+ default:
+ log.Fatalf("exiting from signal: %v\n", sig)
+ }
+}
diff --git a/contrib/persistent-https/socket.go b/contrib/persistent-https/socket.go
new file mode 100644
index 0000000000..193b911dd1
--- /dev/null
+++ b/contrib/persistent-https/socket.go
@@ -0,0 +1,97 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net"
+ "os"
+ "path/filepath"
+ "syscall"
+)
+
+// A Socket is a wrapper around a Unix socket that verifies directory
+// permissions.
+type Socket struct {
+ Dir string
+}
+
+func defaultDir() string {
+ sockPath := ".git-credential-cache"
+ if home := os.Getenv("HOME"); home != "" {
+ return filepath.Join(home, sockPath)
+ }
+ log.Printf("socket: cannot find HOME path. using relative directory %q for socket", sockPath)
+ return sockPath
+}
+
+// DefaultSocket is a Socket in the $HOME/.git-credential-cache directory.
+var DefaultSocket = Socket{Dir: defaultDir()}
+
+// Listen announces the local network address of the unix socket. The
+// permissions on the socket directory are verified before attempting
+// the actual listen.
+func (s Socket) Listen() (net.Listener, error) {
+ network, addr := "unix", s.Path()
+ if err := s.mkdir(); err != nil {
+ return nil, &net.OpError{Op: "listen", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+ }
+ return net.Listen(network, addr)
+}
+
+// Dial connects to the unix socket. The permissions on the socket directory
+// are verified before attempting the actual dial.
+func (s Socket) Dial() (net.Conn, error) {
+ network, addr := "unix", s.Path()
+ if err := s.checkPermissions(); err != nil {
+ return nil, &net.OpError{Op: "dial", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+ }
+ return net.Dial(network, addr)
+}
+
+// Path returns the fully specified file name of the unix socket.
+func (s Socket) Path() string {
+ return filepath.Join(s.Dir, "persistent-https-proxy-socket")
+}
+
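+// mkdir creates the socket directory with 0700 permissions if it does not
+// already exist, then re-checks its permissions.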
+func (s Socket) mkdir() error {
+ if err := s.checkPermissions(); err == nil {
+ return nil
+ } else if !os.IsNotExist(err) {
+ return err
+ }
+ if err := os.MkdirAll(s.Dir, 0700); err != nil {
+ return err
+ }
+ return s.checkPermissions()
+}
+
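+// checkPermissions verifies that the socket directory exists, is a
+// directory, is owned by the current user, and is mode 0700.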
+func (s Socket) checkPermissions() error {
+ fi, err := os.Stat(s.Dir)
+ if err != nil {
+ return err
+ }
+ if !fi.IsDir() {
+ return fmt.Errorf("socket: got file, want directory for %q", s.Dir)
+ }
+ if fi.Mode().Perm() != 0700 {
+ return fmt.Errorf("socket: got perm %o, want 700 for %q", fi.Mode().Perm(), s.Dir)
+ }
+ if st := fi.Sys().(*syscall.Stat_t); int(st.Uid) != os.Getuid() {
+ return fmt.Errorf("socket: got uid %d, want %d for %q", st.Uid, os.Getuid(), s.Dir)
+ }
+ return nil
+}
diff --git a/contrib/remote-helpers/README b/contrib/remote-helpers/README
new file mode 100644
index 0000000000..ac72332517
--- /dev/null
+++ b/contrib/remote-helpers/README
@@ -0,0 +1,15 @@
+The remote-helper bridges to access data stored in Mercurial and
+Bazaar are maintained outside the git.git tree in the repositories
+of their primary author:
+
+ https://github.com/felipec/git-remote-hg (for Mercurial)
+ https://github.com/felipec/git-remote-bzr (for Bazaar)
+
+You can pick a directory on your $PATH and download them from these
+repositories, e.g.:
+
+ $ wget -O $HOME/bin/git-remote-hg \
+ https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
+ $ wget -O $HOME/bin/git-remote-bzr \
+ https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
+ $ chmod +x $HOME/bin/git-remote-hg $HOME/bin/git-remote-bzr
diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr
new file mode 100755
index 0000000000..712a1377e2
--- /dev/null
+++ b/contrib/remote-helpers/git-remote-bzr
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+import sys
+
+sys.stderr.write('WARNING: git-remote-bzr is now maintained independently.\n')
+sys.stderr.write('WARNING: For more information visit https://github.com/felipec/git-remote-bzr\n')
+
+sys.stderr.write('''WARNING:
+WARNING: You can pick a directory on your $PATH and download it, e.g.:
+WARNING: $ wget -O $HOME/bin/git-remote-bzr \\
+WARNING: https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
+WARNING: $ chmod +x $HOME/bin/git-remote-bzr
+''')
diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg
new file mode 100755
index 0000000000..4255ad6312
--- /dev/null
+++ b/contrib/remote-helpers/git-remote-hg
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+import sys
+
+sys.stderr.write('WARNING: git-remote-hg is now maintained independently.\n')
+sys.stderr.write('WARNING: For more information visit https://github.com/felipec/git-remote-hg\n')
+
+sys.stderr.write('''WARNING:
+WARNING: You can pick a directory on your $PATH and download it, e.g.:
+WARNING: $ wget -O $HOME/bin/git-remote-hg \\
+WARNING: https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
+WARNING: $ chmod +x $HOME/bin/git-remote-hg
+''')
diff --git a/contrib/rerere-train.sh b/contrib/rerere-train.sh
index 2cfe1b936b..36b6feebe0 100755
--- a/contrib/rerere-train.sh
+++ b/contrib/rerere-train.sh
@@ -7,7 +7,7 @@ USAGE="$me rev-list-args"
SUBDIRECTORY_OK=Yes
OPTIONS_SPEC=
-. git-sh-setup
+. $(git --exec-path)/git-sh-setup
require_work_tree
cd_to_toplevel
diff --git a/contrib/stats/mailmap.pl b/contrib/stats/mailmap.pl
index 4b852e2455..9513f5e35b 100755
--- a/contrib/stats/mailmap.pl
+++ b/contrib/stats/mailmap.pl
@@ -1,38 +1,70 @@
-#!/usr/bin/perl -w
-my %mailmap = ();
-open I, "<", ".mailmap";
-while (<I>) {
- chomp;
- next if /^#/;
- if (my ($author, $mail) = /^(.*?)\s+<(.+)>$/) {
- $mailmap{$mail} = $author;
- }
+#!/usr/bin/perl
+
+use warnings 'all';
+use strict;
+use Getopt::Long;
+
+my $match_emails;
+my $match_names;
+my $order_by = 'count';
+Getopt::Long::Configure(qw(bundling));
+GetOptions(
+ 'emails|e!' => \$match_emails,
+ 'names|n!' => \$match_names,
+ 'count|c' => sub { $order_by = 'count' },
+ 'time|t' => sub { $order_by = 'stamp' },
+) or exit 1;
+$match_emails = 1 unless $match_names;
+
+my $email = {};
+my $name = {};
+
+open(my $fh, '-|', "git log --format='%at <%aE> %aN'");
+while(<$fh>) {
+ my ($t, $e, $n) = /(\S+) <(\S+)> (.*)/;
+ mark($email, $e, $n, $t);
+ mark($name, $n, $e, $t);
}
-close I;
-
-my %mail2author = ();
-open I, "git log --pretty='format:%ae %an' |";
-while (<I>) {
- chomp;
- my ($mail, $author) = split(/\t/, $_);
- next if exists $mailmap{$mail};
- $mail2author{$mail} ||= {};
- $mail2author{$mail}{$author} ||= 0;
- $mail2author{$mail}{$author}++;
+close($fh);
+
+if ($match_emails) {
+ foreach my $e (dups($email)) {
+ foreach my $n (vals($email->{$e})) {
+ show($n, $e, $email->{$e}->{$n});
+ }
+ print "\n";
+ }
}
-close I;
-
-while (my ($mail, $authorcount) = each %mail2author) {
- # %$authorcount is ($author => $count);
- # sort and show the names from the most frequent ones.
- my @names = (map { $_->[0] }
- sort { $b->[1] <=> $a->[1] }
- map { [$_, $authorcount->{$_}] }
- keys %$authorcount);
- if (1 < @names) {
- for (@names) {
- print "$_ <$mail>\n";
+if ($match_names) {
+ foreach my $n (dups($name)) {
+ foreach my $e (vals($name->{$n})) {
+ show($n, $e, $name->{$n}->{$e});
}
+ print "\n";
}
}
+exit 0;
+sub mark {
+ my ($h, $k, $v, $t) = @_;
+ my $e = $h->{$k}->{$v} ||= { count => 0, stamp => 0 };
+ $e->{count}++;
+ $e->{stamp} = $t unless $t < $e->{stamp};
+}
+
+sub dups {
+ my $h = shift;
+ return grep { keys($h->{$_}) > 1 } keys($h);
+}
+
+sub vals {
+ my $h = shift;
+ return sort {
+ $h->{$b}->{$order_by} <=> $h->{$a}->{$order_by}
+ } keys($h);
+}
+
+sub show {
+ my ($n, $e, $h) = @_;
+ print "$n <$e> ($h->{$order_by})\n";
+}
diff --git a/contrib/subtree/.gitignore b/contrib/subtree/.gitignore
new file mode 100644
index 0000000000..0b9381abca
--- /dev/null
+++ b/contrib/subtree/.gitignore
@@ -0,0 +1,7 @@
+*~
+git-subtree
+git-subtree.1
+git-subtree.html
+git-subtree.xml
+mainline
+subproj
diff --git a/contrib/subtree/COPYING b/contrib/subtree/COPYING
new file mode 100644
index 0000000000..d511905c16
--- /dev/null
+++ b/contrib/subtree/COPYING
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/contrib/subtree/INSTALL b/contrib/subtree/INSTALL
new file mode 100644
index 0000000000..7ab0cf4509
--- /dev/null
+++ b/contrib/subtree/INSTALL
@@ -0,0 +1,28 @@
+HOW TO INSTALL git-subtree
+==========================
+
+First, build from the top source directory.
+
+Then, in contrib/subtree, run:
+
+ make
+ make install
+ make install-doc
+
+If you used configure to do the main build, the git-subtree build will
+pick up those settings. If not, you will likely have to provide a
+value for prefix:
+
+ make prefix=<some dir>
+ make prefix=<some dir> install
+ make prefix=<some dir> install-doc
+
+To run tests, first copy git-subtree to the main build area so that the
+newly-built git can find it:
+
+ cp git-subtree ../..
+
+Then:
+
+ make test
+
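
As a concrete sequence combining the steps above (the /opt/git prefix is
purely illustrative):

    $ make prefix=/opt/git
    $ make prefix=/opt/git install install-doc
    $ cp git-subtree ../.. && make test
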
diff --git a/contrib/subtree/Makefile b/contrib/subtree/Makefile
new file mode 100644
index 0000000000..3071baf493
--- /dev/null
+++ b/contrib/subtree/Makefile
@@ -0,0 +1,81 @@
+# The default target of this Makefile is...
+all::
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+gitexecdir ?= $(prefix)/libexec/git-core
+mandir ?= $(prefix)/share/man
+man1dir ?= $(mandir)/man1
+htmldir ?= $(prefix)/share/doc/git-doc
+
+../../GIT-VERSION-FILE: FORCE
+ $(MAKE) -C ../../ GIT-VERSION-FILE
+
+-include ../../GIT-VERSION-FILE
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL ?= install
+RM ?= rm -f
+
+ASCIIDOC = asciidoc
+XMLTO = xmlto
+
+ifndef SHELL_PATH
+ SHELL_PATH = /bin/sh
+endif
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+ASCIIDOC_CONF = ../../Documentation/asciidoc.conf
+MANPAGE_XSL = ../../Documentation/manpage-normal.xsl
+
+GIT_SUBTREE_SH := git-subtree.sh
+GIT_SUBTREE := git-subtree
+
+GIT_SUBTREE_DOC := git-subtree.1
+GIT_SUBTREE_XML := git-subtree.xml
+GIT_SUBTREE_TXT := git-subtree.txt
+GIT_SUBTREE_HTML := git-subtree.html
+
+all:: $(GIT_SUBTREE)
+
+$(GIT_SUBTREE): $(GIT_SUBTREE_SH)
+ sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' $< >$@
+ chmod +x $@
+
+doc: $(GIT_SUBTREE_DOC) $(GIT_SUBTREE_HTML)
+
+install: $(GIT_SUBTREE)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(gitexecdir)
+ $(INSTALL) -m 755 $(GIT_SUBTREE) $(DESTDIR)$(gitexecdir)
+
+install-doc: install-man install-html
+
+install-man: $(GIT_SUBTREE_DOC)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
+ $(INSTALL) -m 644 $^ $(DESTDIR)$(man1dir)
+
+install-html: $(GIT_SUBTREE_HTML)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(htmldir)
+ $(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir)
+
+$(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML)
+ $(XMLTO) -m $(MANPAGE_XSL) man $^
+
+$(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
+ $(ASCIIDOC) -b docbook -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(GIT_VERSION) $^
+
+$(GIT_SUBTREE_HTML): $(GIT_SUBTREE_TXT)
+ $(ASCIIDOC) -b xhtml11 -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(GIT_VERSION) $^
+
+test:
+ $(MAKE) -C t/ test
+
+clean:
+ $(RM) $(GIT_SUBTREE)
+ $(RM) *.xml *.html *.1
+
+.PHONY: FORCE
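
Because the install targets above prepend $(DESTDIR) to every path, a staged
install is possible without touching the live system (a sketch; the paths are
illustrative):

    $ make
    $ make DESTDIR=/tmp/stage prefix=/usr install install-doc
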
diff --git a/contrib/subtree/README b/contrib/subtree/README
new file mode 100644
index 0000000000..c686b4a69b
--- /dev/null
+++ b/contrib/subtree/README
@@ -0,0 +1,8 @@
+
+Please read git-subtree.txt for documentation.
+
+Please don't contact me using github mail; it's slow, ugly, and worst of
+all, redundant. Email me instead at apenwarr@gmail.com and I'll be happy to
+help.
+
+Avery
diff --git a/contrib/subtree/git-subtree.sh b/contrib/subtree/git-subtree.sh
new file mode 100755
index 0000000000..fa1a5839af
--- /dev/null
+++ b/contrib/subtree/git-subtree.sh
@@ -0,0 +1,737 @@
+#!/bin/sh
+#
+# git-subtree.sh: split/join git repositories in subdirectories of this one
+#
+# Copyright (C) 2009 Avery Pennarun <apenwarr@gmail.com>
+#
+if [ $# -eq 0 ]; then
+ set -- -h
+fi
+OPTS_SPEC="\
+git subtree add --prefix=<prefix> <commit>
+git subtree add --prefix=<prefix> <repository> <ref>
+git subtree merge --prefix=<prefix> <commit>
+git subtree pull --prefix=<prefix> <repository> <ref>
+git subtree push --prefix=<prefix> <repository> <ref>
+git subtree split --prefix=<prefix> <commit...>
+--
+h,help show the help
+q quiet
+d show debug messages
+P,prefix= the name of the subdir to split out
+m,message= use the given message as the commit message for the merge commit
+ options for 'split'
+annotate= add a prefix to commit message of new commits
+b,branch= create a new branch from the split subtree
+ignore-joins ignore prior --rejoin commits
+onto= try connecting new tree to an existing one
+rejoin merge the new branch back into HEAD
+ options for 'add', 'merge', 'pull' and 'push'
+squash merge subtree changes as a single commit
+"
+eval "$(echo "$OPTS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+
+PATH=$PATH:$(git --exec-path)
+. git-sh-setup
+
+require_work_tree
+
+quiet=
+branch=
+debug=
+command=
+onto=
+rejoin=
+ignore_joins=
+annotate=
+squash=
+message=
+prefix=
+
+debug()
+{
+ if [ -n "$debug" ]; then
+ echo "$@" >&2
+ fi
+}
+
+say()
+{
+ if [ -z "$quiet" ]; then
+ echo "$@" >&2
+ fi
+}
+
+assert()
+{
+ if "$@"; then
+ :
+ else
+ die "assertion failed: " "$@"
+ fi
+}
+
+
+#echo "Options: $*"
+
+while [ $# -gt 0 ]; do
+ opt="$1"
+ shift
+ case "$opt" in
+ -q) quiet=1 ;;
+ -d) debug=1 ;;
+ --annotate) annotate="$1"; shift ;;
+ --no-annotate) annotate= ;;
+ -b) branch="$1"; shift ;;
+ -P) prefix="$1"; shift ;;
+ -m) message="$1"; shift ;;
+ --no-prefix) prefix= ;;
+ --onto) onto="$1"; shift ;;
+ --no-onto) onto= ;;
+ --rejoin) rejoin=1 ;;
+ --no-rejoin) rejoin= ;;
+ --ignore-joins) ignore_joins=1 ;;
+ --no-ignore-joins) ignore_joins= ;;
+ --squash) squash=1 ;;
+ --no-squash) squash= ;;
+ --) break ;;
+ *) die "Unexpected option: $opt" ;;
+ esac
+done
+
+command="$1"
+shift
+case "$command" in
+ add|merge|pull) default= ;;
+ split|push) default="--default HEAD" ;;
+ *) die "Unknown command '$command'" ;;
+esac
+
+if [ -z "$prefix" ]; then
+ die "You must provide the --prefix option."
+fi
+
+case "$command" in
+ add) [ -e "$prefix" ] &&
+ die "prefix '$prefix' already exists." ;;
+ *) [ -e "$prefix" ] ||
+ die "'$prefix' does not exist; use 'git subtree add'" ;;
+esac
+
+dir="$(dirname "$prefix/.")"
+
+if [ "$command" != "pull" -a "$command" != "add" -a "$command" != "push" ]; then
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ dirs="$(git rev-parse --no-revs --no-flags "$@")" || exit $?
+ if [ -n "$dirs" ]; then
+ die "Error: Use --prefix instead of bare filenames."
+ fi
+fi
+
+debug "command: {$command}"
+debug "quiet: {$quiet}"
+debug "revs: {$revs}"
+debug "dir: {$dir}"
+debug "opts: {$*}"
+debug
+
+cache_setup()
+{
+ cachedir="$GIT_DIR/subtree-cache/$$"
+ rm -rf "$cachedir" || die "Can't delete old cachedir: $cachedir"
+ mkdir -p "$cachedir" || die "Can't create new cachedir: $cachedir"
+ mkdir -p "$cachedir/notree" || die "Can't create new cachedir: $cachedir/notree"
+ debug "Using cachedir: $cachedir" >&2
+}
+
+cache_get()
+{
+ for oldrev in $*; do
+ if [ -r "$cachedir/$oldrev" ]; then
+ read newrev <"$cachedir/$oldrev"
+ echo $newrev
+ fi
+ done
+}
+
+cache_miss()
+{
+ for oldrev in $*; do
+ if [ ! -r "$cachedir/$oldrev" ]; then
+ echo $oldrev
+ fi
+ done
+}
+
+check_parents()
+{
+ missed=$(cache_miss $*)
+ for miss in $missed; do
+ if [ ! -r "$cachedir/notree/$miss" ]; then
+ debug " incorrect order: $miss"
+ fi
+ done
+}
+
+set_notree()
+{
+ echo "1" > "$cachedir/notree/$1"
+}
+
+cache_set()
+{
+ oldrev="$1"
+ newrev="$2"
+ if [ "$oldrev" != "latest_old" \
+ -a "$oldrev" != "latest_new" \
+ -a -e "$cachedir/$oldrev" ]; then
+ die "cache for $oldrev already exists!"
+ fi
+ echo "$newrev" >"$cachedir/$oldrev"
+}
+
+rev_exists()
+{
+ if git rev-parse "$1" >/dev/null 2>&1; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+rev_is_descendant_of_branch()
+{
+ newrev="$1"
+ branch="$2"
+ branch_hash=$(git rev-parse $branch)
+ match=$(git rev-list -1 $branch_hash ^$newrev)
+
+ if [ -z "$match" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# if a commit doesn't have a parent, this might not work. But we only want
+# to remove the parent from the rev-list, and since it doesn't exist, it won't
+# be there anyway, so do nothing in that case.
+try_remove_previous()
+{
+ if rev_exists "$1^"; then
+ echo "^$1^"
+ fi
+}
+
+find_latest_squash()
+{
+ debug "Looking for latest squash ($dir)..."
+ dir="$1"
+ sq=
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' HEAD |
+ while read a b junk; do
+ debug "$a $b $junk"
+ debug "{{$sq/$main/$sub}}"
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ if [ -n "$sub" ]; then
+ if [ -n "$main" ]; then
+ # a rejoin commit?
+ # Pretend its sub was a squash.
+ sq="$sub"
+ fi
+ debug "Squash found: $sq $sub"
+ echo "$sq" "$sub"
+ break
+ fi
+ sq=
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+find_existing_splits()
+{
+ debug "Looking for prior splits..."
+ dir="$1"
+ revs="$2"
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' $revs |
+ while read a b junk; do
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ debug " Main is: '$main'"
+ if [ -z "$main" -a -n "$sub" ]; then
+ # squash commits refer to a subtree
+ debug " Squash: $sq from $sub"
+ cache_set "$sq" "$sub"
+ fi
+ if [ -n "$main" -a -n "$sub" ]; then
+ debug " Prior: $main -> $sub"
+ cache_set $main $sub
+ cache_set $sub $sub
+ try_remove_previous "$main"
+ try_remove_previous "$sub"
+ fi
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+copy_commit()
+{
+ # We're going to set some environment vars here, so
+ # do it in a subshell to get rid of them safely later
+ debug copy_commit "{$1}" "{$2}" "{$3}"
+ git log -1 --pretty=format:'%an%n%ae%n%ad%n%cn%n%ce%n%cd%n%B' "$1" |
+ (
+ read GIT_AUTHOR_NAME
+ read GIT_AUTHOR_EMAIL
+ read GIT_AUTHOR_DATE
+ read GIT_COMMITTER_NAME
+ read GIT_COMMITTER_EMAIL
+ read GIT_COMMITTER_DATE
+ export GIT_AUTHOR_NAME \
+ GIT_AUTHOR_EMAIL \
+ GIT_AUTHOR_DATE \
+ GIT_COMMITTER_NAME \
+ GIT_COMMITTER_EMAIL \
+ GIT_COMMITTER_DATE
+ (printf "%s" "$annotate"; cat ) |
+ git commit-tree "$2" $3 # reads the rest of stdin
+ ) || die "Can't copy commit $1"
+}
+
+add_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Add '$dir/' from commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+add_squashed_msg()
+{
+ if [ -n "$message" ]; then
+ echo "$message"
+ else
+ echo "Merge commit '$1' as '$2'"
+ fi
+}
+
+rejoin_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Split '$dir/' into commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+squash_msg()
+{
+ dir="$1"
+ oldsub="$2"
+ newsub="$3"
+ newsub_short=$(git rev-parse --short "$newsub")
+
+ if [ -n "$oldsub" ]; then
+ oldsub_short=$(git rev-parse --short "$oldsub")
+ echo "Squashed '$dir/' changes from $oldsub_short..$newsub_short"
+ echo
+ git log --pretty=tformat:'%h %s' "$oldsub..$newsub"
+ git log --pretty=tformat:'REVERT: %h %s' "$newsub..$oldsub"
+ else
+ echo "Squashed '$dir/' content from commit $newsub_short"
+ fi
+
+ echo
+ echo "git-subtree-dir: $dir"
+ echo "git-subtree-split: $newsub"
+}
+
+toptree_for_commit()
+{
+ commit="$1"
+ git log -1 --pretty=format:'%T' "$commit" -- || exit $?
+}
+
+subtree_for_commit()
+{
+ commit="$1"
+ dir="$2"
+ git ls-tree "$commit" -- "$dir" |
+ while read mode type tree name; do
+ assert [ "$name" = "$dir" ]
+ assert [ "$type" = "tree" -o "$type" = "commit" ]
+ [ "$type" = "commit" ] && continue # ignore submodules
+ echo $tree
+ break
+ done
+}
+
+tree_changed()
+{
+ tree=$1
+ shift
+ if [ $# -ne 1 ]; then
+ return 0 # weird parents, consider it changed
+ else
+ ptree=$(toptree_for_commit $1)
+ if [ "$ptree" != "$tree" ]; then
+ return 0 # changed
+ else
+ return 1 # not changed
+ fi
+ fi
+}
+
+new_squash_commit()
+{
+ old="$1"
+ oldsub="$2"
+ newsub="$3"
+ tree=$(toptree_for_commit $newsub) || exit $?
+ if [ -n "$old" ]; then
+ squash_msg "$dir" "$oldsub" "$newsub" |
+ git commit-tree "$tree" -p "$old" || exit $?
+ else
+ squash_msg "$dir" "" "$newsub" |
+ git commit-tree "$tree" || exit $?
+ fi
+}
+
+copy_or_skip()
+{
+ rev="$1"
+ tree="$2"
+ newparents="$3"
+ assert [ -n "$tree" ]
+
+ identical=
+ nonidentical=
+ p=
+ gotparents=
+ for parent in $newparents; do
+ ptree=$(toptree_for_commit $parent) || exit $?
+ [ -z "$ptree" ] && continue
+ if [ "$ptree" = "$tree" ]; then
+ # an identical parent could be used in place of this rev.
+ identical="$parent"
+ else
+ nonidentical="$parent"
+ fi
+
+ # sometimes both old parents map to the same newparent;
+ # eliminate duplicates
+ is_new=1
+ for gp in $gotparents; do
+ if [ "$gp" = "$parent" ]; then
+ is_new=
+ break
+ fi
+ done
+ if [ -n "$is_new" ]; then
+ gotparents="$gotparents $parent"
+ p="$p -p $parent"
+ fi
+ done
+
+ if [ -n "$identical" ]; then
+ echo $identical
+ else
+ copy_commit $rev $tree "$p" || exit $?
+ fi
+}
+
+ensure_clean()
+{
+ if ! git diff-index HEAD --exit-code --quiet 2>&1; then
+ die "Working tree has modifications. Cannot add."
+ fi
+ if ! git diff-index --cached HEAD --exit-code --quiet 2>&1; then
+ die "Index has modifications. Cannot add."
+ fi
+}
+
+ensure_valid_ref_format()
+{
+ git check-ref-format "refs/heads/$1" ||
+ die "'$1' does not look like a ref"
+}
+
+cmd_add()
+{
+ if [ -e "$dir" ]; then
+ die "'$dir' already exists. Cannot add."
+ fi
+
+ ensure_clean
+
+ if [ $# -eq 1 ]; then
+ git rev-parse -q --verify "$1^{commit}" >/dev/null ||
+ die "'$1' does not refer to a commit"
+
+ "cmd_add_commit" "$@"
+ elif [ $# -eq 2 ]; then
+ # Technically we could accept a refspec here but we're
+ # just going to turn around and add FETCH_HEAD under the
+ # specified directory. Allowing a refspec might be
+ # misleading because we won't do anything with any other
+ # branches fetched via the refspec.
+ ensure_valid_ref_format "$2"
+
+ "cmd_add_repository" "$@"
+ else
+ say "error: parameters were '$@'"
+ die "Provide either a commit or a repository and commit."
+ fi
+}
+
+cmd_add_repository()
+{
+ echo "git fetch" "$@"
+ repository=$1
+ refspec=$2
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_add_commit "$@"
+}
+
+cmd_add_commit()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ set -- $revs
+ rev="$1"
+
+ debug "Adding $dir as '$rev'..."
+ git read-tree --prefix="$dir" $rev || exit $?
+ git checkout -- "$dir" || exit $?
+ tree=$(git write-tree) || exit $?
+
+ headrev=$(git rev-parse HEAD) || exit $?
+ if [ -n "$headrev" -a "$headrev" != "$rev" ]; then
+ headp="-p $headrev"
+ else
+ headp=
+ fi
+
+ if [ -n "$squash" ]; then
+ rev=$(new_squash_commit "" "" "$rev") || exit $?
+ commit=$(add_squashed_msg "$rev" "$dir" |
+ git commit-tree $tree $headp -p "$rev") || exit $?
+ else
+ revp=$(peel_committish "$rev") &&
+ commit=$(add_msg "$dir" "$headrev" "$rev" |
+ git commit-tree $tree $headp -p "$revp") || exit $?
+ fi
+ git reset "$commit" || exit $?
+
+ say "Added dir '$dir'"
+}
+
+cmd_split()
+{
+ debug "Splitting $dir..."
+ cache_setup || exit $?
+
+ if [ -n "$onto" ]; then
+ debug "Reading history for --onto=$onto..."
+ git rev-list $onto |
+ while read rev; do
+ # the 'onto' history is already just the subdir, so
+ # any parent we find there can be used verbatim
+ debug " cache: $rev"
+ cache_set $rev $rev
+ done
+ fi
+
+ if [ -n "$ignore_joins" ]; then
+ unrevs=
+ else
+ unrevs="$(find_existing_splits "$dir" "$revs")"
+ fi
+
+ # We can't restrict rev-list to only $dir here, because some of our
+ # parents have the $dir contents at the root, and those won't match.
+ # (and rev-list --follow doesn't seem to solve this)
+ grl='git rev-list --topo-order --reverse --parents $revs $unrevs'
+ revmax=$(eval "$grl" | wc -l)
+ revcount=0
+ createcount=0
+ eval "$grl" |
+ while read rev parents; do
+ revcount=$(($revcount + 1))
+ say -n "$revcount/$revmax ($createcount) "
+ debug "Processing commit: $rev"
+ exists=$(cache_get $rev)
+ if [ -n "$exists" ]; then
+ debug " prior: $exists"
+ continue
+ fi
+ createcount=$(($createcount + 1))
+ debug " parents: $parents"
+ newparents=$(cache_get $parents)
+ debug " newparents: $newparents"
+
+ tree=$(subtree_for_commit $rev "$dir")
+ debug " tree is: $tree"
+
+ check_parents $parents
+
+ # ugly. is there no better way to tell if this is a subtree
+ # vs. a mainline commit? Does it matter?
+ if [ -z $tree ]; then
+ set_notree $rev
+ if [ -n "$newparents" ]; then
+ cache_set $rev $rev
+ fi
+ continue
+ fi
+
+ newrev=$(copy_or_skip "$rev" "$tree" "$newparents") || exit $?
+ debug " newrev is: $newrev"
+ cache_set $rev $newrev
+ cache_set latest_new $newrev
+ cache_set latest_old $rev
+ done || exit $?
+ latest_new=$(cache_get latest_new)
+ if [ -z "$latest_new" ]; then
+ die "No new revisions were found"
+ fi
+
+ if [ -n "$rejoin" ]; then
+ debug "Merging split branch into HEAD..."
+ latest_old=$(cache_get latest_old)
+ git merge -s ours \
+ -m "$(rejoin_msg $dir $latest_old $latest_new)" \
+ $latest_new >&2 || exit $?
+ fi
+ if [ -n "$branch" ]; then
+ if rev_exists "refs/heads/$branch"; then
+ if ! rev_is_descendant_of_branch $latest_new $branch; then
+ die "Branch '$branch' is not an ancestor of commit '$latest_new'."
+ fi
+ action='Updated'
+ else
+ action='Created'
+ fi
+ git update-ref -m 'subtree split' "refs/heads/$branch" $latest_new || exit $?
+ say "$action branch '$branch'"
+ fi
+ echo $latest_new
+ exit 0
+}
+
+cmd_merge()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ ensure_clean
+
+ set -- $revs
+ if [ $# -ne 1 ]; then
+ die "You must provide exactly one revision. Got: '$revs'"
+ fi
+ rev="$1"
+
+ if [ -n "$squash" ]; then
+ first_split="$(find_latest_squash "$dir")"
+ if [ -z "$first_split" ]; then
+ die "Can't squash-merge: '$dir' was never added."
+ fi
+ set $first_split
+ old=$1
+ sub=$2
+ if [ "$sub" = "$rev" ]; then
+ say "Subtree is already at commit $rev."
+ exit 0
+ fi
+ new=$(new_squash_commit "$old" "$sub" "$rev") || exit $?
+ debug "New squash commit: $new"
+ rev="$new"
+ fi
+
+ version=$(git version)
+ if [ "$version" \< "git version 1.7" ]; then
+ if [ -n "$message" ]; then
+ git merge -s subtree --message="$message" $rev
+ else
+ git merge -s subtree $rev
+ fi
+ else
+ if [ -n "$message" ]; then
+ git merge -Xsubtree="$prefix" --message="$message" $rev
+ else
+ git merge -Xsubtree="$prefix" $rev
+ fi
+ fi
+}
+
+cmd_pull()
+{
+ if [ $# -ne 2 ]; then
+ die "You must provide <repository> <ref>"
+ fi
+ ensure_clean
+ ensure_valid_ref_format "$2"
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_merge "$@"
+}
+
+cmd_push()
+{
+ if [ $# -ne 2 ]; then
+ die "You must provide <repository> <ref>"
+ fi
+ ensure_valid_ref_format "$2"
+ if [ -e "$dir" ]; then
+ repository=$1
+ refspec=$2
+ echo "git push using: " $repository $refspec
+ localrev=$(git subtree split --prefix="$prefix") || die
+ git push $repository $localrev:refs/heads/$refspec
+ else
+ die "'$dir' must already exist. Try 'git subtree add'."
+ fi
+}
+
+"cmd_$command" "$@"
diff --git a/contrib/subtree/git-subtree.txt b/contrib/subtree/git-subtree.txt
new file mode 100644
index 0000000000..54e4b4a243
--- /dev/null
+++ b/contrib/subtree/git-subtree.txt
@@ -0,0 +1,351 @@
+git-subtree(1)
+==============
+
+NAME
+----
+git-subtree - Merge subtrees together and split repository into subtrees
+
+
+SYNOPSIS
+--------
+[verse]
+'git subtree' add -P <prefix> <commit>
+'git subtree' add -P <prefix> <repository> <ref>
+'git subtree' pull -P <prefix> <repository> <ref>
+'git subtree' push -P <prefix> <repository> <ref>
+'git subtree' merge -P <prefix> <commit>
+'git subtree' split -P <prefix> [OPTIONS] [<commit>]
+
+
+DESCRIPTION
+-----------
+Subtrees allow subprojects to be included within a subdirectory
+of the main project, optionally including the subproject's
+entire history.
+
+For example, you could include the source code for a library
+as a subdirectory of your application.
+
+Subtrees are not to be confused with submodules, which are meant for
+the same task. Unlike submodules, subtrees do not need any special
+constructions (like .gitmodules files or gitlinks) to be present in
+your repository, and do not force end-users of your
+repository to do anything special or to understand how subtrees
+work. A subtree is just a subdirectory that can be
+committed to, branched, and merged along with your project in
+any way you want.
+
+They are also not to be confused with using the subtree merge
+strategy. The main difference is that, besides merging
+the other project as a subdirectory, you can also extract the
+entire history of a subdirectory from your project and make it
+into a standalone project. Unlike the subtree merge strategy,
+you can alternate back and forth between these
+two operations. If the standalone library gets updated, you can
+automatically merge the changes into your project; if you
+update the library inside your project, you can "split" the
+changes back out again and merge them back into the library
+project.
+
+For example, if a library you made for one application ends up being
+useful elsewhere, you can extract its entire history and publish
+that as its own git repository, without accidentally
+intermingling the history of your application project.
+
+[TIP]
+In order to keep your commit messages clean, we recommend that
+people split their commits between the subtrees and the main
+project as much as possible. That is, if you make a change that
+affects both the library and the main application, commit it in
+two pieces. That way, when you split the library commits out
+later, their descriptions will still make sense. But if this
+isn't important to you, it's not *necessary*. git subtree will
+simply leave out the non-library-related parts of the commit
+when it splits it out into the subproject later.
+
+
+COMMANDS
+--------
+add::
+ Create the <prefix> subtree by importing its contents
+ from the given <commit> or <repository> and remote <ref>.
+ A new commit is created automatically, joining the imported
+ project's history with your own. With '--squash', imports
+ only a single commit from the subproject, rather than its
+ entire history.
+
+merge::
+ Merge recent changes up to <commit> into the <prefix>
+ subtree. As with normal 'git merge', this doesn't
+ remove your own local changes; it just merges those
+ changes into the latest <commit>. With '--squash',
+ creates only one commit that contains all the changes,
+ rather than merging in the entire history.
++
+If you use '--squash', the merge direction doesn't always have to be
+forward; you can use this command to go back in time from v2.5 to v2.4,
+for example. If your merge introduces a conflict, you can resolve it in
+the usual ways.
+
+pull::
+ Exactly like 'merge', but parallels 'git pull' in that
+ it fetches the given ref from the specified remote
+ repository.
+
+push::
+ Does a 'split' (see below) using the <prefix> supplied
+ and then does a 'git push' to push the result to the
+ repository and ref. This can be used to push your
+ subtree to different branches of the remote repository.
+
+split::
+ Extract a new, synthetic project history from the
+ history of the <prefix> subtree. The new history
+ includes only the commits (including merges) that
+ affected <prefix>, and each of those commits now has the
+ contents of <prefix> at the root of the project instead
+ of in a subdirectory. Thus, the newly created history
+ is suitable for export as a separate git repository.
++
+After splitting successfully, a single commit id is printed to stdout.
+This corresponds to the HEAD of the newly created tree, which you can
+manipulate however you want.
++
+Repeated splits of exactly the same history are guaranteed to be
+identical (i.e. to produce the same commit ids). Because of this, if
+you add new commits and then re-split, the new commits will be attached
+as commits on top of the history you generated last time, so 'git merge'
+and friends will work as expected.
++
+Note that if you use '--squash' when you merge, you should usually not
+just '--rejoin' when you split.
+
+
+OPTIONS
+-------
+-q::
+--quiet::
+ Suppress unnecessary output messages on stderr.
+
+-d::
+--debug::
+ Produce even more unnecessary output messages on stderr.
+
+-P <prefix>::
+--prefix=<prefix>::
+ Specify the path in the repository to the subtree you
+ want to manipulate. This option is mandatory
+ for all commands.
+
+-m <message>::
+--message=<message>::
+ This option is only valid for add, merge and pull (unsure).
+ Specify <message> as the commit message for the merge commit.
+
+
+OPTIONS FOR add, merge, push, pull
+----------------------------------
+--squash::
+ This option is only valid for add, merge, push and pull
+ commands.
++
+Instead of merging the entire history from the subtree project, produce
+only a single commit that contains all the differences you want to
+merge, and then merge that new commit into your project.
++
+Using this option helps to reduce log clutter. People rarely want to see
+every change that happened between v1.0 and v1.1 of the library they're
+using, since none of the interim versions were ever included in their
+application.
++
+Using '--squash' also helps avoid problems when the same subproject is
+included multiple times in the same project, or is removed and then
+re-added. In such a case, it doesn't make sense to combine the
+histories anyway, since it's unclear which part of the history belongs
+to which subtree.
++
+Furthermore, with '--squash', you can switch back and forth between
+different versions of a subtree, rather than strictly forward. 'git
+subtree merge --squash' always adjusts the subtree to match the exactly
+specified commit, even if getting to that commit would require undoing
+some changes that were added earlier.
++
+Whether or not you use '--squash', changes made in your local repository
+remain intact and can be later split and sent upstream to the
+subproject.
+
+
+OPTIONS FOR split
+-----------------
+--annotate=<annotation>::
+ This option is only valid for the split command.
++
+When generating synthetic history, add <annotation> as a prefix to each
+commit message. Since we're creating new commits that have the same
+commit messages as the originals, but possibly different content, the
+annotation can help to differentiate them and avoid confusion.
++
+Whenever you split, you need to use the same <annotation>, or else you
+don't have a guarantee that the new re-created history will be identical
+to the old one. That will prevent merging from working correctly. git
+subtree tries to make it work anyway, particularly if you use --rejoin,
+but it may not always be effective.
+
+-b <branch>::
+--branch=<branch>::
+ This option is only valid for the split command.
++
+After generating the synthetic history, create a new branch called
+<branch> that contains the new history. This is suitable for immediate
+pushing upstream. <branch> must not already exist.
+
+--ignore-joins::
+ This option is only valid for the split command.
++
+If you use '--rejoin', git subtree attempts to optimize its history
+reconstruction to generate only the new commits since the last
+'--rejoin'. '--ignore-joins' disables this behaviour, forcing it to
+regenerate the entire history. In a large project, this can take a long
+time.
+
+--onto=<onto>::
+ This option is only valid for the split command.
++
+If your subtree was originally imported using something other than git
+subtree, its history may not match what git subtree is expecting. In
+that case, you can specify the commit id <onto> that corresponds to the
+first revision of the subproject's history that was imported into your
+project, and git subtree will attempt to build its history from there.
++
+If you used 'git subtree add', you should never need this option.
+
+--rejoin::
+ This option is only valid for the split command.
++
+After splitting, merge the newly created synthetic history back into
+your main project. That way, future splits can search only the part of
+history that has been added since the most recent --rejoin.
++
+If your split commits end up merged into the upstream subproject, and
+then you want to get the latest upstream version, this will allow git's
+merge algorithm to more intelligently avoid conflicts (since it knows
+these synthetic commits are already part of the upstream repository).
++
+Unfortunately, using this option results in 'git log' showing an extra
+copy of every new commit that was created (the original, and the
+synthetic one).
++
+If you do all your merges with '--squash', don't use '--rejoin' when you
+split, because you don't want the subproject's history to be part of
+your project anyway.
+
+
+EXAMPLE 1. Add command
+----------------------
+Let's assume that you have a local repository that you would like
+to add an external vendor library to. In this case we will add the
+git-subtree repository as a subdirectory of your already existing
+git-extensions repository in ~/git-extensions/:
+
+ $ git subtree add --prefix=git-subtree --squash \
+ git://github.com/apenwarr/git-subtree.git master
+
+'master' needs to be a valid remote ref and can be a different branch
+name.
+
+You can omit the --squash flag, but doing so will increase the number
+of commits that are included in your local repository.
+
+We now have a ~/git-extensions/git-subtree directory containing code
+from the master branch of git://github.com/apenwarr/git-subtree.git
+in our git-extensions repository.
+
+EXAMPLE 2. Extract a subtree using commit, merge and pull
+---------------------------------------------------------
+Let's use the repository for the git source code as an example.
+First, get your own copy of the git.git repository:
+
+ $ git clone git://git.kernel.org/pub/scm/git/git.git test-git
+ $ cd test-git
+
+gitweb (commit 1130ef3) was merged into git as of commit
+0a8f4f0, after which it was no longer maintained separately.
+But imagine it had been maintained separately, and we wanted to
+extract git's changes to gitweb since that time, to share with
+the upstream. You could do this:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' \
+ 0a8f4f0^.. --onto=1130ef3 --rejoin \
+ --branch gitweb-latest
+ $ gitk gitweb-latest
+ $ git push git@github.com:whatever/gitweb.git gitweb-latest:master
+
+(We use '0a8f4f0^..' because that means "all the changes from
+0a8f4f0 to the current version, including 0a8f4f0 itself.")
+
+If gitweb had originally been merged using 'git subtree add' (or
+a previous split had already been done with --rejoin specified)
+then you can do all your splits without having to remember any
+weird commit ids:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' --rejoin \
+ --branch gitweb-latest2
+
+And you can merge changes back in from the upstream project just
+as easily:
+
+ $ git subtree pull --prefix=gitweb \
+ git@github.com:whatever/gitweb.git master
+
+Or, using '--squash', you can actually rewind to an earlier
+version of gitweb:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest~10
+
+Then make some changes:
+
+ $ date >gitweb/myfile
+ $ git add gitweb/myfile
+ $ git commit -m 'created myfile'
+
+And fast forward again:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest
+
+And notice that your change is still intact:
+
+ $ ls -l gitweb/myfile
+
+And you can split it out and look at your changes versus
+the standard gitweb:
+
+ git log gitweb-latest..$(git subtree split --prefix=gitweb)
+
+EXAMPLE 3. Extract a subtree using branch
+-----------------------------------------
+Suppose you have a source directory with many files and
+subdirectories, and you want to extract the lib directory to its own
+git project. Here's a short way to do it:
+
+First, make the new repository wherever you want:
+
+ $ <go to the new location>
+ $ git init --bare
+
+Back in your original directory:
+
+ $ git subtree split --prefix=lib --annotate="(split)" -b split
+
+Then push the new branch onto the new empty repository:
+
+ $ git push <new-repo> split:master
+
+
+AUTHOR
+------
+Written by Avery Pennarun <apenwarr@gmail.com>
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
diff --git a/contrib/subtree/t/Makefile b/contrib/subtree/t/Makefile
new file mode 100644
index 0000000000..c864810389
--- /dev/null
+++ b/contrib/subtree/t/Makefile
@@ -0,0 +1,69 @@
+# Run tests
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+#GIT_TEST_OPTS=--verbose --debug
+SHELL_PATH ?= $(SHELL)
+PERL_PATH ?= /usr/bin/perl
+TAR ?= $(TAR)
+RM ?= rm -f
+PROVE ?= prove
+DEFAULT_TEST_TARGET ?= test
+
+# Shell quote;
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+all: $(DEFAULT_TEST_TARGET)
+
+test: pre-clean $(TEST_LINT)
+ $(MAKE) aggregate-results-and-cleanup
+
+prove: pre-clean $(TEST_LINT)
+ @echo "*** prove ***"; GIT_CONFIG=.git/config $(PROVE) --exec '$(SHELL_PATH_SQ)' $(GIT_PROVE_OPTS) $(T) :: $(GIT_TEST_OPTS)
+ $(MAKE) clean
+
+$(T):
+ @echo "*** $@ ***"; GIT_CONFIG=.git/config '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
+
+pre-clean:
+ $(RM) -r test-results
+
+clean:
+ $(RM) -r 'trash directory'.* test-results
+ $(RM) -r valgrind/bin
+ $(RM) .prove
+
+test-lint: test-lint-duplicates test-lint-executable
+
+test-lint-duplicates:
+ @dups=`echo $(T) | tr ' ' '\n' | sed 's/-.*//' | sort | uniq -d` && \
+ test -z "$$dups" || { \
+ echo >&2 "duplicate test numbers:" $$dups; exit 1; }
+
+test-lint-executable:
+ @bad=`for i in $(T); do test -x "$$i" || echo $$i; done` && \
+ test -z "$$bad" || { \
+ echo >&2 "non-executable tests:" $$bad; exit 1; }
+
+aggregate-results-and-cleanup: $(T)
+ $(MAKE) aggregate-results
+ $(MAKE) clean
+
+aggregate-results:
+ for f in ../../../t/test-results/t*-*.counts; do \
+ echo "$$f"; \
+ done | '$(SHELL_PATH_SQ)' ../../../t/aggregate-results.sh
+
+valgrind:
+ $(MAKE) GIT_TEST_OPTS="$(GIT_TEST_OPTS) --valgrind"
+
+test-results:
+ mkdir -p test-results
+
+.PHONY: pre-clean $(T) aggregate-results clean valgrind
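
To drive the suite defined above, either go through the contrib/subtree
Makefile or run prove directly in this directory (a sketch; paths assume the
default layout):

    $ make -C contrib/subtree test
    $ cd contrib/subtree/t && make prove
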
diff --git a/contrib/subtree/t/t7900-subtree.sh b/contrib/subtree/t/t7900-subtree.sh
new file mode 100755
index 0000000000..6309d124ca
--- /dev/null
+++ b/contrib/subtree/t/t7900-subtree.sh
@@ -0,0 +1,469 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Avery Pennarun
+#
+test_description='Basic porcelain support for subtrees
+
+This test verifies the basic operation of the merge, pull, add
+and split subcommands of git subtree.
+'
+
+TEST_DIRECTORY=$(pwd)/../../../t
+export TEST_DIRECTORY
+
+. ../../../t/test-lib.sh
+
+create()
+{
+ echo "$1" >"$1"
+ git add "$1"
+}
+
+
+check_equal()
+{
+ test_debug 'echo'
+ test_debug "echo \"check a:\" \"{$1}\""
+ test_debug "echo \" b:\" \"{$2}\""
+ if [ "$1" = "$2" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+fixnl()
+{
+ t=""
+ while read x; do
+ t="$t$x "
+ done
+ echo $t
+}
+
+multiline()
+{
+ while read x; do
+ set -- $x
+ for d in "$@"; do
+ echo "$d"
+ done
+ done
+}
+
+undo()
+{
+ git reset --hard HEAD~
+}
+
+last_commit_message()
+{
+ git log --pretty=format:%s -1
+}
+
+test_expect_success 'init subproj' '
+ test_create_repo subproj
+'
+
+# To the subproject!
+cd subproj
+
+test_expect_success 'add sub1' '
+ create sub1 &&
+ git commit -m "sub1" &&
+ git branch sub1 &&
+ git branch -m master subproj
+'
+
+# Save this hash for testing later.
+
+subdir_hash=$(git rev-parse HEAD)
+
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2" &&
+ git branch sub2
+'
+
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3" &&
+ git branch sub3
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'add main4' '
+ create main4 &&
+ git commit -m "main4" &&
+ git branch -m master mainline &&
+ git branch subdir
+'
+
+test_expect_success 'fetch subproj history' '
+ git fetch ./subproj sub1 &&
+ git branch sub1 FETCH_HEAD
+'
+
+test_expect_success 'no subtree exists in main tree' '
+ test_must_fail git subtree merge --prefix=subdir sub1
+'
+
+test_expect_success 'no pull from non-existent subtree' '
+ test_must_fail git subtree pull --prefix=subdir ./subproj sub1
+'
+
+test_expect_success 'check if --message works for add' '
+ git subtree add --prefix=subdir --message="Added subproject" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject" &&
+ undo
+'
+
+test_expect_success 'check if --message works as -m and --prefix as -P' '
+ git subtree add -P subdir -m "Added subproject using git subtree" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject using git subtree" &&
+ undo
+'
+
+test_expect_success 'check if --message works with squash too' '
+ git subtree add -P subdir -m "Added subproject with squash" --squash sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject with squash" &&
+ undo
+'
+
+test_expect_success 'add subproj to mainline' '
+ git subtree add --prefix=subdir/ FETCH_HEAD &&
+ check_equal ''"$(last_commit_message)"'' "Add '"'subdir/'"' from commit '"'"'''"$(git rev-parse sub1)"'''"'"'"
+'
+
+# this shouldn't actually do anything, since FETCH_HEAD is already a parent
+test_expect_success 'merge fetched subproj' '
+ git merge -m "merge -s -ours" -s ours FETCH_HEAD
+'
+
+test_expect_success 'add main-sub5' '
+ create subdir/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+test_expect_success 'add main6' '
+ create main6 &&
+ git commit -m "main6 boring"
+'
+
+test_expect_success 'add main-sub7' '
+ create subdir/main-sub7 &&
+ git commit -m "main-sub7"
+'
+
+test_expect_success 'fetch new subproj history' '
+ git fetch ./subproj sub2 &&
+ git branch sub2 FETCH_HEAD
+'
+
+test_expect_success 'check if --message works for merge' '
+ git subtree merge --prefix=subdir -m "Merged changes from subproject" sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject" &&
+ undo
+'
+
+test_expect_success 'check if --message for merge works with squash too' '
+ git subtree merge --prefix subdir -m "Merged changes from subproject using squash" --squash sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject using squash" &&
+ undo
+'
+
+test_expect_success 'merge new subproj history into subdir' '
+ git subtree merge --prefix=subdir FETCH_HEAD &&
+ git branch pre-split &&
+ check_equal ''"$(last_commit_message)"'' "Merge commit '"'"'"$(git rev-parse sub2)"'"'"' into mainline"
+'
+
+test_expect_success 'Check that prefix argument is required for split' '
+ echo "You must provide the --prefix option." > expected &&
+ test_must_fail git subtree split > actual 2>&1 &&
+ test_debug "printf '"'"'expected: '"'"'" &&
+ test_debug "cat expected" &&
+ test_debug "printf '"'"'actual: '"'"'" &&
+ test_debug "cat actual" &&
+ test_cmp expected actual &&
+ rm -f expected actual
+'
+
+test_expect_success 'Check that the <prefix> exists for a split' '
+ echo "'"'"'non-existent-directory'"'"'" does not exist\; use "'"'"'git subtree add'"'"'" > expected &&
+ test_must_fail git subtree split --prefix=non-existent-directory > actual 2>&1 &&
+ test_debug "printf '"'"'expected: '"'"'" &&
+ test_debug "cat expected" &&
+ test_debug "printf '"'"'actual: '"'"'" &&
+ test_debug "cat actual" &&
+ test_cmp expected actual
+# rm -f expected actual
+'
+
+test_expect_success 'check if --message works for split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ git branch spl1 "$spl1" &&
+ check_equal ''"$(last_commit_message)"'' "Split & rejoin" &&
+ undo
+'
+
+test_expect_success 'check split with --branch' '
+ spl1=$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin) &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr1 &&
+ check_equal ''"$(git rev-parse splitbr1)"'' "$spl1"
+'
+
+test_expect_success 'check hash of split' '
+ spl1=$(git subtree split --prefix subdir) &&
+ undo &&
+ git subtree split --prefix subdir --branch splitbr1test &&
+ check_equal ''"$(git rev-parse splitbr1test)"'' "$spl1"
+ git checkout splitbr1test &&
+ new_hash=$(git rev-parse HEAD~2) &&
+ git checkout mainline &&
+ check_equal ''"$new_hash"'' "$subdir_hash"
+'
+
+test_expect_success 'check split with --branch for an existing branch' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git branch splitbr2 sub1 &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr2 &&
+ check_equal ''"$(git rev-parse splitbr2)"'' "$spl1"
+'
+
+test_expect_success 'check split with --branch for an incompatible branch' '
+ test_must_fail git subtree split --prefix subdir --onto FETCH_HEAD --branch subdir
+'
+
+test_expect_success 'check split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --rejoin &&
+ check_equal ''"$(last_commit_message)"'' "Split '"'"'subdir/'"'"' into commit '"'"'"$spl1"'"'"'"
+'
+
+test_expect_success 'add main-sub8' '
+ create subdir/main-sub8 &&
+ git commit -m "main-sub8"
+'
+
+# To the subproject!
+cd ./subproj
+
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl1 &&
+ git branch spl1 FETCH_HEAD &&
+ git merge FETCH_HEAD
+'
+
+test_expect_success 'add sub9' '
+ create sub9 &&
+ git commit -m "sub9"
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'split for sub8' '
+ split2=''"$(git subtree split --annotate='"'*'"' --prefix subdir/ --rejoin)"''
+ git branch split2 "$split2"
+'
+
+test_expect_success 'add main-sub10' '
+ create subdir/main-sub10 &&
+ git commit -m "main-sub10"
+'
+
+test_expect_success 'split for sub10' '
+ spl3=''"$(git subtree split --annotate='"'*'"' --prefix subdir --rejoin)"'' &&
+ git branch spl3 "$spl3"
+'
+
+# To the subproject!
+cd ./subproj
+
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl3 &&
+ git branch spl3 FETCH_HEAD &&
+ git merge FETCH_HEAD &&
+ git branch subproj-merge-spl3
+'
+
+chkm="main4 main6"
+chkms="main-sub10 main-sub5 main-sub7 main-sub8"
+chkms_sub=$(echo $chkms | multiline | sed 's,^,subdir/,' | fixnl)
+chks="sub1 sub2 sub3 sub9"
+chks_sub=$(echo $chks | multiline | sed 's,^,subdir/,' | fixnl)
+
+test_expect_success 'make sure exactly the right set of files ends up in the subproj' '
+ subfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$subfiles" "$chkms $chks"
+'
+
+test_expect_success 'make sure the subproj history *only* contains commits that affect the subdir' '
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" "$chkms $chks"
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'pull from subproj' '
+ git fetch ./subproj subproj-merge-spl3 &&
+ git branch subproj-merge-spl3 FETCH_HEAD &&
+ git subtree pull --prefix=subdir ./subproj subproj-merge-spl3
+'
+
+test_expect_success 'make sure exactly the right set of files ends up in the mainline' '
+ mainfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$mainfiles" "$chkm $chkms_sub $chks_sub"
+'
+
+test_expect_success 'make sure each filename changed exactly once in the entire history' '
+ # main-sub?? and /subdir/main-sub?? both change, because those are the
+ # changes that were split into their own history. And subdir/sub?? never
+ # change, since they were *only* changed in the subtree branch.
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" ''"$(echo $chkms $chkm $chks $chkms_sub | multiline | sort | fixnl)"''
+'
+
+test_expect_success 'make sure the --rejoin commits never make it into subproj' '
+ check_equal ''"$(git log --pretty=format:'"'%s'"' HEAD^2 | grep -i split)"'' ""
+'
+
+test_expect_success 'make sure no "git subtree" tagged commits make it into subproj' '
+ # They are meaningless to subproj since one side of the merge refers to the mainline
+ check_equal ''"$(git log --pretty=format:'"'%s%n%b'"' HEAD^2 | grep "git-subtree.*:")"'' ""
+'
+
+# prepare second pair of repositories
+mkdir test2
+cd test2
+
+test_expect_success 'init main' '
+ test_create_repo main
+'
+
+cd main
+
+test_expect_success 'add main1' '
+ create main1 &&
+ git commit -m "main1"
+'
+
+cd ..
+
+test_expect_success 'init sub' '
+ test_create_repo sub
+'
+
+cd sub
+
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2"
+'
+
+cd ../main
+
+# check if split can find proper base without --onto
+
+test_expect_success 'add sub as subdir in main' '
+ git fetch ../sub master &&
+ git branch sub2 FETCH_HEAD &&
+ git subtree add --prefix subdir sub2
+'
+
+cd ../sub
+
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3"
+'
+
+cd ../main
+
+test_expect_success 'merge from sub' '
+ git fetch ../sub master &&
+ git branch sub3 FETCH_HEAD &&
+ git subtree merge --prefix subdir sub3
+'
+
+test_expect_success 'add main-sub4' '
+ create subdir/main-sub4 &&
+ git commit -m "main-sub4"
+'
+
+test_expect_success 'split for main-sub4 without --onto' '
+ git subtree split --prefix subdir --branch mainsub4
+'
+
+# at this point, the new commit's parent should be sub3.  If it is not,
+# something went wrong (the "newparent" of the "master~" commit should
+# have been sub3, but it was not, because its cache was not set to
+# itself)
+
+test_expect_success 'check that the commit parent is sub3' '
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub4)"'' ''"$(git rev-parse sub3)"''
+'
+
+test_expect_success 'add main-sub5' '
+ mkdir subdir2 &&
+ create subdir2/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+test_expect_success 'split for main-sub5 without --onto' '
+ # also test that we can still split out an entirely new subtree;
+ # if the parent of the first commit in the tree is not empty,
+ # the new subtree has accidentally been attached to something
+ git subtree split --prefix subdir2 --branch mainsub5 &&
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub5)"'' ""
+'
+
+# make sure no patch changes more than one file. The original set of commits
+# changed only one file each. A multi-file change would imply that we pruned
+# commits too aggressively.
+joincommits()
+{
+ commit=
+ all=
+ while read x y; do
+ #echo "{$x}" >&2
+ if [ -z "$x" ]; then
+ continue
+ elif [ "$x" = "commit:" ]; then
+ if [ -n "$commit" ]; then
+ echo "$commit $all"
+ all=
+ fi
+ commit="$y"
+ else
+ all="$all $y"
+ fi
+ done
+ echo "$commit $all"
+}
+
+test_expect_success 'verify one file change per commit' '
+ x= &&
+ list=''"$(git log --pretty=format:'"'commit: %H'"' | joincommits)"'' &&
+# test_debug "echo HERE" &&
+# test_debug "echo ''"$list"''" &&
+ (git log --pretty=format:'"'commit: %H'"' | joincommits |
+ ( while read commit a b; do
+ test_debug "echo Verifying commit "''"$commit"''
+ test_debug "echo a: "''"$a"''
+ test_debug "echo b: "''"$b"''
+ check_equal "$b" ""
+ x=1
+ done
+ check_equal "$x" 1
+ ))
+'
+
+test_done
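
Taken together, the tests above drive the whole subtree cycle: graft a
subproject under a prefix, merge later subproject history into it, split
the prefix back out, and pull in one step.  As a rough sketch, using the
same layout as the tests but with illustrative branch names:

    git fetch ./subproj sub1 &&
    git branch sub1 FETCH_HEAD &&
    git subtree add --prefix=subdir sub1 &&        # graft the subproject as subdir/
    git subtree merge --prefix=subdir sub2 &&      # fold newer subproject history into subdir/
    git subtree split --prefix=subdir --rejoin --branch exported &&   # extract subdir/ history
    git subtree pull --prefix=subdir ./subproj sub1                   # fetch + merge in one go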
diff --git a/contrib/subtree/todo b/contrib/subtree/todo
new file mode 100644
index 0000000000..7e44b0024f
--- /dev/null
+++ b/contrib/subtree/todo
@@ -0,0 +1,50 @@
+
+ delete tempdir
+
+ 'git subtree rejoin' option to do the same as --rejoin, eg. after a
+ rebase
+
+ --prefix doesn't force the subtree correctly in merge/pull:
+ "-s subtree" should be given an explicit subtree option?
+ There doesn't seem to be a way to do this. We'd have to
+ patch git-merge-subtree. Ugh.
+ (but we could avoid this problem by generating squashes with
+ exactly the right subtree structure, rather than using
+ subtree merge...)
+
+ add a 'push' subcommand to parallel 'pull'
+
+ add a 'log' subcommand to see what's new in a subtree?
+
+ add to-submodule and from-submodule commands
+
+ automated tests for --squash stuff
+
+ "add" command non-obviously requires a commitid; would be easier if
+ it had a "pull" sort of mode instead
+
+ "pull" and "merge" commands should fail if you've never merged
+ that --prefix before
+
+ docs should provide an example of "add"
+
+ note that the initial split doesn't *have* to have a commitid
+ specified... that's just an optimization
+
+ if you try to add (or maybe merge?) with an invalid commitid, you
+ get a misleading "prefix must end with /" message from
+ one of the other git tools that git-subtree calls. Should
+ detect this situation and print the *real* problem.
+
+ "pull --squash" should do fetch-synthesize-merge, but instead just
+ does "pull" directly, which doesn't work at all.
+
+ make a 'force-update' that does what 'add' does even if the subtree
+ already exists. That way we can help people who imported
+ subtrees "incorrectly" (eg. by just copying in the files) in
+ the past.
+
+ guess --prefix automatically if possible based on pwd
+
+ make a 'git subtree grafts' that automatically expands --squash'd
+ commits so you can see the full history if you want it.
diff --git a/contrib/svn-fe/.gitignore b/contrib/svn-fe/.gitignore
new file mode 100644
index 0000000000..02a7791585
--- /dev/null
+++ b/contrib/svn-fe/.gitignore
@@ -0,0 +1,4 @@
+/*.xml
+/*.1
+/*.html
+/svn-fe
diff --git a/contrib/svn-fe/Makefile b/contrib/svn-fe/Makefile
new file mode 100644
index 0000000000..e8651aaf4b
--- /dev/null
+++ b/contrib/svn-fe/Makefile
@@ -0,0 +1,105 @@
+all:: svn-fe$X
+
+CC = cc
+RM = rm -f
+MV = mv
+
+CFLAGS = -g -O2 -Wall
+LDFLAGS =
+EXTLIBS = -lz
+
+include ../../config.mak.uname
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+ifeq ($(uname_S),Darwin)
+ ifndef NO_FINK
+ ifeq ($(shell test -d /sw/lib && echo y),y)
+ CFLAGS += -I/sw/include
+ LDFLAGS += -L/sw/lib
+ endif
+ endif
+ ifndef NO_DARWIN_PORTS
+ ifeq ($(shell test -d /opt/local/lib && echo y),y)
+ CFLAGS += -I/opt/local/include
+ LDFLAGS += -L/opt/local/lib
+ endif
+ endif
+endif
+
+ifndef NO_OPENSSL
+ EXTLIBS += -lssl
+ ifdef NEEDS_CRYPTO_WITH_SSL
+ EXTLIBS += -lcrypto
+ endif
+endif
+
+ifndef NO_PTHREADS
+ CFLAGS += $(PTHREADS_CFLAGS)
+ EXTLIBS += $(PTHREAD_LIBS)
+endif
+
+ifdef HAVE_CLOCK_GETTIME
+ CFLAGS += -DHAVE_CLOCK_GETTIME
+ EXTLIBS += -lrt
+endif
+
+ifdef NEEDS_LIBICONV
+ EXTLIBS += -liconv
+endif
+
+GIT_LIB = ../../libgit.a
+VCSSVN_LIB = ../../vcs-svn/lib.a
+XDIFF_LIB = ../../xdiff/lib.a
+
+LIBS = $(VCSSVN_LIB) $(GIT_LIB) $(XDIFF_LIB)
+
+QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
+QUIET_SUBDIR1 =
+
+ifneq ($(findstring $(MAKEFLAGS),w),w)
+PRINT_DIR = --no-print-directory
+else # "make -w"
+NO_SUBDIR = :
+endif
+
+ifneq ($(findstring $(MAKEFLAGS),s),s)
+ifndef V
+ QUIET_CC = @echo ' ' CC $@;
+ QUIET_LINK = @echo ' ' LINK $@;
+ QUIET_SUBDIR0 = +@subdir=
+ QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
+ $(MAKE) $(PRINT_DIR) -C $$subdir
+endif
+endif
+
+svn-fe$X: svn-fe.o $(VCSSVN_LIB) $(XDIFF_LIB) $(GIT_LIB)
+ $(QUIET_LINK)$(CC) $(CFLAGS) $(LDFLAGS) $(EXTLIBS) -o $@ svn-fe.o $(LIBS)
+
+svn-fe.o: svn-fe.c ../../vcs-svn/svndump.h
+ $(QUIET_CC)$(CC) $(CFLAGS) -I../../vcs-svn -o $*.o -c $<
+
+svn-fe.html: svn-fe.txt
+ $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+ MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+ ../contrib/svn-fe/$@
+
+svn-fe.1: svn-fe.txt
+ $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+ MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+ ../contrib/svn-fe/$@
+ $(MV) ../../Documentation/svn-fe.1 .
+
+../../vcs-svn/lib.a: FORCE
+ $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) vcs-svn/lib.a
+
+../../xdiff/lib.a: FORCE
+ $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) xdiff/lib.a
+
+../../libgit.a: FORCE
+ $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) libgit.a
+
+clean:
+ $(RM) svn-fe$X svn-fe.o svn-fe.html svn-fe.xml svn-fe.1
+
+.PHONY: all clean FORCE
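
Assuming this directory sits inside a git source tree (the includes and
library paths above all point two levels up), building the tool and its
manual page would look roughly like:

    cd contrib/svn-fe &&
    make &&          # builds svn-fe, recursing into ../.. for libgit.a, vcs-svn/lib.a, xdiff/lib.a
    make svn-fe.1    # formats the manual page via ../../Documentation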
diff --git a/contrib/svn-fe/svn-fe.c b/contrib/svn-fe/svn-fe.c
new file mode 100644
index 0000000000..f363505abb
--- /dev/null
+++ b/contrib/svn-fe/svn-fe.c
@@ -0,0 +1,18 @@
+/*
+ * This file is in the public domain.
+ * You may freely use, modify, distribute, and relicense it.
+ */
+
+#include <stdlib.h>
+#include "svndump.h"
+
+int main(int argc, char **argv)
+{
+ if (svndump_init(NULL))
+ return 1;
+ svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master",
+ "refs/notes/svn/revs");
+ svndump_deinit();
+ svndump_reset();
+ return 0;
+}
diff --git a/contrib/svn-fe/svn-fe.txt b/contrib/svn-fe/svn-fe.txt
new file mode 100644
index 0000000000..a3425f4770
--- /dev/null
+++ b/contrib/svn-fe/svn-fe.txt
@@ -0,0 +1,71 @@
+svn-fe(1)
+=========
+
+NAME
+----
+svn-fe - convert an SVN "dumpfile" to a fast-import stream
+
+SYNOPSIS
+--------
+[verse]
+mkfifo backchannel &&
+svnadmin dump --deltas REPO |
+ svn-fe [url] 3<backchannel |
+ git fast-import --cat-blob-fd=3 3>backchannel
+
+DESCRIPTION
+-----------
+
+Converts a Subversion dumpfile into input suitable for
+git-fast-import(1) and similar importers. REPO is a path to a
+Subversion repository mirrored on the local disk. Remote Subversion
+repositories can be mirrored on local disk using the `svnsync`
+command.
+
+Note: this tool is very young.  The details of its command-line
+interface may change in backward-incompatible ways.
+
+INPUT FORMAT
+------------
+Subversion's repository dump format is documented in full in
+`notes/dump-load-format.txt` from the Subversion source tree.
+Files in this format can be generated using the 'svnadmin dump' or
+'svk admin dump' command.
+
+OUTPUT FORMAT
+-------------
+The fast-import format is documented by the git-fast-import(1)
+manual page.
+
+NOTES
+-----
+Subversion dumps do not record a separate author and committer for
+each revision, nor do they record a separate display name and email
+address for each author. Like git-svn(1), 'svn-fe' will use the name
+
+---------
+user <user@UUID>
+---------
+
+as committer, where 'user' is the value of the `svn:author` property
+and 'UUID' the repository's identifier.
+
+To support incremental imports, 'svn-fe' puts a `git-svn-id` line at
+the end of each commit log message if passed a URL on the command
+line. This line has the form `git-svn-id: URL@REVNO UUID`.
+
+The resulting repository will generally require further processing
+to put each project in its own repository and to separate the history
+of each branch. The 'git filter-branch --subdirectory-filter' command
+may be useful for this purpose.
+
+BUGS
+----
+Empty directories and unknown properties are silently discarded.
+
+The exit status does not reflect whether an error was detected.
+
+SEE ALSO
+--------
+git-svn(1), svn2git(1), svk(1), git-filter-branch(1), git-fast-import(1),
+https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt
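
Putting the DESCRIPTION and SYNOPSIS together, importing a remote
repository might look like the sketch below.  The mirror path and URL are
illustrative; svnsync needs a permissive pre-revprop-change hook on the
mirror, and the pipeline should be run inside an initialized git
repository:

    svnadmin create mirror &&
    svnsync init file://"$PWD"/mirror http://svn.example.org/repo &&
    svnsync sync file://"$PWD"/mirror &&
    mkfifo backchannel &&
    svnadmin dump --deltas mirror |
        svn-fe http://svn.example.org/repo 3<backchannel |
        git fast-import --cat-blob-fd=3 3>backchannel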
diff --git a/contrib/svn-fe/svnrdump_sim.py b/contrib/svn-fe/svnrdump_sim.py
new file mode 100755
index 0000000000..11ac6f6927
--- /dev/null
+++ b/contrib/svn-fe/svnrdump_sim.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+"""
+Simulates svnrdump by replaying an existing dump from a file, taking care
+of the specified revision range.
+To simulate incremental imports the environment variable SVNRMAX can be set
+to the highest revision that should be available.
+"""
+import sys
+import os
+
+if sys.hexversion < 0x02040000:
+ # The limiter is the ValueError() calls. This may be too conservative
+ sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n")
+ sys.exit(1)
+
+
+def getrevlimit():
+ var = 'SVNRMAX'
+ if var in os.environ:
+ return os.environ[var]
+ return None
+
+
+def writedump(url, lower, upper):
+ if url.startswith('sim://'):
+ filename = url[6:]
+ if filename[-1] == '/':
+ filename = filename[:-1] # remove terminating slash
+ else:
+ raise ValueError('sim:// url required')
+ f = open(filename, 'r')
+ state = 'header'
+ wroterev = False
+ while(True):
+ l = f.readline()
+ if l == '':
+ break
+ if state == 'header' and l.startswith('Revision-number: '):
+ state = 'prefix'
+ if state == 'prefix' and l == 'Revision-number: %s\n' % lower:
+ state = 'selection'
+ if not upper == 'HEAD' and state == 'selection' and \
+ l == 'Revision-number: %s\n' % upper:
+ break
+
+ if state == 'header' or state == 'selection':
+ if state == 'selection':
+ wroterev = True
+ sys.stdout.write(l)
+ return wroterev
+
+if __name__ == "__main__":
+ if not (len(sys.argv) in (3, 4, 5)):
+ print("usage: %s dump URL -rLOWER:UPPER")
+ sys.exit(1)
+ if not sys.argv[1] == 'dump':
+ raise NotImplementedError('only "dump" is supported.')
+ url = sys.argv[2]
+ r = ('0', 'HEAD')
+ if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r':
+ r = sys.argv[3][2:].lstrip().split(':')
+ if not getrevlimit() is None:
+ r[1] = getrevlimit()
+ if writedump(url, r[0], r[1]):
+ ret = 0
+ else:
+ ret = 1
+ sys.exit(ret)
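
Driving the simulator by hand, with a made-up dump path, looks like this;
SVNRMAX caps the highest revision the simulated "server" will admit to
having:

    SVNRMAX=5 ./svnrdump_sim.py dump sim:///tmp/project.svndump -r0:HEAD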
diff --git a/contrib/thunderbird-patch-inline/appp.sh b/contrib/thunderbird-patch-inline/appp.sh
index cc518f3c89..8dc73ece15 100755
--- a/contrib/thunderbird-patch-inline/appp.sh
+++ b/contrib/thunderbird-patch-inline/appp.sh
@@ -1,8 +1,8 @@
-#!/bin/bash
+#!/bin/sh
# Copyright 2008 Lukas Sandström <luksan@gmail.com>
#
# AppendPatch - A script to be used together with ExternalEditor
-# for Mozilla Thunderbird to properly include pathes inline i e-mails.
+# for Mozilla Thunderbird to properly include patches inline in e-mails.
# ExternalEditor can be downloaded at http://globs.org/articles.php?lng=en&pg=2
@@ -10,7 +10,7 @@ CONFFILE=~/.appprc
SEP="-=-=-=-=-=-=-=-=-=# Don't remove this line #=-=-=-=-=-=-=-=-=-"
if [ -e "$CONFFILE" ] ; then
- LAST_DIR=`grep -m 1 "^LAST_DIR=" "${CONFFILE}"|sed -e 's/^LAST_DIR=//'`
+ LAST_DIR=$(grep -m 1 "^LAST_DIR=" "${CONFFILE}"|sed -e 's/^LAST_DIR=//')
cd "${LAST_DIR}"
else
cd > /dev/null
@@ -25,11 +25,11 @@ fi
cd - > /dev/null
-SUBJECT=`sed -n -e '/^Subject: /p' "${PATCH}"`
-HEADERS=`sed -e '/^'"${SEP}"'$/,$d' $1`
-BODY=`sed -e "1,/${SEP}/d" $1`
-CMT_MSG=`sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}"`
-DIFF=`sed -e '1,/^---$/d' "${PATCH}"`
+SUBJECT=$(sed -n -e '/^Subject: /p' "${PATCH}")
+HEADERS=$(sed -e '/^'"${SEP}"'$/,$d' $1)
+BODY=$(sed -e "1,/${SEP}/d" $1)
+CMT_MSG=$(sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}")
+DIFF=$(sed -e '1,/^---$/d' "${PATCH}")
CCS=`echo -e "$CMT_MSG\n$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
-e 's/^Signed-off-by: \(.*\)/\1,/gp'`
@@ -48,7 +48,7 @@ if [ "x${BODY}x" != "xx" ] ; then
fi
echo "$DIFF" >> $1
-LAST_DIR=`dirname "${PATCH}"`
+LAST_DIR=$(dirname "${PATCH}")
grep -v "^LAST_DIR=" "${CONFFILE}" > "${CONFFILE}_"
echo "LAST_DIR=${LAST_DIR}" >> "${CONFFILE}_"
diff --git a/contrib/vim/README b/contrib/vim/README
deleted file mode 100644
index fca1e17251..0000000000
--- a/contrib/vim/README
+++ /dev/null
@@ -1,32 +0,0 @@
-Syntax highlighting for git commit messages, config files, etc. is
-included with the vim distribution as of vim 7.2, and should work
-automatically.
-
-If you have an older version of vim, you can get the latest syntax
-files from the vim project:
-
- http://ftp.vim.org/pub/vim/runtime/syntax/git.vim
- http://ftp.vim.org/pub/vim/runtime/syntax/gitcommit.vim
- http://ftp.vim.org/pub/vim/runtime/syntax/gitconfig.vim
- http://ftp.vim.org/pub/vim/runtime/syntax/gitrebase.vim
- http://ftp.vim.org/pub/vim/runtime/syntax/gitsendemail.vim
-
-These files are also available via FTP at the same location.
-
-To install:
-
- 1. Copy these files to vim's syntax directory $HOME/.vim/syntax
- 2. To auto-detect the editing of various git-related filetypes:
- $ cat >>$HOME/.vim/filetype.vim <<'EOF'
- autocmd BufNewFile,BufRead *.git/COMMIT_EDITMSG setf gitcommit
- autocmd BufNewFile,BufRead *.git/config,.gitconfig setf gitconfig
- autocmd BufNewFile,BufRead git-rebase-todo setf gitrebase
- autocmd BufNewFile,BufRead .msg.[0-9]*
- \ if getline(1) =~ '^From.*# This line is ignored.$' |
- \ setf gitsendemail |
- \ endif
- autocmd BufNewFile,BufRead *.git/**
- \ if getline(1) =~ '^\x\{40\}\>\|^ref: ' |
- \ setf git |
- \ endif
- EOF
diff --git a/contrib/workdir/git-new-workdir b/contrib/workdir/git-new-workdir
index 993cacf324..888c34a521 100755
--- a/contrib/workdir/git-new-workdir
+++ b/contrib/workdir/git-new-workdir
@@ -10,6 +10,10 @@ die () {
exit 128
}
+failed () {
+ die "unable to create new workdir '$new_workdir'!"
+}
+
if test $# -lt 2 || test $# -gt 3
then
usage "$0 <repository> <new_workdir> [<branch>]"
@@ -35,48 +39,67 @@ esac
# don't link to a configured bare repository
isbare=$(git --git-dir="$git_dir" config --bool --get core.bare)
-if test ztrue = z$isbare
+if test ztrue = "z$isbare"
then
die "\"$git_dir\" has core.bare set to true," \
" remove from \"$git_dir/config\" to use $0"
fi
# don't link to a workdir
-if test -L "$git_dir/config"
+if test -h "$git_dir/config"
then
die "\"$orig_git\" is a working directory only, please specify" \
"a complete repository."
fi
-# don't recreate a workdir over an existing repository
-if test -e "$new_workdir"
+# make sure the links in the workdir have full paths to the original repo
+git_dir=$(cd "$git_dir" && pwd) || exit 1
+
+# don't recreate a workdir over an existing directory, unless it's empty
+if test -d "$new_workdir"
then
- die "destination directory '$new_workdir' already exists."
+ if test $(ls -a1 "$new_workdir/." | wc -l) -ne 2
+ then
+ die "destination directory '$new_workdir' is not empty."
+ fi
+ cleandir="$new_workdir/.git"
+else
+ cleandir="$new_workdir"
fi
-# make sure the the links use full paths
-git_dir=$(cd "$git_dir"; pwd)
+mkdir -p "$new_workdir/.git" || failed
+cleandir=$(cd "$cleandir" && pwd) || failed
-# create the workdir
-mkdir -p "$new_workdir/.git" || die "unable to create \"$new_workdir\"!"
+cleanup () {
+ rm -rf "$cleandir"
+}
+siglist="0 1 2 15"
+trap cleanup $siglist
-# create the links to the original repo. explictly exclude index, HEAD and
+# create the links to the original repo. explicitly exclude index, HEAD and
# logs/HEAD from the list since they are purely related to the current working
# directory, and should not be shared.
for x in config refs logs/refs objects info hooks packed-refs remotes rr-cache svn
do
+ # create a containing directory if needed
case $x in
*/*)
- mkdir -p "$(dirname "$new_workdir/.git/$x")"
+ mkdir -p "$new_workdir/.git/${x%/*}"
;;
esac
- ln -s "$git_dir/$x" "$new_workdir/.git/$x"
+
+ ln -s "$git_dir/$x" "$new_workdir/.git/$x" || failed
done
-# now setup the workdir
-cd "$new_workdir"
+# commands below this are run in the context of the new workdir
+cd "$new_workdir" || failed
+
# copy the HEAD from the original repository as a default branch
-cp "$git_dir/HEAD" .git/HEAD
-# checkout the branch (either the same as HEAD from the original repository, or
-# the one that was asked for)
+cp "$git_dir/HEAD" .git/HEAD || failed
+
+# the workdir is set up. if the checkout fails, the user can fix it.
+trap - $siglist
+
+# checkout the branch (either the same as HEAD from the original repository,
+# or the one that was asked for)
git checkout -f $branch
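
With the cleanup trap above in place, a failed run no longer leaves a
half-populated directory behind; the invocation itself is unchanged
(paths and branch here are illustrative):

    contrib/workdir/git-new-workdir ~/src/git ~/src/git-maint maint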