Diffstat (limited to 'contrib')
-rw-r--r--  contrib/blameview/README | 9
-rwxr-xr-x  contrib/blameview/blameview.perl | 155
-rw-r--r--  contrib/ciabot/INSTALL | 54
-rw-r--r--  contrib/ciabot/README | 11
-rwxr-xr-x  contrib/ciabot/ciabot.py | 255
-rwxr-xr-x  contrib/ciabot/ciabot.sh | 233
-rw-r--r--  contrib/completion/git-completion.bash | 291
-rw-r--r--  contrib/completion/git-completion.zsh | 27
-rw-r--r--  contrib/completion/git-prompt.sh | 347
-rwxr-xr-x  contrib/contacts/git-contacts | 205
-rw-r--r--  contrib/contacts/git-contacts.txt | 94
-rw-r--r--  contrib/continuous/cidaemon | 503
-rw-r--r--  contrib/continuous/post-receive-cinotify | 104
-rw-r--r--  contrib/credential/gnome-keyring/Makefile | 4
-rw-r--r--  contrib/credential/gnome-keyring/git-credential-gnome-keyring.c | 297
-rw-r--r--  contrib/credential/netrc/Makefile | 5
-rwxr-xr-x  contrib/credential/netrc/git-credential-netrc | 423
-rw-r--r--  contrib/credential/netrc/test.netrc | 13
-rwxr-xr-x  contrib/credential/netrc/test.pl | 106
-rw-r--r--  contrib/credential/osxkeychain/git-credential-osxkeychain.c | 12
-rwxr-xr-x  contrib/examples/git-log.sh | 15
-rwxr-xr-x  contrib/examples/git-merge.sh | 2
-rwxr-xr-x  contrib/examples/git-repack.sh | 194
-rwxr-xr-x  contrib/examples/git-whatchanged.sh | 28
-rwxr-xr-x  contrib/gitview/gitview | 2
-rwxr-xr-x  contrib/hg-to-git/hg-to-git.py | 2
-rw-r--r--  contrib/hooks/multimail/README | 486
-rw-r--r--  contrib/hooks/multimail/README.Git | 15
-rw-r--r--  contrib/hooks/multimail/README.migrate-from-post-receive-email | 145
-rwxr-xr-x  contrib/hooks/multimail/git_multimail.py | 2393
-rwxr-xr-x  contrib/hooks/multimail/migrate-mailhook-config | 269
-rwxr-xr-x  contrib/hooks/multimail/post-receive | 90
-rwxr-xr-x  contrib/hooks/post-receive-email | 32
-rw-r--r--  contrib/mw-to-git/.gitignore | 1
-rw-r--r--  contrib/mw-to-git/.perlcriticrc | 28
-rw-r--r--  contrib/mw-to-git/Git/Mediawiki.pm | 100
-rw-r--r--  contrib/mw-to-git/Makefile | 43
-rwxr-xr-x  contrib/mw-to-git/bin-wrapper/git | 14
-rwxr-xr-x  contrib/mw-to-git/git-mw.perl | 368
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl | 683
-rwxr-xr-x  contrib/mw-to-git/t/t9365-continuing-queries.sh | 23
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 29
-rw-r--r--  contrib/mw-to-git/t/test.config | 6
-rw-r--r--  contrib/patches/docbook-xsl-manpages-charmap.patch | 21
-rw-r--r--  contrib/remote-helpers/Makefile | 1
-rwxr-xr-x  contrib/remote-helpers/git-remote-bzr | 486
-rwxr-xr-x  contrib/remote-helpers/git-remote-hg | 728
-rwxr-xr-x  contrib/remote-helpers/test-bzr.sh | 430
-rwxr-xr-x  contrib/remote-helpers/test-hg-bidi.sh | 90
-rwxr-xr-x  contrib/remote-helpers/test-hg-hg-git.sh | 219
-rwxr-xr-x  contrib/remote-helpers/test-hg.sh | 705
-rw-r--r--  contrib/subtree/Makefile | 8
-rwxr-xr-x  contrib/subtree/git-subtree.sh | 7
-rw-r--r--  contrib/subtree/git-subtree.txt | 2
-rwxr-xr-x  contrib/subtree/t/t7900-subtree.sh | 8
55 files changed, 8004 insertions, 2817 deletions
diff --git a/contrib/blameview/README b/contrib/blameview/README
deleted file mode 100644
index fada5ce909..0000000000
--- a/contrib/blameview/README
+++ /dev/null
@@ -1,9 +0,0 @@
-This is a sample program to use 'git-blame --incremental', based
-on this message.
-
-From: Jeff King <peff@peff.net>
-Subject: Re: More precise tag following
-To: Linus Torvalds <torvalds@linux-foundation.org>
-Cc: git@vger.kernel.org
-Date: Sat, 27 Jan 2007 18:52:38 -0500
-Message-ID: <20070127235238.GA28706@coredump.intra.peff.net>
diff --git a/contrib/blameview/blameview.perl b/contrib/blameview/blameview.perl
deleted file mode 100755
index 1dec00137b..0000000000
--- a/contrib/blameview/blameview.perl
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/perl
-
-use Gtk2 -init;
-use Gtk2::SimpleList;
-
-my $hash;
-my $fn;
-if ( @ARGV == 1 ) {
- $hash = "HEAD";
- $fn = shift;
-} elsif ( @ARGV == 2 ) {
- $hash = shift;
- $fn = shift;
-} else {
- die "Usage blameview [<rev>] <filename>";
-}
-
-Gtk2::Rc->parse_string(<<'EOS');
-style "treeview_style"
-{
- GtkTreeView::vertical-separator = 0
-}
-class "GtkTreeView" style "treeview_style"
-EOS
-
-my $window = Gtk2::Window->new('toplevel');
-$window->signal_connect(destroy => sub { Gtk2->main_quit });
-my $vpan = Gtk2::VPaned->new();
-$window->add($vpan);
-my $scrolled_window = Gtk2::ScrolledWindow->new;
-$vpan->pack1($scrolled_window, 1, 1);
-my $fileview = Gtk2::SimpleList->new(
- 'Commit' => 'text',
- 'FileLine' => 'text',
- 'Data' => 'text'
-);
-$scrolled_window->add($fileview);
-$fileview->get_column(0)->set_spacing(0);
-$fileview->set_size_request(1024, 768);
-$fileview->set_rules_hint(1);
-$fileview->signal_connect (row_activated => sub {
- my ($sl, $path, $column) = @_;
- my $row_ref = $sl->get_row_data_from_path ($path);
- system("blameview @$row_ref[0]~1 $fn &");
- });
-
-my $commitwindow = Gtk2::ScrolledWindow->new();
-$commitwindow->set_policy ('GTK_POLICY_AUTOMATIC','GTK_POLICY_AUTOMATIC');
-$vpan->pack2($commitwindow, 1, 1);
-my $commit_text = Gtk2::TextView->new();
-my $commit_buffer = Gtk2::TextBuffer->new();
-$commit_text->set_buffer($commit_buffer);
-$commitwindow->add($commit_text);
-
-$fileview->signal_connect (cursor_changed => sub {
- my ($sl) = @_;
- my ($path, $focus_column) = $sl->get_cursor();
- my $row_ref = $sl->get_row_data_from_path ($path);
- my $c_fh;
- open($c_fh, '-|', "git cat-file commit @$row_ref[0]")
- or die "unable to find commit @$row_ref[0]";
- my @buffer = <$c_fh>;
- $commit_buffer->set_text("@buffer");
- close($c_fh);
- });
-
-my $fh;
-open($fh, '-|', "git cat-file blob $hash:$fn")
- or die "unable to open $fn: $!";
-
-while(<$fh>) {
- chomp;
- $fileview->{data}->[$.] = ['HEAD', "$fn:$.", $_];
-}
-
-my $blame;
-open($blame, '-|', qw(git blame --incremental --), $fn, $hash)
- or die "cannot start git-blame $fn";
-
-Glib::IO->add_watch(fileno($blame), 'in', \&read_blame_line);
-
-$window->show_all;
-Gtk2->main;
-exit 0;
-
-my %commitinfo = ();
-
-sub flush_blame_line {
- my ($attr) = @_;
-
- return unless defined $attr;
-
- my ($commit, $s_lno, $lno, $cnt) =
- @{$attr}{qw(COMMIT S_LNO LNO CNT)};
-
- my ($filename, $author, $author_time, $author_tz) =
- @{$commitinfo{$commit}}{qw(FILENAME AUTHOR AUTHOR-TIME AUTHOR-TZ)};
- my $info = $author . ' ' . format_time($author_time, $author_tz);
-
- for(my $i = 0; $i < $cnt; $i++) {
- @{$fileview->{data}->[$lno+$i-1]}[0,1,2] =
- (substr($commit, 0, 8), $filename . ':' . ($s_lno+$i));
- }
-}
-
-my $buf;
-my $current;
-sub read_blame_line {
-
- my $r = sysread($blame, $buf, 1024, length($buf));
- die "I/O error" unless defined $r;
-
- if ($r == 0) {
- flush_blame_line($current);
- $current = undef;
- return 0;
- }
-
- while ($buf =~ s/([^\n]*)\n//) {
- my $line = $1;
-
- if (($commit, $s_lno, $lno, $cnt) =
- ($line =~ /^([0-9a-f]{40}) (\d+) (\d+) (\d+)$/)) {
- flush_blame_line($current);
- $current = +{
- COMMIT => $1,
- S_LNO => $2,
- LNO => $3,
- CNT => $4,
- };
- next;
- }
-
- # extended attribute values
- if ($line =~ /^(author|author-mail|author-time|author-tz|committer|committer-mail|committer-time|committer-tz|summary|filename) (.*)$/) {
- my $commit = $current->{COMMIT};
- $commitinfo{$commit}{uc($1)} = $2;
- next;
- }
- }
- return 1;
-}
-
-sub format_time {
- my $time = shift;
- my $tz = shift;
-
- my $minutes = $tz < 0 ? 0-$tz : $tz;
- $minutes = ($minutes / 100)*60 + ($minutes % 100);
- $minutes = $tz < 0 ? 0-$minutes : $minutes;
- $time += $minutes * 60;
- my @t = gmtime($time);
- return sprintf('%04d-%02d-%02d %02d:%02d:%02d %s',
- $t[5] + 1900, @t[4,3,2,1,0], $tz);
-}
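For context on the viewer removed above: blameview parsed the machine-readable
stream of 'git blame --incremental', which is what the regular expressions in
read_blame_line matched. A rough sketch of that stream; the hash, line numbers
and author below are placeholders, not taken from any real repository:

    $ git blame --incremental -- some/file.c
    0123456789abcdef0123456789abcdef01234567 1 1 3
    author Jane Example
    author-mail <jane@example.org>
    author-time 1370000000
    author-tz -0500
    summary hypothetical commit subject
    filename some/file.c
    0123456789abcdef0123456789abcdef01234567 10 4 1
    filename some/file.c

Each group opens with "<commit> <source line> <result line> <line count>"; the
author/committer/summary headers are emitted only the first time a commit
appears, which is why the script cached them in %commitinfo.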
diff --git a/contrib/ciabot/INSTALL b/contrib/ciabot/INSTALL
deleted file mode 100644
index 7222961d35..0000000000
--- a/contrib/ciabot/INSTALL
+++ /dev/null
@@ -1,54 +0,0 @@
-= Installation instructions =
-
-Two scripts are included. The Python one (ciabot.py) is faster and
-more capable; the shell one (ciabot.sh) is a fallback in case Python
-gives your git hosting site indigestion. (I know of no such sites.)
-
-It is no longer necessary to modify the script in order to put it
-in place; in fact, this is now discouraged. It is entirely
-configurable with the following git config variables:
-
-ciabot.project = name of the project
-ciabot.repo = name of the project repo for gitweb/cgit purposes
-ciabot.xmlrpc = if true, ship notifications via XML-RPC
-ciabot.revformat = format in which the revision is shown
-
-The revformat variable may have the following values
-raw -> full hex ID of commit
-short -> first 12 chars of hex ID
-describe -> describe relative to last tag, falling back to short
-
-ciabot.project defaults to the directory name of the repository toplevel.
-ciabot.repo defaults to ciabot.project lowercased.
-ciabot.xmlrpc defaults to True
-ciabot.revformat defaults to 'describe'.
-
-This means that in the normal case you need not do any configuration at all,
-however setting ciabot.project will allow the hook to run slightly faster.
-
-Once you've set these variables, try your script with -n to see the
-notification message dumped to stdout and verify that it looks sane.
-
-To live-test these scripts, your project needs to have been registered with
-the CIA site. Here are the steps:
-
-1. Open an IRC window on irc://freenode/commits or your registered
- project IRC channel.
-
-2. Run ciabot.py and/or ciabot.sh from any directory under git
- control.
-
-You should see a notification on the channel for your most recent commit.
-
-After verifying correct function, install one of these scripts either
-in a post-commit hook or in an update hook.
-
-In post-commit, run it without arguments. It will query for
-current HEAD and the latest commit ID to get the information it
-needs.
-
-In update, call it with a refname followed by a list of commits:
-You want to reverse the order git rev-list emits because it lists
-from most recent to oldest.
-
-/path/to/ciabot.py ${refname} $(git rev-list ${oldhead}..${newhead} | tac)
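The INSTALL above relies entirely on git-config for setup. As a sketch of the
configuration it describes (the project and repository names here are
invented):

    git config ciabot.project "Example"      # defaults to the toplevel directory name
    git config ciabot.repo "example"         # defaults to ciabot.project, lowercased
    git config ciabot.xmlrpc true            # ship notifications via XML-RPC
    git config ciabot.revformat describe     # one of: raw, short, describe

    # dry run: dump the notification to stdout and check that it looks sane
    ciabot.py -n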
diff --git a/contrib/ciabot/README b/contrib/ciabot/README
deleted file mode 100644
index 2dfe1f91f5..0000000000
--- a/contrib/ciabot/README
+++ /dev/null
@@ -1,11 +0,0 @@
-These are hook scripts for the CIA notification service at <http://cia.vc/>
-
-They are maintained by Eric S. Raymond <esr@thyrsus.com>. There is an
-upstream resource page for them at <http://www.catb.org/esr/ciabot/>,
-but they are unlikely to change rapidly.
-
-You probably want the Python version; it's faster, more capable, and
-better documented. The shell version is maintained only as a fallback
-for use on hosting sites that don't permit Python hook scripts.
-
-See the file INSTALL for installation instructions.
diff --git a/contrib/ciabot/ciabot.py b/contrib/ciabot/ciabot.py
deleted file mode 100755
index 36b5665ff8..0000000000
--- a/contrib/ciabot/ciabot.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2010 Eric S. Raymond <esr@thyrsus.com>
-# Distributed under BSD terms.
-#
-# This script contains porcelain and porcelain byproducts.
-# It's Python because the Python standard libraries avoid portability/security
-# issues raised by callouts in the ancestral Perl and sh scripts. It should
-# be compatible back to Python 2.1.5
-#
-# usage: ciabot.py [-V] [-n] [-p projectname] [refname [commits...]]
-#
-# This script is meant to be run either in a post-commit hook or in an
-# update hook. Try it with -n to see the notification mail dumped to
-# stdout and verify that it looks sane. With -V it dumps its version
-# and exits.
-#
-# In post-commit, run it without arguments. It will query for
-# current HEAD and the latest commit ID to get the information it
-# needs.
-#
-# In update, call it with a refname followed by a list of commits:
-# You want to reverse the order git rev-list emits because it lists
-# from most recent to oldest.
-#
-# /path/to/ciabot.py ${refname} $(git rev-list ${oldhead}..${newhead} | tac)
-#
-# Configuration variables affecting this script:
-#
-# ciabot.project = name of the project
-# ciabot.repo = name of the project repo for gitweb/cgit purposes
-# ciabot.xmlrpc = if true (default), ship notifications via XML-RPC
-# ciabot.revformat = format in which the revision is shown
-#
-# ciabot.project defaults to the directory name of the repository toplevel.
-# ciabot.repo defaults to ciabot.project lowercased.
-#
-# This means that in the normal case you need not do any configuration at all,
-# but setting the project name will speed it up slightly.
-#
-# The revformat variable may have the following values
-# raw -> full hex ID of commit
-# short -> first 12 chars of hex ID
-# describe = -> describe relative to last tag, falling back to short
-# The default is 'describe'.
-#
-# Note: the CIA project now says only XML-RPC is reliable, so
-# we default to that.
-#
-
-import sys
-if sys.hexversion < 0x02000000:
- # The limiter is the xml.sax module
- sys.stderr.write("ciabot.py: requires Python 2.0.0 or later.\n")
- sys.exit(1)
-
-import os, commands, socket, urllib
-from xml.sax.saxutils import escape
-
-# Changeset URL prefix for your repo: when the commit ID is appended
-# to this, it should point at a CGI that will display the commit
-# through gitweb or something similar. The defaults will probably
-# work if you have a typical gitweb/cgit setup.
-#
-#urlprefix="http://%(host)s/cgi-bin/gitweb.cgi?p=%(repo)s;a=commit;h="
-urlprefix="http://%(host)s/cgi-bin/cgit.cgi/%(repo)s/commit/?id="
-
-# The service used to turn your gitwebbish URL into a tinyurl so it
-# will take up less space on the IRC notification line.
-tinyifier = "http://tinyurl.com/api-create.php?url="
-
-# The template used to generate the XML messages to CIA. You can make
-# visible changes to the IRC-bot notification lines by hacking this.
-# The default will produce a notfication line that looks like this:
-#
-# ${project}: ${author} ${repo}:${branch} * ${rev} ${files}: ${logmsg} ${url}
-#
-# By omitting $files you can collapse the files part to a single slash.
-xml = '''\
-<message>
- <generator>
- <name>CIA Python client for Git</name>
- <version>%(version)s</version>
- <url>%(generator)s</url>
- </generator>
- <source>
- <project>%(project)s</project>
- <branch>%(repo)s:%(branch)s</branch>
- </source>
- <timestamp>%(ts)s</timestamp>
- <body>
- <commit>
- <author>%(author)s</author>
- <revision>%(rev)s</revision>
- <files>
- %(files)s
- </files>
- <log>%(logmsg)s %(url)s</log>
- <url>%(url)s</url>
- </commit>
- </body>
-</message>
-'''
-
-#
-# No user-serviceable parts below this line:
-#
-
-# Where to ship e-mail notifications.
-toaddr = "cia@cia.vc"
-
-# Identify the generator script.
-# Should only change when the script itself gets a new home and maintainer.
-generator = "http://www.catb.org/~esr/ciabot.py"
-version = "3.6"
-
-def do(command):
- return commands.getstatusoutput(command)[1]
-
-def report(refname, merged, xmlrpc=True):
- "Generate a commit notification to be reported to CIA"
-
- # Try to tinyfy a reference to a web view for this commit.
- try:
- url = open(urllib.urlretrieve(tinyifier + urlprefix + merged)[0]).read()
- except:
- url = urlprefix + merged
-
- branch = os.path.basename(refname)
-
- # Compute a description for the revision
- if revformat == 'raw':
- rev = merged
- elif revformat == 'short':
- rev = ''
- else: # revformat == 'describe'
- rev = do("git describe %s 2>/dev/null" % merged)
- if not rev:
- rev = merged[:12]
-
- # Extract the meta-information for the commit
- files=do("git diff-tree -r --name-only '"+ merged +"' | sed -e '1d' -e 's-.*-<file>&</file>-'")
- metainfo = do("git log -1 '--pretty=format:%an <%ae>%n%at%n%s' " + merged)
- (author, ts, logmsg) = metainfo.split("\n")
- logmsg = escape(logmsg)
-
- # This discards the part of the author's address after @.
- # Might be be nice to ship the full email address, if not
- # for spammers' address harvesters - getting this wrong
- # would make the freenode #commits channel into harvester heaven.
- author = escape(author.replace("<", "").split("@")[0].split()[-1])
-
- # This ignores the timezone. Not clear what to do with it...
- ts = ts.strip().split()[0]
-
- context = locals()
- context.update(globals())
-
- out = xml % context
- mail = '''\
-Message-ID: <%(merged)s.%(author)s@%(project)s>
-From: %(fromaddr)s
-To: %(toaddr)s
-Content-type: text/xml
-Subject: DeliverXML
-
-%(out)s''' % locals()
-
- if xmlrpc:
- return out
- else:
- return mail
-
-if __name__ == "__main__":
- import getopt
-
- # Get all config variables
- revformat = do("git config --get ciabot.revformat")
- project = do("git config --get ciabot.project")
- repo = do("git config --get ciabot.repo")
- xmlrpc = do("git config --get ciabot.xmlrpc")
- xmlrpc = not (xmlrpc and xmlrpc == "false")
-
- host = socket.getfqdn()
- fromaddr = "CIABOT-NOREPLY@" + host
-
- try:
- (options, arguments) = getopt.getopt(sys.argv[1:], "np:xV")
- except getopt.GetoptError, msg:
- print "ciabot.py: " + str(msg)
- raise SystemExit, 1
-
- notify = True
- for (switch, val) in options:
- if switch == '-p':
- project = val
- elif switch == '-n':
- notify = False
- elif switch == '-x':
- xmlrpc = True
- elif switch == '-V':
- print "ciabot.py: version", version
- sys.exit(0)
-
- # The project variable defaults to the name of the repository toplevel.
- if not project:
- here = os.getcwd()
- while True:
- if os.path.exists(os.path.join(here, ".git")):
- project = os.path.basename(here)
- break
- elif here == '/':
- sys.stderr.write("ciabot.py: no .git below root!\n")
- sys.exit(1)
- here = os.path.dirname(here)
-
- if not repo:
- repo = project.lower()
-
- urlprefix = urlprefix % globals()
-
- # The script wants a reference to head followed by the list of
- # commit ID to report about.
- if len(arguments) == 0:
- refname = do("git symbolic-ref HEAD 2>/dev/null")
- merges = [do("git rev-parse HEAD")]
- else:
- refname = arguments[0]
- merges = arguments[1:]
-
- if notify:
- if xmlrpc:
- import xmlrpclib
- server = xmlrpclib.Server('http://cia.vc/RPC2');
- else:
- import smtplib
- server = smtplib.SMTP('localhost')
-
- for merged in merges:
- message = report(refname, merged, xmlrpc)
- if not notify:
- print message
- elif xmlrpc:
- try:
- # RPC server is flaky, this can fail due to timeout.
- server.hub.deliver(message)
- except socket.error, e:
- sys.stderr.write("%s\n" % e)
- else:
- server.sendmail(fromaddr, [toaddr], message)
-
- if notify:
- if not xmlrpc:
- server.quit()
-
-#End
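The header comments above spell out both hook modes. A minimal update-hook
wrapper along those lines might look like this (the install path is
hypothetical):

    #!/bin/sh
    # .git/hooks/update receives: refname, old head, new head
    refname=$1 oldhead=$2 newhead=$3
    # rev-list prints newest first, so reverse it before handing it over
    /path/to/ciabot.py "$refname" $(git rev-list "$oldhead..$newhead" | tac)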
diff --git a/contrib/ciabot/ciabot.sh b/contrib/ciabot/ciabot.sh
deleted file mode 100755
index 3fbbc534ae..0000000000
--- a/contrib/ciabot/ciabot.sh
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/bin/sh
-# Distributed under the terms of the GNU General Public License v2
-# Copyright (c) 2006 Fernando J. Pereda <ferdy@gentoo.org>
-# Copyright (c) 2008 Natanael Copa <natanael.copa@gmail.com>
-# Copyright (c) 2010 Eric S. Raymond <esr@thyrsus.com>
-# Assistance and review by Petr Baudis, author of ciabot.pl,
-# is gratefully acknowledged.
-#
-# This is a version 3.x of ciabot.sh; use -V to find the exact
-# version. Versions 1 and 2 were shipped in 2006 and 2008 and are not
-# version-stamped. The version 2 maintainer has passed the baton.
-#
-# Note: This script should be considered obsolete.
-# There is a faster, better-documented rewrite in Python: find it as ciabot.py
-# Use this only if your hosting site forbids Python hooks.
-# It requires: git(1), hostname(1), cut(1), sendmail(1), and wget(1).
-#
-# Originally based on Git ciabot.pl by Petr Baudis.
-# This script contains porcelain and porcelain byproducts.
-#
-# usage: ciabot.sh [-V] [-n] [-p projectname] [refname commit]
-#
-# This script is meant to be run either in a post-commit hook or in an
-# update hook. Try it with -n to see the notification mail dumped to
-# stdout and verify that it looks sane. With -V it dumps its version
-# and exits.
-#
-# In post-commit, run it without arguments. It will query for
-# current HEAD and the latest commit ID to get the information it
-# needs.
-#
-# In update, you have to call it once per merged commit:
-#
-# refname=$1
-# oldhead=$2
-# newhead=$3
-# for merged in $(git rev-list ${oldhead}..${newhead} | tac) ; do
-# /path/to/ciabot.sh ${refname} ${merged}
-# done
-#
-# The reason for the tac call is that git rev-list emits commits from
-# most recent to least - better to ship notifactions from oldest to newest.
-#
-# Configuration variables affecting this script:
-#
-# ciabot.project = name of the project
-# ciabot.repo = name of the project repo for gitweb/cgit purposes
-# ciabot.revformat = format in which the revision is shown
-#
-# ciabot.project defaults to the directory name of the repository toplevel.
-# ciabot.repo defaults to ciabot.project lowercased.
-#
-# This means that in the normal case you need not do any configuration at all,
-# but setting the project name will speed it up slightly.
-#
-# The revformat variable may have the following values
-# raw -> full hex ID of commit
-# short -> first 12 chars of hex ID
-# describe = -> describe relative to last tag, falling back to short
-# The default is 'describe'.
-#
-# Note: the shell ancestors of this script used mail, not XML-RPC, in
-# order to avoid stalling until timeout when the CIA XML-RPC server is
-# down. It is unknown whether this is still an issue in 2010, but
-# XML-RPC would be annoying to do from sh in any case. (XML-RPC does
-# have the advantage that it guarantees notification of multiple commits
-# shpped from an update in their actual order.)
-#
-
-# The project as known to CIA. You can set this with a -p option,
-# or let it default to the directory name of the repo toplevel.
-project=$(git config --get ciabot.project)
-
-if [ -z $project ]
-then
- here=`pwd`;
- while :; do
- if [ -d $here/.git ]
- then
- project=`basename $here`
- break
- elif [ $here = '/' ]
- then
- echo "ciabot.sh: no .git below root!"
- exit 1
- fi
- here=`dirname $here`
- done
-fi
-
-# Name of the repo for gitweb/cgit purposes
-repo=$(git config --get ciabot.repo)
-[ -z $repo] && repo=$(echo "${project}" | tr '[A-Z]' '[a-z]')
-
-# What revision format do we want in the summary?
-revformat=$(git config --get ciabot.revformat)
-
-# Fully qualified domain name of the repo host. You can hardwire this
-# to make the script faster. The -f option works under Linux and FreeBSD,
-# but not OpenBSD and NetBSD. But under OpenBSD and NetBSD,
-# hostname without options gives the FQDN.
-if hostname -f >/dev/null 2>&1
-then
- hostname=`hostname -f`
-else
- hostname=`hostname`
-fi
-
-# Changeset URL prefix for your repo: when the commit ID is appended
-# to this, it should point at a CGI that will display the commit
-# through gitweb or something similar. The defaults will probably
-# work if you have a typical gitweb/cgit setup.
-#urlprefix="http://${host}/cgi-bin/gitweb.cgi?p=${repo};a=commit;h="
-urlprefix="http://${host}/cgi-bin/cgit.cgi/${repo}/commit/?id="
-
-#
-# You probably will not need to change the following:
-#
-
-# Identify the script. The 'generator' variable should change only
-# when the script itself gets a new home and maintainer.
-generator="http://www.catb.org/~esr/ciabot/ciabot.sh"
-version=3.5
-
-# Addresses for the e-mail
-from="CIABOT-NOREPLY@${hostname}"
-to="cia@cia.vc"
-
-# SMTP client to use - may need to edit the absolute pathname for your system
-sendmail="sendmail -t -f ${from}"
-
-#
-# No user-serviceable parts below this line:
-#
-
-# Should include all places sendmail is likely to lurk.
-PATH="$PATH:/usr/sbin/"
-
-mode=mailit
-while getopts pnV opt
-do
- case $opt in
- p) project=$2; shift ; shift ;;
- n) mode=dumpit; shift ;;
- V) echo "ciabot.sh: version $version"; exit 0; shift ;;
- esac
-done
-
-# Cough and die if user has not specified a project
-if [ -z "$project" ]
-then
- echo "ciabot.sh: no project specified, bailing out." >&2
- exit 1
-fi
-
-if [ $# -eq 0 ] ; then
- refname=$(git symbolic-ref HEAD 2>/dev/null)
- merged=$(git rev-parse HEAD)
-else
- refname=$1
- merged=$2
-fi
-
-# This tries to turn your gitwebbish URL into a tinyurl so it will take up
-# less space on the IRC notification line. Some repo sites (I'm looking at
-# you, berlios.de!) forbid wget calls for security reasons. On these,
-# the code will fall back to the full un-tinyfied URL.
-longurl=${urlprefix}${merged}
-url=$(wget -O - -q http://tinyurl.com/api-create.php?url=${longurl} 2>/dev/null)
-if [ -z "$url" ]; then
- url="${longurl}"
-fi
-
-refname=${refname##refs/heads/}
-
-case $revformat in
-raw) rev=$merged ;;
-short) rev='' ;;
-*) rev=$(git describe ${merged} 2>/dev/null) ;;
-esac
-[ -z ${rev} ] && rev=$(echo "$merged" | cut -c 1-12)
-
-# We discard the part of the author's address after @.
-# Might be nice to ship the full email address, if not
-# for spammers' address harvesters - getting this wrong
-# would make the freenode #commits channel into harvester heaven.
-author=$(git log -1 '--pretty=format:%an <%ae>' $merged)
-author=$(echo "$author" | sed -n -e '/^.*<\([^@]*\).*$/s--\1-p')
-
-logmessage=$(git log -1 '--pretty=format:%s' $merged)
-ts=$(git log -1 '--pretty=format:%at' $merged)
-files=$(git diff-tree -r --name-only ${merged} | sed -e '1d' -e 's-.*-<file>&</file>-')
-
-out="
-<message>
- <generator>
- <name>CIA Shell client for Git</name>
- <version>${version}</version>
- <url>${generator}</url>
- </generator>
- <source>
- <project>${project}</project>
- <branch>$repo:${refname}</branch>
- </source>
- <timestamp>${ts}</timestamp>
- <body>
- <commit>
- <author>${author}</author>
- <revision>${rev}</revision>
- <files>
- ${files}
- </files>
- <log>${logmessage} ${url}</log>
- <url>${url}</url>
- </commit>
- </body>
-</message>"
-
-if [ "$mode" = "dumpit" ]
-then
- sendmail=cat
-fi
-
-${sendmail} << EOM
-Message-ID: <${merged}.${author}@${project}>
-From: ${from}
-To: ${to}
-Content-type: text/xml
-Subject: DeliverXML
-${out}
-EOM
-
-# vim: set tw=70 :
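Like its Python sibling, the shell script can be sanity-checked before being
wired into a hook; a short sketch, assuming the script is on $PATH:

    cd /path/to/working/copy
    ciabot.sh -V     # print the script version and exit
    ciabot.sh -n     # dump the notification mail to stdout instead of sending it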
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
index e769800393..dba3c15700 100644
--- a/contrib/completion/git-completion.bash
+++ b/contrib/completion/git-completion.bash
@@ -33,8 +33,6 @@ esac
# returns location of .git repo
__gitdir ()
{
- # Note: this function is duplicated in git-prompt.sh
- # When updating it, make sure you update the other one to match.
if [ -z "${1-}" ]; then
if [ -n "${__git_dir-}" ]; then
echo "$__git_dir"
@@ -53,19 +51,6 @@ __gitdir ()
fi
}
-__gitcomp_1 ()
-{
- local c IFS=$' \t\n'
- for c in $1; do
- c="$c$2"
- case $c in
- --*=*|*.) ;;
- *) c="$c " ;;
- esac
- printf '%s\n' "$c"
- done
-}
-
# The following function is based on code from:
#
# bash_completion - programmable completion functions for bash 3.2+
@@ -195,8 +180,18 @@ _get_comp_words_by_ref ()
}
fi
-# Generates completion reply with compgen, appending a space to possible
-# completion words, if necessary.
+__gitcompadd ()
+{
+ local i=0
+ for x in $1; do
+ if [[ "$x" == "$3"* ]]; then
+ COMPREPLY[i++]="$2$x$4"
+ fi
+ done
+}
+
+# Generates completion reply, appending a space to possible completion words,
+# if necessary.
# It accepts 1 to 4 arguments:
# 1: List of possible completion words.
# 2: A prefix to be added to each possible completion word (optional).
@@ -208,19 +203,25 @@ __gitcomp ()
case "$cur_" in
--*=)
- COMPREPLY=()
;;
*)
- local IFS=$'\n'
- COMPREPLY=($(compgen -P "${2-}" \
- -W "$(__gitcomp_1 "${1-}" "${4-}")" \
- -- "$cur_"))
+ local c i=0 IFS=$' \t\n'
+ for c in $1; do
+ c="$c${4-}"
+ if [[ $c == "$cur_"* ]]; then
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ COMPREPLY[i++]="${2-}$c"
+ fi
+ done
;;
esac
}
-# Generates completion reply with compgen from newline-separated possible
-# completion words by appending a space to all of them.
+# Generates completion reply from newline-separated possible completion words
+# by appending a space to all of them.
# It accepts 1 to 4 arguments:
# 1: List of possible completion words, separated by a single newline.
# 2: A prefix to be added to each possible completion word (optional).
@@ -231,7 +232,7 @@ __gitcomp ()
__gitcomp_nl ()
{
local IFS=$'\n'
- COMPREPLY=($(compgen -P "${2-}" -S "${4- }" -W "$1" -- "${3-$cur}"))
+ __gitcompadd "$1" "${2-}" "${3-$cur}" "${4- }"
}
# Generates completion reply with compgen from newline-separated possible
@@ -249,106 +250,50 @@ __gitcomp_file ()
# since tilde expansion is not applied.
# This means that COMPREPLY will be empty and Bash default
# completion will be used.
- COMPREPLY=($(compgen -P "${2-}" -W "$1" -- "${3-$cur}"))
-
- # Tell Bash that compspec generates filenames.
- compopt -o filenames 2>/dev/null
-}
-
-__git_index_file_list_filter_compat ()
-{
- local path
-
- while read -r path; do
- case "$path" in
- ?*/*) echo "${path%%/*}/" ;;
- *) echo "$path" ;;
- esac
- done
-}
-
-__git_index_file_list_filter_bash ()
-{
- local path
-
- while read -r path; do
- case "$path" in
- ?*/*)
- # XXX if we append a slash to directory names when using
- # `compopt -o filenames`, Bash will append another slash.
- # This is pretty stupid, and this the reason why we have to
- # define a compatible version for this function.
- echo "${path%%/*}" ;;
- *)
- echo "$path" ;;
- esac
- done
-}
+ __gitcompadd "$1" "${2-}" "${3-$cur}" ""
-# Process path list returned by "ls-files" and "diff-index --name-only"
-# commands, in order to list only file names relative to a specified
-# directory, and append a slash to directory names.
-__git_index_file_list_filter ()
-{
- # Default to Bash >= 4.x
- __git_index_file_list_filter_bash
+ # use a hack to enable file mode in bash < 4
+ compopt -o filenames +o nospace 2>/dev/null ||
+ compgen -f /non-existing-dir/ > /dev/null
}
-# Execute git ls-files, returning paths relative to the directory
-# specified in the first argument, and using the options specified in
-# the second argument.
+# Execute 'git ls-files', unless the --committable option is specified, in
+# which case it runs 'git diff-index' to find out the files that can be
+# committed. It returns paths relative to the directory specified in the first
+# argument, using the options specified in the second argument.
__git_ls_files_helper ()
{
(
test -n "${CDPATH+set}" && unset CDPATH
- # NOTE: $2 is not quoted in order to support multiple options
- cd "$1" && git ls-files --exclude-standard $2
+ cd "$1"
+ if [ "$2" == "--committable" ]; then
+ git diff-index --name-only --relative HEAD
+ else
+ # NOTE: $2 is not quoted in order to support multiple options
+ git ls-files --exclude-standard $2
+ fi
) 2>/dev/null
}
-# Execute git diff-index, returning paths relative to the directory
-# specified in the first argument, and using the tree object id
-# specified in the second argument.
-__git_diff_index_helper ()
-{
- (
- test -n "${CDPATH+set}" && unset CDPATH
- cd "$1" && git diff-index --name-only --relative "$2"
- ) 2>/dev/null
-}
-
# __git_index_files accepts 1 or 2 arguments:
# 1: Options to pass to ls-files (required).
-# Supported options are --cached, --modified, --deleted, --others,
-# and --directory.
# 2: A directory path (optional).
# If provided, only files within the specified directory are listed.
# Sub directories are never recursed. Path must have a trailing
# slash.
__git_index_files ()
{
- local dir="$(__gitdir)" root="${2-.}"
+ local dir="$(__gitdir)" root="${2-.}" file
if [ -d "$dir" ]; then
- __git_ls_files_helper "$root" "$1" | __git_index_file_list_filter |
- sort | uniq
- fi
-}
-
-# __git_diff_index_files accepts 1 or 2 arguments:
-# 1) The id of a tree object.
-# 2) A directory path (optional).
-# If provided, only files within the specified directory are listed.
-# Sub directories are never recursed. Path must have a trailing
-# slash.
-__git_diff_index_files ()
-{
- local dir="$(__gitdir)" root="${2-.}"
-
- if [ -d "$dir" ]; then
- __git_diff_index_helper "$root" "$1" | __git_index_file_list_filter |
- sort | uniq
+ __git_ls_files_helper "$root" "$1" |
+ while read -r file; do
+ case "$file" in
+ ?*/*) echo "${file%%/*}" ;;
+ *) echo "$file" ;;
+ esac
+ done | sort | uniq
fi
}
@@ -424,14 +369,8 @@ __git_refs ()
done
;;
*)
- git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \
- while read -r hash i; do
- case "$i" in
- *^{}) ;;
- refs/*) echo "${i#refs/*/}" ;;
- *) echo "$i" ;;
- esac
- done
+ echo "HEAD"
+ git for-each-ref --format="%(refname:short)" -- "refs/remotes/$dir/" | sed -e "s#^$dir/##"
;;
esac
}
@@ -549,44 +488,23 @@ __git_complete_revlist_file ()
}
-# __git_complete_index_file requires 1 argument: the options to pass to
-# ls-file
+# __git_complete_index_file requires 1 argument:
+# 1: the options to pass to ls-files
+#
+# The exception is --committable, which finds the files appropriate for the commit.
__git_complete_index_file ()
{
- local pfx cur_="$cur"
+ local pfx="" cur_="$cur"
case "$cur_" in
?*/*)
pfx="${cur_%/*}"
cur_="${cur_##*/}"
pfx="${pfx}/"
-
- __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
- ;;
- *)
- __gitcomp_file "$(__git_index_files "$1")" "" "$cur_"
;;
esac
-}
-
-# __git_complete_diff_index_file requires 1 argument: the id of a tree
-# object
-__git_complete_diff_index_file ()
-{
- local pfx cur_="$cur"
-
- case "$cur_" in
- ?*/*)
- pfx="${cur_%/*}"
- cur_="${cur_##*/}"
- pfx="${pfx}/"
- __gitcomp_file "$(__git_diff_index_files "$1" "$pfx")" "$pfx" "$cur_"
- ;;
- *)
- __gitcomp_file "$(__git_diff_index_files "$1")" "" "$cur_"
- ;;
- esac
+ __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
}
__git_complete_file ()
@@ -614,7 +532,6 @@ __git_complete_remote_or_refspec ()
case "$cmd" in
push) no_complete_refspec=1 ;;
fetch)
- COMPREPLY=()
return
;;
*) ;;
@@ -630,7 +547,6 @@ __git_complete_remote_or_refspec ()
return
fi
if [ $no_complete_refspec = 1 ]; then
- COMPREPLY=()
return
fi
[ "$remote" = "." ] && remote=
@@ -732,6 +648,7 @@ __git_list_porcelain_commands ()
cat-file) : plumbing;;
check-attr) : plumbing;;
check-ignore) : plumbing;;
+ check-mailmap) : plumbing;;
check-ref-format) : plumbing;;
checkout-index) : plumbing;;
commit-tree) : plumbing;;
@@ -951,7 +868,6 @@ _git_am ()
"
return
esac
- COMPREPLY=()
}
_git_apply ()
@@ -971,7 +887,6 @@ _git_apply ()
"
return
esac
- COMPREPLY=()
}
_git_add ()
@@ -986,7 +901,7 @@ _git_add ()
esac
# XXX should we check for --update and --all options ?
- __git_complete_index_file "--others --modified"
+ __git_complete_index_file "--others --modified --directory --no-empty-directory"
}
_git_archive ()
@@ -1031,7 +946,6 @@ _git_bisect ()
__gitcomp_nl "$(__git_refs)"
;;
*)
- COMPREPLY=()
;;
esac
}
@@ -1124,9 +1038,14 @@ _git_cherry ()
_git_cherry_pick ()
{
+ local dir="$(__gitdir)"
+ if [ -f "$dir"/CHERRY_PICK_HEAD ]; then
+ __gitcomp "--continue --quit --abort"
+ return
+ fi
case "$cur" in
--*)
- __gitcomp "--edit --no-commit"
+ __gitcomp "--edit --no-commit --signoff --strategy= --mainline"
;;
*)
__gitcomp_nl "$(__git_refs)"
@@ -1144,7 +1063,7 @@ _git_clean ()
esac
# XXX should we check for -x option ?
- __git_complete_index_file "--others"
+ __git_complete_index_file "--others --directory"
}
_git_clone ()
@@ -1170,7 +1089,6 @@ _git_clone ()
return
;;
esac
- COMPREPLY=()
}
_git_commit ()
@@ -1211,7 +1129,7 @@ _git_commit ()
esac
if git rev-parse --verify --quiet HEAD >/dev/null; then
- __git_complete_diff_index_file "HEAD"
+ __git_complete_index_file "--committable"
else
# This is the first commit
__git_complete_index_file "--cached"
@@ -1244,7 +1162,7 @@ __git_diff_common_options="--stat --numstat --shortstat --summary
--no-prefix --src-prefix= --dst-prefix=
--inter-hunk-context=
--patience --histogram --minimal
- --raw
+ --raw --word-diff
--dirstat --dirstat= --dirstat-by-file
--dirstat-by-file= --cumulative
--diff-algorithm=
@@ -1270,7 +1188,7 @@ _git_diff ()
__git_complete_revlist_file
}
-__git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff
+__git_mergetools_common="diffuse diffmerge ecmerge emerge kdiff3 meld opendiff
tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 codecompare
"
@@ -1292,7 +1210,7 @@ _git_difftool ()
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
__git_fetch_options="
@@ -1312,11 +1230,12 @@ _git_fetch ()
}
__git_format_patch_options="
- --stdout --attach --no-attach --thread --thread= --output-directory
+ --stdout --attach --no-attach --thread --thread= --no-thread
--numbered --start-number --numbered-files --keep-subject --signoff
--signature --no-signature --in-reply-to= --cc= --full-index --binary
--not --all --cover-letter --no-prefix --src-prefix= --dst-prefix=
--inline --suffix= --ignore-if-in-upstream --subject-prefix=
+ --output-directory --reroll-count --to= --quiet --notes
"
_git_format_patch ()
@@ -1347,7 +1266,6 @@ _git_fsck ()
return
;;
esac
- COMPREPLY=()
}
_git_gc ()
@@ -1358,7 +1276,6 @@ _git_gc ()
return
;;
esac
- COMPREPLY=()
}
_git_gitk ()
@@ -1435,7 +1352,6 @@ _git_init ()
return
;;
esac
- COMPREPLY=()
}
_git_ls_files ()
@@ -1571,7 +1487,6 @@ _git_mergetool ()
return
;;
esac
- COMPREPLY=()
}
_git_merge_base ()
@@ -1812,7 +1727,7 @@ __git_config_get_set_variables ()
_git_config ()
{
case "$prev" in
- branch.*.remote)
+ branch.*.remote|branch.*.pushremote)
__gitcomp_nl "$(__git_remotes)"
return
;;
@@ -1824,11 +1739,15 @@ _git_config ()
__gitcomp "false true"
return
;;
+ remote.pushdefault)
+ __gitcomp_nl "$(__git_remotes)"
+ return
+ ;;
remote.*.fetch)
local remote="${prev#remote.}"
remote="${remote%.fetch}"
if [ -z "$cur" ]; then
- COMPREPLY=("refs/heads/")
+ __gitcomp_nl "refs/heads/" "" "" ""
return
fi
__gitcomp_nl "$(__git_refs_remotes "$remote")"
@@ -1892,7 +1811,6 @@ _git_config ()
return
;;
*.*)
- COMPREPLY=()
return
;;
esac
@@ -1909,7 +1827,7 @@ _git_config ()
;;
branch.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
- __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur_"
+ __gitcomp "remote pushremote merge mergeoptions rebase" "$pfx" "$cur_"
return
;;
branch.*)
@@ -2040,7 +1958,6 @@ _git_config ()
core.fileMode
core.fsyncobjectfiles
core.gitProxy
- core.ignoreCygwinFSTricks
core.ignoreStat
core.ignorecase
core.logAllRefUpdates
@@ -2204,6 +2121,7 @@ _git_config ()
receive.fsckObjects
receive.unpackLimit
receive.updateserverinfo
+ remote.pushdefault
remotes.
repack.usedeltabaseoffset
rerere.autoupdate
@@ -2274,7 +2192,6 @@ _git_remote ()
__gitcomp "$c"
;;
*)
- COMPREPLY=()
;;
esac
}
@@ -2358,7 +2275,7 @@ _git_show ()
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
_git_show_branch ()
@@ -2390,8 +2307,6 @@ _git_stash ()
*)
if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
__gitcomp "$subcommands"
- else
- COMPREPLY=()
fi
;;
esac
@@ -2404,14 +2319,12 @@ _git_stash ()
__gitcomp "--index --quiet"
;;
show,--*|drop,--*|branch,--*)
- COMPREPLY=()
;;
show,*|apply,*|drop,*|pop,*|branch,*)
__gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \
| sed -n -e 's/:.*//p')"
;;
*)
- COMPREPLY=()
;;
esac
fi
@@ -2421,7 +2334,7 @@ _git_submodule ()
{
__git_has_doubledash && return
- local subcommands="add status init update summary foreach sync"
+ local subcommands="add status init deinit update summary foreach sync"
if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then
case "$cur" in
--*)
@@ -2453,7 +2366,7 @@ _git_svn ()
--no-metadata --use-svm-props --use-svnsync-props
--log-window-size= --no-checkout --quiet
--repack-flags --use-log-author --localtime
- --ignore-paths= $remote_opts
+ --ignore-paths= --include-paths= $remote_opts
"
local init_opts="
--template= --shared= --trunk= --tags=
@@ -2528,7 +2441,6 @@ _git_svn ()
__gitcomp "--revision= --parent"
;;
*)
- COMPREPLY=()
;;
esac
fi
@@ -2553,13 +2465,10 @@ _git_tag ()
case "$prev" in
-m|-F)
- COMPREPLY=()
;;
-*|tag)
if [ $f = 1 ]; then
__gitcomp_nl "$(__git_tags)"
- else
- COMPREPLY=()
fi
;;
*)
@@ -2581,9 +2490,10 @@ __git_main ()
i="${words[c]}"
case "$i" in
--git-dir=*) __git_dir="${i#--git-dir=}" ;;
+ --git-dir) ((c++)) ; __git_dir="${words[c]}" ;;
--bare) __git_dir="." ;;
--help) command="help"; break ;;
- -c) c=$((++c)) ;;
+ -c|--work-tree|--namespace) ((c++)) ;;
-*) ;;
*) command="$i"; break ;;
esac
@@ -2601,6 +2511,7 @@ __git_main ()
--exec-path
--exec-path=
--html-path
+ --man-path
--info-path
--work-tree=
--namespace=
@@ -2669,7 +2580,7 @@ if [[ -n ${ZSH_VERSION-} ]]; then
--*=*|*.) ;;
*) c="$c " ;;
esac
- array[$#array+1]="$c"
+ array[${#array[@]}+1]="$c"
done
compset -P '*[=:]'
compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
@@ -2695,35 +2606,19 @@ if [[ -n ${ZSH_VERSION-} ]]; then
compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
}
- __git_zsh_helper ()
- {
- emulate -L ksh
- local cur cword prev
- cur=${words[CURRENT-1]}
- prev=${words[CURRENT-2]}
- let cword=CURRENT-1
- __${service}_main
- }
-
_git ()
{
- emulate -L zsh
- local _ret=1
- __git_zsh_helper
- let _ret && _default -S '' && _ret=0
+ local _ret=1 cur cword prev
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+ emulate ksh -c __${service}_main
+ let _ret && _default && _ret=0
return _ret
}
compdef _git git gitk
return
-elif [[ -n ${BASH_VERSION-} ]]; then
- if ((${BASH_VERSINFO[0]} < 4)); then
- # compopt is not supported
- __git_index_file_list_filter ()
- {
- __git_index_file_list_filter_compat
- }
- fi
fi
__git_func_wrap ()
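The completion rewrite above drops the compgen-based helpers in favour of
__gitcompadd, which fills COMPREPLY directly. A small standalone sketch of its
behaviour (the word list is invented; in real use the completion machinery
supplies it):

    # arguments: 1 = word list, 2 = prefix, 3 = current word, 4 = suffix
    . git-completion.bash
    COMPREPLY=()
    __gitcompadd "checkout cherry cherry-pick clean" "" "che" " "
    printf '%s\n' "${COMPREPLY[@]}"
    # checkout
    # cherry
    # cherry-pick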
diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh
index 2565d2eef4..fac5e711eb 100644
--- a/contrib/completion/git-completion.zsh
+++ b/contrib/completion/git-completion.zsh
@@ -4,18 +4,17 @@
#
# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
#
-# You need git's bash completion script installed somewhere, by default on the
-# same directory as this script.
+# You need git's bash completion script installed somewhere, by default it
+# would be the location bash-completion uses.
#
-# If your script is on ~/.git-completion.sh instead, you can configure it on
-# your ~/.zshrc:
+# If your script is somewhere else, you can configure it on your ~/.zshrc:
#
# zstyle ':completion:*:*:git:*' script ~/.git-completion.sh
#
-# The recommended way to install this script is to copy to
-# '~/.zsh/completion/_git', and then add the following to your ~/.zshrc file:
+# The recommended way to install this script is to copy to '~/.zsh/_git', and
+# then add the following to your ~/.zshrc file:
#
-# fpath=(~/.zsh/completion $fpath)
+# fpath=(~/.zsh $fpath)
complete ()
{
@@ -27,7 +26,19 @@ zstyle -T ':completion:*:*:git:*' tag-order && \
zstyle ':completion:*:*:git:*' tag-order 'common-commands'
zstyle -s ":completion:*:*:git:*" script script
-test -z "$script" && script="$(dirname ${funcsourcetrace[1]%:*})"/git-completion.bash
+if [ -z "$script" ]; then
+ local -a locations
+ local e
+ locations=(
+ '/etc/bash_completion.d/git' # fedora, old debian
+ '/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian
+ '/usr/share/bash-completion/git' # gentoo
+ $(dirname ${funcsourcetrace[1]%:*})/git-completion.bash
+ )
+ for e in $locations; do
+ test -f $e && script="$e" && break
+ done
+fi
ZSH_VERSION='' . "$script"
__gitcomp ()
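The zsh wrapper now probes the common bash-completion install locations listed
above before falling back to its own directory. A sketch of the recommended
setup from the header comments (the paths are the documented defaults, and the
compinit call is the standard zsh completion bootstrap rather than anything
this script adds):

    # ~/.zshrc
    fpath=(~/.zsh $fpath)                 # after copying this script to ~/.zsh/_git
    autoload -Uz compinit && compinit
    # only needed if git-completion.bash lives somewhere unusual:
    zstyle ':completion:*:*:git:*' script ~/.git-completion.bash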
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
index 054c52e90a..7b732d2aeb 100644
--- a/contrib/completion/git-prompt.sh
+++ b/contrib/completion/git-prompt.sh
@@ -3,7 +3,7 @@
# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
# Distributed under the GNU General Public License, version 2.0.
#
-# This script allows you to see the current branch in your prompt.
+# This script allows you to see repository status in your prompt.
#
# To enable:
#
@@ -13,23 +13,27 @@
# 3a) Change your PS1 to call __git_ps1 as
# command-substitution:
# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
-# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+# ZSH: setopt PROMPT_SUBST ; PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
# the optional argument will be used as format string.
-# 3b) Alternatively, if you are using bash, __git_ps1 can be
-# used for PROMPT_COMMAND with two parameters, <pre> and
-# <post>, which are strings you would put in $PS1 before
-# and after the status string generated by the git-prompt
-# machinery. e.g.
-# PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
-# will show username, at-sign, host, colon, cwd, then
-# various status string, followed by dollar and SP, as
-# your prompt.
+# 3b) Alternatively, for a slightly faster prompt, __git_ps1 can
+# be used for PROMPT_COMMAND in Bash or for precmd() in Zsh
+# with two parameters, <pre> and <post>, which are strings
+# you would put in $PS1 before and after the status string
+# generated by the git-prompt machinery. e.g.
+# Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
+# will show username, at-sign, host, colon, cwd, then
+# various status string, followed by dollar and SP, as
+# your prompt.
+# ZSH: precmd () { __git_ps1 "%n" ":%~$ " "|%s" }
+# will show username, pipe, then various status string,
+# followed by colon, cwd, dollar and SP, as your prompt.
# Optionally, you can supply a third argument with a printf
# format string to finetune the output of the branch status
#
-# The argument to __git_ps1 will be displayed only if you are currently
-# in a git repository. The %s token will be the name of the current
-# branch.
+# The repository status will be displayed only if you are currently in a
+# git repository. The %s token is the placeholder for the shown status.
+#
+# The prompt status always includes the current branch name.
#
# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
# unstaged (*) and staged (+) changes will be shown next to the branch
@@ -56,6 +60,7 @@
# of values:
#
# verbose show number of commits ahead/behind (+/-) upstream
+# name if verbose, then also show the upstream abbrev name
# legacy don't use the '--count' option available in recent
# versions of git-rev-list
# git always compare HEAD to @{upstream}
@@ -77,31 +82,12 @@
#
# If you would like a colored hint about the current dirty state, set
# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
-# the colored output of "git status -sb".
+# the colored output of "git status -sb" and are available only when
+# using __git_ps1 for PROMPT_COMMAND or precmd.
-# __gitdir accepts 0 or 1 arguments (i.e., location)
-# returns location of .git repo
-__gitdir ()
-{
- # Note: this function is duplicated in git-completion.bash
- # When updating it, make sure you update the other one to match.
- if [ -z "${1-}" ]; then
- if [ -n "${__git_dir-}" ]; then
- echo "$__git_dir"
- elif [ -n "${GIT_DIR-}" ]; then
- test -d "${GIT_DIR-}" || return 1
- echo "$GIT_DIR"
- elif [ -d .git ]; then
- echo .git
- else
- git rev-parse --git-dir 2>/dev/null
- fi
- elif [ -d "$1/.git" ]; then
- echo "$1/.git"
- else
- echo "$1"
- fi
-}
+# check whether printf supports -v
+__git_printf_supports_v=
+printf -v __git_printf_supports_v -- '%s' yes >/dev/null 2>&1
# stores the divergence from upstream in $p
# used by GIT_PS1_SHOWUPSTREAM
@@ -109,7 +95,7 @@ __git_ps1_show_upstream ()
{
local key value
local svn_remote svn_url_pattern count n
- local upstream=git legacy="" verbose=""
+ local upstream=git legacy="" verbose="" name=""
svn_remote=()
# get some config options from git-config
@@ -124,8 +110,8 @@ __git_ps1_show_upstream ()
fi
;;
svn-remote.*.url)
- svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value"
- svn_url_pattern+="\\|$value"
+ svn_remote[$((${#svn_remote[@]} + 1))]="$value"
+ svn_url_pattern="$svn_url_pattern\\|$value"
upstream=svn+git # default upstream is SVN if available, else git
;;
esac
@@ -137,6 +123,7 @@ __git_ps1_show_upstream ()
git|svn) upstream="$option" ;;
verbose) verbose=1 ;;
legacy) legacy=1 ;;
+ name) name=1 ;;
esac
done
@@ -146,10 +133,11 @@ __git_ps1_show_upstream ()
svn*)
# get the upstream from the "git-svn-id: ..." in a commit message
# (git-svn uses essentially the same procedure internally)
- local svn_upstream=($(git log --first-parent -1 \
+ local -a svn_upstream
+ svn_upstream=($(git log --first-parent -1 \
--grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
if [[ 0 -ne ${#svn_upstream[@]} ]]; then
- svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]}
+ svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]}
svn_upstream=${svn_upstream%@*}
local n_stop="${#svn_remote[@]}"
for ((n=1; n <= n_stop; n++)); do
@@ -218,10 +206,58 @@ __git_ps1_show_upstream ()
*) # diverged from upstream
p=" u+${count#* }-${count% *}" ;;
esac
+ if [[ -n "$count" && -n "$name" ]]; then
+ p="$p $(git rev-parse --abbrev-ref "$upstream" 2>/dev/null)"
+ fi
fi
}
+# Helper function that is meant to be called from __git_ps1. It
+# injects color codes into the appropriate gitstring variables used
+# to build a gitstring.
+__git_ps1_colorize_gitstring ()
+{
+ if [[ -n ${ZSH_VERSION-} ]]; then
+ local c_red='%F{red}'
+ local c_green='%F{green}'
+ local c_lblue='%F{blue}'
+ local c_clear='%f'
+ else
+ # Using \[ and \] around colors is necessary to prevent
+ # issues with command line editing/browsing/completion!
+ local c_red='\[\e[31m\]'
+ local c_green='\[\e[32m\]'
+ local c_lblue='\[\e[1;34m\]'
+ local c_clear='\[\e[0m\]'
+ fi
+ local bad_color=$c_red
+ local ok_color=$c_green
+ local flags_color="$c_lblue"
+
+ local branch_color=""
+ if [ $detached = no ]; then
+ branch_color="$ok_color"
+ else
+ branch_color="$bad_color"
+ fi
+ c="$branch_color$c"
+
+ z="$c_clear$z"
+ if [ "$w" = "*" ]; then
+ w="$bad_color$w"
+ fi
+ if [ -n "$i" ]; then
+ i="$ok_color$i"
+ fi
+ if [ -n "$s" ]; then
+ s="$flags_color$s"
+ fi
+ if [ -n "$u" ]; then
+ u="$bad_color$u"
+ fi
+ r="$c_clear$r"
+}
# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
# when called from PS1 using command substitution
@@ -254,41 +290,83 @@ __git_ps1 ()
;;
esac
- local g="$(__gitdir)"
- if [ -z "$g" ]; then
+ local repo_info rev_parse_exit_code
+ repo_info="$(git rev-parse --git-dir --is-inside-git-dir \
+ --is-bare-repository --is-inside-work-tree \
+ --short HEAD 2>/dev/null)"
+ rev_parse_exit_code="$?"
+
+ if [ -z "$repo_info" ]; then
if [ $pcmode = yes ]; then
#In PC mode PS1 always needs to be set
PS1="$ps1pc_start$ps1pc_end"
fi
- else
- local r=""
- local b=""
+ return
+ fi
+
+ local short_sha
+ if [ "$rev_parse_exit_code" = "0" ]; then
+ short_sha="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ fi
+ local inside_worktree="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local bare_repo="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local inside_gitdir="${repo_info##*$'\n'}"
+ local g="${repo_info%$'\n'*}"
+
+ local r=""
+ local b=""
+ local step=""
+ local total=""
+ if [ -d "$g/rebase-merge" ]; then
+ read b 2>/dev/null <"$g/rebase-merge/head-name"
+ read step 2>/dev/null <"$g/rebase-merge/msgnum"
+ read total 2>/dev/null <"$g/rebase-merge/end"
if [ -f "$g/rebase-merge/interactive" ]; then
r="|REBASE-i"
- b="$(cat "$g/rebase-merge/head-name")"
- elif [ -d "$g/rebase-merge" ]; then
+ else
r="|REBASE-m"
- b="$(cat "$g/rebase-merge/head-name")"
+ fi
+ else
+ if [ -d "$g/rebase-apply" ]; then
+ read step 2>/dev/null <"$g/rebase-apply/next"
+ read total 2>/dev/null <"$g/rebase-apply/last"
+ if [ -f "$g/rebase-apply/rebasing" ]; then
+ read b 2>/dev/null <"$g/rebase-apply/head-name"
+ r="|REBASE"
+ elif [ -f "$g/rebase-apply/applying" ]; then
+ r="|AM"
+ else
+ r="|AM/REBASE"
+ fi
+ elif [ -f "$g/MERGE_HEAD" ]; then
+ r="|MERGING"
+ elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
+ r="|CHERRY-PICKING"
+ elif [ -f "$g/REVERT_HEAD" ]; then
+ r="|REVERTING"
+ elif [ -f "$g/BISECT_LOG" ]; then
+ r="|BISECTING"
+ fi
+
+ if [ -n "$b" ]; then
+ :
+ elif [ -h "$g/HEAD" ]; then
+ # symlink symbolic ref
+ b="$(git symbolic-ref HEAD 2>/dev/null)"
else
- if [ -d "$g/rebase-apply" ]; then
- if [ -f "$g/rebase-apply/rebasing" ]; then
- r="|REBASE"
- elif [ -f "$g/rebase-apply/applying" ]; then
- r="|AM"
- else
- r="|AM/REBASE"
+ local head=""
+ if ! read head 2>/dev/null <"$g/HEAD"; then
+ if [ $pcmode = yes ]; then
+ PS1="$ps1pc_start$ps1pc_end"
fi
- elif [ -f "$g/MERGE_HEAD" ]; then
- r="|MERGING"
- elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
- r="|CHERRY-PICKING"
- elif [ -f "$g/REVERT_HEAD" ]; then
- r="|REVERTING"
- elif [ -f "$g/BISECT_LOG" ]; then
- r="|BISECTING"
+ return
fi
-
- b="$(git symbolic-ref HEAD 2>/dev/null)" || {
+ # is it a symbolic ref?
+ b="${head#ref: }"
+ if [ "$head" = "$b" ]; then
detached=yes
b="$(
case "${GIT_PS1_DESCRIBE_STYLE-}" in
@@ -302,100 +380,75 @@ __git_ps1 ()
git describe --tags --exact-match HEAD ;;
esac 2>/dev/null)" ||
- b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
- b="unknown"
+ b="$short_sha..."
b="($b)"
- }
+ fi
fi
+ fi
+
+ if [ -n "$step" ] && [ -n "$total" ]; then
+ r="$r $step/$total"
+ fi
- local w=""
- local i=""
- local s=""
- local u=""
- local c=""
- local p=""
+ local w=""
+ local i=""
+ local s=""
+ local u=""
+ local c=""
+ local p=""
- if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
- if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
- c="BARE:"
+ if [ "true" = "$inside_gitdir" ]; then
+ if [ "true" = "$bare_repo" ]; then
+ c="BARE:"
+ else
+ b="GIT_DIR!"
+ fi
+ elif [ "true" = "$inside_worktree" ]; then
+ if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
+ [ "$(git config --bool bash.showDirtyState)" != "false" ]
+ then
+ git diff --no-ext-diff --quiet --exit-code || w="*"
+ if [ -n "$short_sha" ]; then
+ git diff-index --cached --quiet HEAD -- || i="+"
else
- b="GIT_DIR!"
- fi
- elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
- if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
- [ "$(git config --bool bash.showDirtyState)" != "false" ]
- then
- git diff --no-ext-diff --quiet --exit-code || w="*"
- if git rev-parse --quiet --verify HEAD >/dev/null; then
- git diff-index --cached --quiet HEAD -- || i="+"
- else
- i="#"
- fi
- fi
- if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
- git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
+ i="#"
fi
+ fi
+ if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ] &&
+ [ -r "$g/refs/stash" ]; then
+ s="$"
+ fi
- if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
- [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
- [ -n "$(git ls-files --others --exclude-standard)" ]
- then
- u="%${ZSH_VERSION+%}"
- fi
+ if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
+ [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
+ git ls-files --others --exclude-standard --error-unmatch -- '*' >/dev/null 2>/dev/null
+ then
+ u="%${ZSH_VERSION+%}"
+ fi
- if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
- __git_ps1_show_upstream
- fi
+ if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+ __git_ps1_show_upstream
fi
+ fi
- local f="$w$i$s$u"
- if [ $pcmode = yes ]; then
- local gitstring=
- if [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
- local c_red='\e[31m'
- local c_green='\e[32m'
- local c_lblue='\e[1;34m'
- local c_clear='\e[0m'
- local bad_color=$c_red
- local ok_color=$c_green
- local branch_color="$c_clear"
- local flags_color="$c_lblue"
- local branchstring="$c${b##refs/heads/}"
+ local z="${GIT_PS1_STATESEPARATOR-" "}"
- if [ $detached = no ]; then
- branch_color="$ok_color"
- else
- branch_color="$bad_color"
- fi
+ # NO color option unless in PROMPT_COMMAND mode
+ if [ $pcmode = yes ] && [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
+ __git_ps1_colorize_gitstring
+ fi
- # Setting gitstring directly with \[ and \] around colors
- # is necessary to prevent wrapping issues!
- gitstring="\[$branch_color\]$branchstring\[$c_clear\]"
+ local f="$w$i$s$u"
+ local gitstring="$c${b##refs/heads/}${f:+$z$f}$r$p"
- if [ -n "$w$i$s$u$r$p" ]; then
- gitstring="$gitstring "
- fi
- if [ "$w" = "*" ]; then
- gitstring="$gitstring\[$bad_color\]$w"
- fi
- if [ -n "$i" ]; then
- gitstring="$gitstring\[$ok_color\]$i"
- fi
- if [ -n "$s" ]; then
- gitstring="$gitstring\[$flags_color\]$s"
- fi
- if [ -n "$u" ]; then
- gitstring="$gitstring\[$bad_color\]$u"
- fi
- gitstring="$gitstring\[$c_clear\]$r$p"
- else
- gitstring="$c${b##refs/heads/}${f:+ $f}$r$p"
- fi
+ if [ $pcmode = yes ]; then
+ if [ "${__git_printf_supports_v-}" != yes ]; then
gitstring=$(printf -- "$printf_format" "$gitstring")
- PS1="$ps1pc_start$gitstring$ps1pc_end"
else
- # NO color option unless in PROMPT_COMMAND mode
- printf -- "$printf_format" "$c${b##refs/heads/}${f:+ $f}$r$p"
+ printf -v gitstring -- "$printf_format" "$gitstring"
fi
+ PS1="$ps1pc_start$gitstring$ps1pc_end"
+ else
+ printf -- "$printf_format" "$gitstring"
fi
}
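The rewritten prompt reads everything it needs from a single 'git rev-parse'
invocation and documents a PROMPT_COMMAND/precmd mode. A sketch of enabling it
in bash per option 3b above (the source path stands in for wherever the script
is installed):

    # ~/.bashrc
    . /path/to/git-prompt.sh
    GIT_PS1_SHOWDIRTYSTATE=1
    GIT_PS1_SHOWUPSTREAM="verbose name"   # 'name' is the value added in this patch
    GIT_PS1_SHOWCOLORHINTS=1              # color hints only apply in this mode
    PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'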
diff --git a/contrib/contacts/git-contacts b/contrib/contacts/git-contacts
new file mode 100755
index 0000000000..428cc1a9a1
--- /dev/null
+++ b/contrib/contacts/git-contacts
@@ -0,0 +1,205 @@
+#!/usr/bin/perl
+
+# List people who might be interested in a patch. Useful as the argument to
+# git-send-email --cc-cmd option, and in other situations.
+#
+# Usage: git contacts <file | rev-list option> ...
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+my $since = '5-years-ago';
+my $min_percent = 10;
+my $labels_rx = qr/Signed-off-by|Reviewed-by|Acked-by|Cc/i;
+my %seen;
+
+sub format_contact {
+ my ($name, $email) = @_;
+ return "$name <$email>";
+}
+
+sub parse_commit {
+ my ($commit, $data) = @_;
+ my $contacts = $commit->{contacts};
+ my $inbody = 0;
+ for (split(/^/m, $data)) {
+ if (not $inbody) {
+ if (/^author ([^<>]+) <(\S+)> .+$/) {
+ $contacts->{format_contact($1, $2)} = 1;
+ } elsif (/^$/) {
+ $inbody = 1;
+ }
+ } elsif (/^$labels_rx:\s+([^<>]+)\s+<(\S+?)>$/o) {
+ $contacts->{format_contact($1, $2)} = 1;
+ }
+ }
+}
+
+sub import_commits {
+ my ($commits) = @_;
+ return unless %$commits;
+ my $pid = open2 my $reader, my $writer, qw(git cat-file --batch);
+ for my $id (keys(%$commits)) {
+ print $writer "$id\n";
+ my $line = <$reader>;
+ if ($line =~ /^([0-9a-f]{40}) commit (\d+)/) {
+ my ($cid, $len) = ($1, $2);
+ die "expected $id but got $cid\n" unless $id eq $cid;
+ my $data;
+ # cat-file emits newline after data, so read len+1
+ read $reader, $data, $len + 1;
+ parse_commit($commits->{$id}, $data);
+ }
+ }
+ close $reader;
+ close $writer;
+ waitpid($pid, 0);
+ die "git-cat-file error: $?\n" if $?;
+}
+
+sub get_blame {
+ my ($commits, $source, $from, $ranges) = @_;
+ return unless @$ranges;
+ open my $f, '-|',
+ qw(git blame --porcelain -C),
+ map({"-L$_->[0],+$_->[1]"} @$ranges),
+ '--since', $since, "$from^", '--', $source or die;
+ while (<$f>) {
+ if (/^([0-9a-f]{40}) \d+ \d+ \d+$/) {
+ my $id = $1;
+ $commits->{$id} = { id => $id, contacts => {} }
+ unless $seen{$id};
+ $seen{$id} = 1;
+ }
+ }
+ close $f;
+}
+
+sub blame_sources {
+ my ($sources, $commits) = @_;
+ for my $s (keys %$sources) {
+ for my $id (keys %{$sources->{$s}}) {
+ get_blame($commits, $s, $id, $sources->{$s}{$id});
+ }
+ }
+}
+
+sub scan_patches {
+ my ($sources, $id, $f) = @_;
+ my $source;
+ while (<$f>) {
+ if (/^From ([0-9a-f]{40}) Mon Sep 17 00:00:00 2001$/) {
+ $id = $1;
+ $seen{$id} = 1;
+ }
+ next unless $id;
+ if (m{^--- (?:a/(.+)|/dev/null)$}) {
+ $source = $1;
+ } elsif (/^--- /) {
+ die "Cannot parse hunk source: $_\n";
+ } elsif (/^@@ -(\d+)(?:,(\d+))?/ && $source) {
+ my $len = defined($2) ? $2 : 1;
+ push @{$sources->{$source}{$id}}, [$1, $len] if $len;
+ }
+ }
+}
+
+sub scan_patch_file {
+ my ($commits, $file) = @_;
+ open my $f, '<', $file or die "read failure: $file: $!\n";
+ scan_patches($commits, undef, $f);
+ close $f;
+}
+
+sub parse_rev_args {
+ my @args = @_;
+ open my $f, '-|',
+ qw(git rev-parse --revs-only --default HEAD --symbolic), @args
+ or die;
+ my @revs;
+ while (<$f>) {
+ chomp;
+ push @revs, $_;
+ }
+ close $f;
+ return @revs if scalar(@revs) != 1;
+ return "^$revs[0]", 'HEAD' unless $revs[0] =~ /^-/;
+ return $revs[0], 'HEAD';
+}
+
+sub scan_rev_args {
+ my ($commits, $args) = @_;
+ my @revs = parse_rev_args(@$args);
+ open my $f, '-|', qw(git rev-list --reverse), @revs or die;
+ while (<$f>) {
+ chomp;
+ my $id = $_;
+ $seen{$id} = 1;
+ open my $g, '-|', qw(git show -C --oneline), $id or die;
+ scan_patches($commits, $id, $g);
+ close $g;
+ }
+ close $f;
+}
+
+sub mailmap_contacts {
+ my ($contacts) = @_;
+ my %mapped;
+ my $pid = open2 my $reader, my $writer, qw(git check-mailmap --stdin);
+ for my $contact (keys(%$contacts)) {
+ print $writer "$contact\n";
+ my $canonical = <$reader>;
+ chomp $canonical;
+ $mapped{$canonical} += $contacts->{$contact};
+ }
+ close $reader;
+ close $writer;
+ waitpid($pid, 0);
+ die "git-check-mailmap error: $?\n" if $?;
+ return \%mapped;
+}
+
+if (!@ARGV) {
+ die "No input revisions or patch files\n";
+}
+
+my (@files, @rev_args);
+for (@ARGV) {
+ if (-e) {
+ push @files, $_;
+ } else {
+ push @rev_args, $_;
+ }
+}
+
+my %sources;
+for (@files) {
+ scan_patch_file(\%sources, $_);
+}
+if (@rev_args) {
+ scan_rev_args(\%sources, \@rev_args)
+}
+
+my $toplevel = `git rev-parse --show-toplevel`;
+chomp $toplevel;
+chdir($toplevel) or die "chdir failure: $toplevel: $!\n";
+
+my %commits;
+blame_sources(\%sources, \%commits);
+import_commits(\%commits);
+
+my $contacts = {};
+for my $commit (values %commits) {
+ for my $contact (keys %{$commit->{contacts}}) {
+ $contacts->{$contact}++;
+ }
+}
+$contacts = mailmap_contacts($contacts);
+
+my $ncommits = scalar(keys %commits);
+for my $contact (keys %$contacts) {
+ my $percent = $contacts->{$contact} * 100 / $ncommits;
+ next if $percent < $min_percent;
+ print "$contact\n";
+}
diff --git a/contrib/contacts/git-contacts.txt b/contrib/contacts/git-contacts.txt
new file mode 100644
index 0000000000..dd914d1261
--- /dev/null
+++ b/contrib/contacts/git-contacts.txt
@@ -0,0 +1,94 @@
+git-contacts(1)
+===============
+
+NAME
+----
+git-contacts - List people who might be interested in a set of changes
+
+
+SYNOPSIS
+--------
+[verse]
+'git contacts' (<patch>|<range>|<rev>)...
+
+
+DESCRIPTION
+-----------
+
+Given a set of changes, specified as patch files or revisions, determine people
+who might be interested in those changes. This is done by consulting the
+history of each patch or revision hunk to find people mentioned by commits
+which touched the lines of files under consideration.
+
+Input consists of one or more patch files or revision arguments. A revision
+argument can be a range or a single `<rev>` which is interpreted as
+`<rev>..HEAD`, thus the same revision arguments are accepted as for
+linkgit:git-format-patch[1]. Patch files and revision arguments can be combined
+in the same invocation.
+
+This command can be useful for determining the list of people with whom to
+discuss proposed changes, or for finding the list of recipients to Cc: when
+submitting a patch series via `git send-email`. For the latter case, `git
+contacts` can be used as the argument to `git send-email`'s `--cc-cmd` option.
+
+
+DISCUSSION
+----------
+
+`git blame` is invoked for each hunk in a patch file or revision. For each
+commit mentioned by `git blame`, the commit message is consulted for people who
+authored, reviewed, signed, acknowledged, or were Cc:'d. Once the list of
+participants is known, each person's relevance is computed by considering how
+many commits mentioned that person compared with the total number of commits
+under consideration. The final output consists only of participants who exceed
+a minimum threshold of participation.
+
+
+OUTPUT
+------
+
+For each person of interest, a single line is output, terminated by a newline.
+If the person's name is known, ``Name $$<user@host>$$'' is printed; otherwise
+only ``$$<user@host>$$'' is printed.
+
+
+EXAMPLES
+--------
+
+* Consult patch files:
++
+------------
+$ git contacts feature/*.patch
+------------
+
+* Revision range:
++
+------------
+$ git contacts R1..R2
+------------
+
+* From a single revision to `HEAD`:
++
+------------
+$ git contacts origin
+------------
+
+* Helper for `git send-email`:
++
+------------
+$ git send-email --cc-cmd='git contacts' feature/*.patch
+------------
+
+
+LIMITATIONS
+-----------
+
+Several conditions controlling a person's significance are currently
+hard-coded, such as minimum participation level (10%), blame date-limiting (5
+years), and `-C` level for detecting moved and copied lines (a single `-C`). In
+the future, these conditions may become configurable.
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
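
The DISCUSSION section above says `git blame` is run once per hunk;
get_blame() in the script shows the exact flags.  For a single hunk this
amounts to roughly the following (the file, line range, and commit are
hypothetical):

	# hypothetical hunk starting at line 42, 7 lines long, from commit 1234abcd
	git blame --porcelain -C -L42,+7 --since 5-years-ago 1234abcd^ -- builtin/log.c

The commit IDs in the porcelain output are then fed to `git cat-file --batch`
to collect authors and Signed-off-by/Reviewed-by/Acked-by/Cc trailers.
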
diff --git a/contrib/continuous/cidaemon b/contrib/continuous/cidaemon
deleted file mode 100644
index 4009a151de..0000000000
--- a/contrib/continuous/cidaemon
+++ /dev/null
@@ -1,503 +0,0 @@
-#!/usr/bin/perl
-#
-# A daemon that waits for update events sent by its companion
-# post-receive-cinotify hook, checks out a new copy of source,
-# compiles it, and emails the guilty parties if the compile
-# (and optionally test suite) fails.
-#
-# To use this daemon, configure it and run it. It will disconnect
-# from your terminal and fork into the background. The daemon must
-# have local filesystem access to the source repositories, as it
-# uses objects/info/alternates to avoid copying objects.
-#
-# Add its companion post-receive-cinotify hook as the post-receive
-# hook to each repository that the daemon should monitor. Yes, a
-# single daemon can monitor more than one repository.
-#
-# To use multiple daemons on the same system, give them each a
-# unique queue file and tmpdir.
-#
-# Global Config
-# -------------
-# Reads from a Git style configuration file. This will be
-# ~/.gitconfig by default but can be overridden by setting
-# the GIT_CONFIG_FILE environment variable before starting.
-#
-# cidaemon.smtpHost
-# Hostname of the SMTP server the daemon will send email
-# through. Defaults to 'localhost'.
-#
-# cidaemon.smtpUser
-# Username to authenticate to the SMTP server as. This
-# variable is optional; if it is not supplied then no
-# authentication will be performed.
-#
-# cidaemon.smtpPassword
-# Password to authenticate to the SMTP server as. This
-# variable is optional. If not supplied but smtpUser was,
-# the daemon prompts for the password before forking into
-# the background.
-#
-# cidaemon.smtpAuth
-# Type of authentication to perform with the SMTP server.
-# If set to 'login' and smtpUser was defined, this will
-# use the AUTH LOGIN command, which is suitable for use
-# with at least one version of Microsoft Exchange Server.
-# If not set the daemon will use whatever auth methods
-# are supported by your version of Net::SMTP.
-#
-# cidaemon.email
-# Email address that daemon generated emails will be sent
-# from. This should be a useful email address within your
-# organization. Required.
-#
-# cidaemon.name
-# Human friendly name that the daemon will send emails as.
-# Defaults to 'cidaemon'.
-#
-# cidaemon.scanDelay
-# Number of seconds to sleep between polls of the queue file.
-# Defaults to 60.
-#
-# cidaemon.recentCache
-# Number of recent commit SHA-1s per repository to cache and
-# skip building if they appear again. This is useful to avoid
-# rebuilding the same commit multiple times just because it was
-# pushed into more than one branch. Defaults to 100.
-#
-# cidaemon.tmpdir
-# Scratch directory to create the builds within. The daemon
-# makes a new subdirectory for each build, then deletes it when
-# the build has finished. The pid file is also placed here.
-# Defaults to '/tmp'.
-#
-# cidaemon.queue
-# Path to the queue file that the post-receive-cinotify hook
-# appends events to. This file is polled by the daemon. It
-# must not be on an NFS mount (uses flock). Required.
-#
-# cidaemon.nocc
-# Perl regex patterns to match against author and committer
-# lines. If a pattern matches, that author or committer will
-# not be notified of a build failure.
-#
-# Per Repository Config
-# ----------------------
-# Read from the source repository's config file.
-#
-# builder.command
-# Shell command to execute the build. This command must
-# return 0 on "success" and non-zero on failure. If you
-# also want to run a test suite, make sure your command
-# does that too. Required.
-#
-# builder.queue
-# Queue file to notify the cidaemon through. Should match
-# cidaemon.queue. If not set the hook will not notify the
-# cidaemon.
-#
-# builder.skip
-# Perl regex patterns of refs that should not be sent to
-# cidaemon. Updates of these refs will be ignored.
-#
-# builder.newBranchBase
-# Glob patterns of refs that should be used to form the
-# 'old' revisions of a newly created ref. This should be set
-# to globs that match your 'mainline' branches. This
-# way a build failure of a brand new topic branch does not
-# attempt to email everyone since the beginning of time;
-# instead it only emails those authors of commits not in
-# these 'mainline' branches.
-
-local $ENV{PATH} = join ':', qw(
- /opt/git/bin
- /usr/bin
- /bin
- );
-
-use strict;
-use warnings;
-use FindBin qw($RealBin);
-use File::Spec;
-use lib File::Spec->catfile($RealBin, '..', 'perl5');
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-use POSIX qw(strftime);
-use Getopt::Long qw(:config no_auto_abbrev auto_help);
-
-sub git_config ($;$)
-{
- my $var = shift;
- my $required = shift || 0;
- local *GIT;
- open GIT, '-|','git','config','--get',$var;
- my $r = <GIT>;
- chop $r if $r;
- close GIT;
- die "error: $var not set.\n" if ($required && !$r);
- return $r;
-}
-
-package EXCHANGE_NET_SMTP;
-
-# Microsoft Exchange Server requires an 'AUTH LOGIN'
-# style of authentication. This is different from
-# the default supported by Net::SMTP so we subclass
-# and override the auth method to support that.
-
-use Net::SMTP;
-use Net::Cmd;
-use MIME::Base64 qw(encode_base64);
-our @ISA = qw(Net::SMTP);
-our $auth_type = ::git_config 'cidaemon.smtpAuth';
-
-sub new
-{
- my $self = shift;
- my $type = ref($self) || $self;
- $type->SUPER::new(@_);
-}
-
-sub auth
-{
- my $self = shift;
- return $self->SUPER::auth(@_) unless $auth_type eq 'login';
-
- my $user = encode_base64 shift, '';
- my $pass = encode_base64 shift, '';
- return 0 unless CMD_MORE == $self->command("AUTH LOGIN")->response;
- return 0 unless CMD_MORE == $self->command($user)->response;
- CMD_OK == $self->command($pass)->response;
-}
-
-package main;
-
-my ($debug_flag, %recent);
-
-my $ex_host = git_config('cidaemon.smtpHost') || 'localhost';
-my $ex_user = git_config('cidaemon.smtpUser');
-my $ex_pass = git_config('cidaemon.smtpPassword');
-
-my $ex_from_addr = git_config('cidaemon.email', 1);
-my $ex_from_name = git_config('cidaemon.name') || 'cidaemon';
-
-my $scan_delay = git_config('cidaemon.scanDelay') || 60;
-my $recent_size = git_config('cidaemon.recentCache') || 100;
-my $tmpdir = git_config('cidaemon.tmpdir') || '/tmp';
-my $queue_name = git_config('cidaemon.queue', 1);
-my $queue_lock = "$queue_name.lock";
-
-my @nocc_list;
-open GIT,'git config --get-all cidaemon.nocc|';
-while (<GIT>) {
- chop;
- push @nocc_list, $_;
-}
-close GIT;
-
-sub nocc_author ($)
-{
- local $_ = shift;
- foreach my $pat (@nocc_list) {
- return 1 if /$pat/;
- }
- 0;
-}
-
-sub input_echo ($)
-{
- my $prompt = shift;
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- chop $input;
- return $input;
-}
-
-sub input_noecho ($)
-{
- my $prompt = shift;
-
- my $end = sub {system('stty','echo');print "\n";exit};
- local $SIG{TERM} = $end;
- local $SIG{INT} = $end;
- system('stty','-echo');
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- system('stty','echo');
- print "\n";
- chop $input;
- return $input;
-}
-
-sub rfc2822_date ()
-{
- strftime("%a, %d %b %Y %H:%M:%S %Z", localtime);
-}
-
-sub send_email ($$$)
-{
- my ($subj, $body, $to) = @_;
- my $now = rfc2822_date;
- my $to_str = '';
- my @rcpt_to;
- foreach (@$to) {
- my $s = $_;
- $s =~ s/^/"/;
- $s =~ s/(\s+<)/"$1/;
- $to_str .= ', ' if $to_str;
- $to_str .= $s;
- push @rcpt_to, $1 if $s =~ /<(.*)>/;
- }
- die "Nobody to send to.\n" unless @rcpt_to;
- my $msg = <<EOF;
-From: "$ex_from_name" <$ex_from_addr>
-To: $to_str
-Date: $now
-Subject: $subj
-
-$body
-EOF
-
- my $smtp = EXCHANGE_NET_SMTP->new(Host => $ex_host)
- or die "Cannot connect to $ex_host: $!\n";
- if ($ex_user && $ex_pass) {
- $smtp->auth($ex_user,$ex_pass)
- or die "$ex_host rejected $ex_user\n";
- }
- $smtp->mail($ex_from_addr)
- or die "$ex_host rejected $ex_from_addr\n";
- scalar($smtp->recipient(@rcpt_to, { SkipBad => 1 }))
- or die "$ex_host did not accept any addresses.\n";
- $smtp->data($msg)
- or die "$ex_host rejected message data\n";
- $smtp->quit;
-}
-
-sub pop_queue ()
-{
- open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
- flock LOCK, LOCK_EX;
-
- my $queue = -f $queue_name ? retrieve $queue_name : [];
- my $ent = shift @$queue;
- nstore $queue, $queue_name;
-
- flock LOCK, LOCK_UN;
- close LOCK;
- $ent;
-}
-
-sub git_exec (@)
-{
- system('git',@_) == 0 or die "Cannot git " . join(' ', @_) . "\n";
-}
-
-sub git_val (@)
-{
- open(C, '-|','git',@_);
- my $r = <C>;
- chop $r if $r;
- close C;
- $r;
-}
-
-sub do_build ($$)
-{
- my ($git_dir, $new) = @_;
-
- my $tmp = File::Spec->catfile($tmpdir, "builder$$");
- system('rm','-rf',$tmp) == 0 or die "Cannot clear $tmp\n";
- die "Cannot clear $tmp.\n" if -e $tmp;
-
- my $result = 1;
- eval {
- my $command;
- {
- local $ENV{GIT_DIR} = $git_dir;
- $command = git_val 'config','builder.command';
- }
- die "No builder.command for $git_dir.\n" unless $command;
-
- git_exec 'clone','-n','-l','-s',$git_dir,$tmp;
- chmod 0700, $tmp or die "Cannot lock $tmp\n";
- chdir $tmp or die "Cannot enter $tmp\n";
-
- git_exec 'update-ref','HEAD',$new;
- git_exec 'read-tree','-m','-u','HEAD','HEAD';
- system $command;
- if ($? == -1) {
- print STDERR "failed to execute '$command': $!\n";
- $result = 1;
- } elsif ($? & 127) {
- my $sig = $? & 127;
- print STDERR "'$command' died from signal $sig\n";
- $result = 1;
- } else {
- my $r = $? >> 8;
- print STDERR "'$command' exited with $r\n" if $r;
- $result = $r;
- }
- };
- if ($@) {
- $result = 2;
- print STDERR "$@\n";
- }
-
- chdir '/';
- system('rm','-rf',$tmp);
- rmdir $tmp;
- $result;
-}
-
-sub build_failed ($$$$$)
-{
- my ($git_dir, $ref, $old, $new, $msg) = @_;
-
- $git_dir =~ m,/([^/]+)$,;
- my $repo_name = $1;
- $ref =~ s,^refs/(heads|tags)/,,;
-
- my %authors;
- my $shortlog;
- my $revstr;
- {
- local $ENV{GIT_DIR} = $git_dir;
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- open LOG,'-|','git','rev-list','--pretty=raw',@revs;
- while (<LOG>) {
- if (s/^(author|committer) //) {
- chomp;
- s/>.*$/>/;
- $authors{$_} = 1 unless nocc_author $_;
- }
- }
- close LOG;
- open LOG,'-|','git','shortlog',@revs;
- $shortlog .= $_ while <LOG>;
- close LOG;
- $revstr = join(' ', @revs);
- }
-
- my @to = sort keys %authors;
- unless (@to) {
- print STDERR "error: No authors in $revstr\n";
- return;
- }
-
- my $subject = "[$repo_name] $ref : Build Failed";
- my $body = <<EOF;
-Project: $git_dir
-Branch: $ref
-Commits: $revstr
-
-$shortlog
-Build Output:
---------------------------------------------------------------
-$msg
-EOF
- send_email($subject, $body, \@to);
-}
-
-sub run_build ($$$$)
-{
- my ($git_dir, $ref, $old, $new) = @_;
-
- if ($debug_flag) {
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- print "BUILDING $git_dir\n";
- print " BRANCH: $ref\n";
- print " COMMITS: ", join(' ', @revs), "\n";
- }
-
- local(*R, *W);
- pipe R, W or die "cannot pipe builder: $!";
-
- my $builder = fork();
- if (!defined $builder) {
- die "cannot fork builder: $!";
- } elsif (0 == $builder) {
- close R;
- close STDIN;open(STDIN, '/dev/null');
- open(STDOUT, '>&W');
- open(STDERR, '>&W');
- exit do_build $git_dir, $new;
- } else {
- close W;
- my $out = '';
- $out .= $_ while <R>;
- close R;
- waitpid $builder, 0;
- build_failed $git_dir, $ref, $old, $new, $out if $?;
- }
-
- print "DONE\n\n" if $debug_flag;
-}
-
-sub daemon_loop ()
-{
- my $run = 1;
- my $stop_sub = sub {$run = 0};
- $SIG{HUP} = $stop_sub;
- $SIG{INT} = $stop_sub;
- $SIG{TERM} = $stop_sub;
-
- mkdir $tmpdir, 0755;
- my $pidfile = File::Spec->catfile($tmpdir, "cidaemon.pid");
- open(O, ">$pidfile"); print O "$$\n"; close O;
-
- while ($run) {
- my $ent = pop_queue;
- if ($ent) {
- my ($git_dir, $ref, $old, $new) = @$ent;
-
- $ent = $recent{$git_dir};
- $recent{$git_dir} = $ent = [[], {}] unless $ent;
- my ($rec_arr, $rec_hash) = @$ent;
- next if $rec_hash->{$new}++;
- while (@$rec_arr >= $recent_size) {
- my $to_kill = shift @$rec_arr;
- delete $rec_hash->{$to_kill};
- }
- push @$rec_arr, $new;
-
- run_build $git_dir, $ref, $old, $new;
- } else {
- sleep $scan_delay;
- }
- }
-
- unlink $pidfile;
-}
-
-$debug_flag = 0;
-GetOptions(
- 'debug|d' => \$debug_flag,
- 'smtp-user=s' => \$ex_user,
-) or die "usage: $0 [--debug] [--smtp-user=user]\n";
-
-$ex_pass = input_noecho("$ex_user SMTP password: ")
- if ($ex_user && !$ex_pass);
-
-if ($debug_flag) {
- daemon_loop;
- exit 0;
-}
-
-my $daemon = fork();
-if (!defined $daemon) {
- die "cannot fork daemon: $!";
-} elsif (0 == $daemon) {
- close STDIN;open(STDIN, '/dev/null');
- close STDOUT;open(STDOUT, '>/dev/null');
- close STDERR;open(STDERR, '>/dev/null');
- daemon_loop;
- exit 0;
-} else {
- print "Daemon $daemon running in the background.\n";
-}
diff --git a/contrib/continuous/post-receive-cinotify b/contrib/continuous/post-receive-cinotify
deleted file mode 100644
index b8f5a609af..0000000000
--- a/contrib/continuous/post-receive-cinotify
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/perl
-#
-# A hook that notifies its companion cidaemon through a simple
-# queue file that a ref has been updated via a push (actually
-# by a receive-pack running on the server).
-#
-# See cidaemon for per-repository configuration details.
-#
-# To use this hook, add it as the post-receive hook, make it
-# executable, and set its configuration options.
-#
-
-local $ENV{PATH} = '/opt/git/bin';
-
-use strict;
-use warnings;
-use File::Spec;
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-
-my $git_dir = File::Spec->rel2abs($ENV{GIT_DIR});
-my $queue_name = `git config --get builder.queue`;chop $queue_name;
-$queue_name =~ m,^([^\s]+)$,; $queue_name = $1; # untaint
-unless ($queue_name) {
- 1 while <STDIN>;
- print STDERR "\nerror: builder.queue not set. Not enqueing.\n\n";
- exit;
-}
-my $queue_lock = "$queue_name.lock";
-
-my @skip;
-open S, "git config --get-all builder.skip|";
-while (<S>) {
- chop;
- push @skip, $_;
-}
-close S;
-
-my @new_branch_base;
-open S, "git config --get-all builder.newBranchBase|";
-while (<S>) {
- chop;
- push @new_branch_base, $_;
-}
-close S;
-
-sub skip ($)
-{
- local $_ = shift;
- foreach my $p (@skip) {
- return 1 if /^$p/;
- }
- 0;
-}
-
-open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
-flock LOCK, LOCK_EX;
-
-my $queue = -f $queue_name ? retrieve $queue_name : [];
-my %existing;
-foreach my $r (@$queue) {
- my ($gd, $ref) = @$r;
- $existing{$gd}{$ref} = $r;
-}
-
-my @new_branch_commits;
-my $loaded_new_branch_commits = 0;
-
-while (<STDIN>) {
- chop;
- my ($old, $new, $ref) = split / /, $_, 3;
-
- next if $old eq $new;
- next if $new =~ /^0{40}$/;
- next if skip $ref;
-
- my $r = $existing{$git_dir}{$ref};
- if ($r) {
- $r->[3] = $new;
- } else {
- if ($old =~ /^0{40}$/) {
- if (!$loaded_new_branch_commits && @new_branch_base) {
- open M,'-|','git','show-ref',@new_branch_base;
- while (<M>) {
- ($_) = split / /, $_;
- push @new_branch_commits, $_;
- }
- close M;
- $loaded_new_branch_commits = 1;
- }
- $old = [@new_branch_commits];
- } else {
- $old = [$old];
- }
-
- $r = [$git_dir, $ref, $old, $new];
- $existing{$git_dir}{$ref} = $r;
- push @$queue, $r;
- }
-}
-nstore $queue, $queue_name;
-
-flock LOCK, LOCK_UN;
-close LOCK;
diff --git a/contrib/credential/gnome-keyring/Makefile b/contrib/credential/gnome-keyring/Makefile
index e6561d8db6..c3c7c98aa1 100644
--- a/contrib/credential/gnome-keyring/Makefile
+++ b/contrib/credential/gnome-keyring/Makefile
@@ -8,8 +8,8 @@ CFLAGS = -g -O2 -Wall
-include ../../../config.mak.autogen
-include ../../../config.mak
-INCS:=$(shell pkg-config --cflags gnome-keyring-1)
-LIBS:=$(shell pkg-config --libs gnome-keyring-1)
+INCS:=$(shell pkg-config --cflags gnome-keyring-1 glib-2.0)
+LIBS:=$(shell pkg-config --libs gnome-keyring-1 glib-2.0)
SRCS:=$(MAIN).c
OBJS:=$(SRCS:.c=.o)
diff --git a/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
index f2cdefee60..635c96bc56 100644
--- a/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
+++ b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
@@ -25,133 +25,150 @@
#include <stdio.h>
#include <string.h>
-#include <stdarg.h>
#include <stdlib.h>
-#include <errno.h>
+#include <glib.h>
#include <gnome-keyring.h>
-/*
- * This credential struct and API is simplified from git's credential.{h,c}
- */
-struct credential
-{
- char *protocol;
- char *host;
- unsigned short port;
- char *path;
- char *username;
- char *password;
-};
+#ifdef GNOME_KEYRING_DEFAULT
-#define CREDENTIAL_INIT \
- { NULL,NULL,0,NULL,NULL,NULL }
+ /* Modern gnome-keyring */
-void credential_init(struct credential *c);
-void credential_clear(struct credential *c);
-int credential_read(struct credential *c);
-void credential_write(const struct credential *c);
+#include <gnome-keyring-memory.h>
-typedef int (*credential_op_cb)(struct credential*);
+#else
-struct credential_operation
-{
- char *name;
- credential_op_cb op;
-};
+ /*
+ * Support ancient gnome-keyring, circ. RHEL 5.X.
+ * GNOME_KEYRING_DEFAULT seems to have been introduced with Gnome 2.22,
+ * and the other features roughly around Gnome 2.20, 6 months before.
+ * Ubuntu 8.04 used Gnome 2.22 (I think). Not sure any distro used 2.20.
+ * So the existence/non-existence of GNOME_KEYRING_DEFAULT seems like
+ * a decent thing to use as an indicator.
+ */
-#define CREDENTIAL_OP_END \
- { NULL,NULL }
+#define GNOME_KEYRING_DEFAULT NULL
/*
- * Table with operation callbacks is defined in concrete
- * credential helper implementation and contains entries
- * like { "get", function_to_get_credential } terminated
- * by CREDENTIAL_OP_END.
+ * ancient gnome-keyring returns DENIED when an entry is not found.
+ * Setting NO_MATCH to DENIED will prevent us from reporting DENIED
+ * errors during get and erase operations, but we will still report
+ * DENIED errors during a store.
*/
-struct credential_operation const credential_helper_ops[];
+#define GNOME_KEYRING_RESULT_NO_MATCH GNOME_KEYRING_RESULT_DENIED
-/* ---------------- common helper functions ----------------- */
+#define gnome_keyring_memory_alloc g_malloc
+#define gnome_keyring_memory_free gnome_keyring_free_password
+#define gnome_keyring_memory_strdup g_strdup
-static inline void free_password(char *password)
+static const char* gnome_keyring_result_to_message(GnomeKeyringResult result)
{
- char *c = password;
- if (!password)
- return;
-
- while (*c) *c++ = '\0';
- free(password);
+ switch (result) {
+ case GNOME_KEYRING_RESULT_OK:
+ return "OK";
+ case GNOME_KEYRING_RESULT_DENIED:
+ return "Denied";
+ case GNOME_KEYRING_RESULT_NO_KEYRING_DAEMON:
+ return "No Keyring Daemon";
+ case GNOME_KEYRING_RESULT_ALREADY_UNLOCKED:
+ return "Already UnLocked";
+ case GNOME_KEYRING_RESULT_NO_SUCH_KEYRING:
+ return "No Such Keyring";
+ case GNOME_KEYRING_RESULT_BAD_ARGUMENTS:
+ return "Bad Arguments";
+ case GNOME_KEYRING_RESULT_IO_ERROR:
+ return "IO Error";
+ case GNOME_KEYRING_RESULT_CANCELLED:
+ return "Cancelled";
+ case GNOME_KEYRING_RESULT_ALREADY_EXISTS:
+ return "Already Exists";
+ default:
+ return "Unknown Error";
+ }
}
-static inline void warning(const char *fmt, ...)
+/*
+ * Support really ancient gnome-keyring, circ. RHEL 4.X.
+ * Just a guess for the Glib version. Glib 2.8 was roughly Gnome 2.12 ?
+ * Which was released with gnome-keyring 0.4.3 ??
+ */
+#if GLIB_MAJOR_VERSION == 2 && GLIB_MINOR_VERSION < 8
+
+static void gnome_keyring_done_cb(GnomeKeyringResult result, gpointer user_data)
{
- va_list ap;
+ gpointer *data = (gpointer*) user_data;
+ int *done = (int*) data[0];
+ GnomeKeyringResult *r = (GnomeKeyringResult*) data[1];
- va_start(ap, fmt);
- fprintf(stderr, "warning: ");
- vfprintf(stderr, fmt, ap);
- fprintf(stderr, "\n" );
- va_end(ap);
+ *r = result;
+ *done = 1;
}
-static inline void error(const char *fmt, ...)
+static void wait_for_request_completion(int *done)
{
- va_list ap;
-
- va_start(ap, fmt);
- fprintf(stderr, "error: ");
- vfprintf(stderr, fmt, ap);
- fprintf(stderr, "\n" );
- va_end(ap);
+ GMainContext *mc = g_main_context_default();
+ while (!*done)
+ g_main_context_iteration(mc, TRUE);
}
-static inline void die(const char *fmt, ...)
+static GnomeKeyringResult gnome_keyring_item_delete_sync(const char *keyring, guint32 id)
{
- va_list ap;
+ int done = 0;
+ GnomeKeyringResult result;
+ gpointer data[] = { &done, &result };
+
+ gnome_keyring_item_delete(keyring, id, gnome_keyring_done_cb, data,
+ NULL);
+
+ wait_for_request_completion(&done);
- va_start(ap,fmt);
- error(fmt, ap);
- va_end(ap);
- exit(EXIT_FAILURE);
+ return result;
}
-static inline void die_errno(int err)
+#endif
+#endif
+
+/*
+ * This credential struct and API is simplified from git's credential.{h,c}
+ */
+struct credential
{
- error("%s", strerror(err));
- exit(EXIT_FAILURE);
-}
+ char *protocol;
+ char *host;
+ unsigned short port;
+ char *path;
+ char *username;
+ char *password;
+};
-static inline char *xstrdup(const char *str)
+#define CREDENTIAL_INIT \
+ { NULL,NULL,0,NULL,NULL,NULL }
+
+typedef int (*credential_op_cb)(struct credential*);
+
+struct credential_operation
{
- char *ret = strdup(str);
- if (!ret)
- die_errno(errno);
+ char *name;
+ credential_op_cb op;
+};
- return ret;
-}
+#define CREDENTIAL_OP_END \
+ { NULL,NULL }
/* ----------------- GNOME Keyring functions ----------------- */
/* create a special keyring option string, if path is given */
static char* keyring_object(struct credential *c)
{
- char* object = NULL;
-
if (!c->path)
- return object;
-
- object = (char*) malloc(strlen(c->host)+strlen(c->path)+8);
- if(!object)
- die_errno(errno);
+ return NULL;
- if(c->port)
- sprintf(object,"%s:%hd/%s",c->host,c->port,c->path);
- else
- sprintf(object,"%s/%s",c->host,c->path);
+ if (c->port)
+ return g_strdup_printf("%s:%hd/%s", c->host, c->port, c->path);
- return object;
+ return g_strdup_printf("%s/%s", c->host, c->path);
}
-int keyring_get(struct credential *c)
+static int keyring_get(struct credential *c)
{
char* object = NULL;
GList *entries;
@@ -173,7 +190,7 @@ int keyring_get(struct credential *c)
c->port,
&entries);
- free(object);
+ g_free(object);
if (result == GNOME_KEYRING_RESULT_NO_MATCH)
return EXIT_SUCCESS;
@@ -182,18 +199,18 @@ int keyring_get(struct credential *c)
return EXIT_SUCCESS;
if (result != GNOME_KEYRING_RESULT_OK) {
- error("%s",gnome_keyring_result_to_message(result));
+ g_critical("%s", gnome_keyring_result_to_message(result));
return EXIT_FAILURE;
}
/* pick the first one from the list */
password_data = (GnomeKeyringNetworkPasswordData *) entries->data;
- free_password(c->password);
- c->password = xstrdup(password_data->password);
+ gnome_keyring_memory_free(c->password);
+ c->password = gnome_keyring_memory_strdup(password_data->password);
if (!c->username)
- c->username = xstrdup(password_data->user);
+ c->username = g_strdup(password_data->user);
gnome_keyring_network_password_list_free(entries);
@@ -201,10 +218,11 @@ int keyring_get(struct credential *c)
}
-int keyring_store(struct credential *c)
+static int keyring_store(struct credential *c)
{
guint32 item_id;
char *object = NULL;
+ GnomeKeyringResult result;
/*
* Sanity check that what we are storing is actually sensible.
@@ -219,7 +237,7 @@ int keyring_store(struct credential *c)
object = keyring_object(c);
- gnome_keyring_set_network_password_sync(
+ result = gnome_keyring_set_network_password_sync(
GNOME_KEYRING_DEFAULT,
c->username,
NULL /* domain */,
@@ -231,11 +249,18 @@ int keyring_store(struct credential *c)
c->password,
&item_id);
- free(object);
+ g_free(object);
+
+ if (result != GNOME_KEYRING_RESULT_OK &&
+ result != GNOME_KEYRING_RESULT_CANCELLED) {
+ g_critical("%s", gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
return EXIT_SUCCESS;
}
-int keyring_erase(struct credential *c)
+static int keyring_erase(struct credential *c)
{
char *object = NULL;
GList *entries;
@@ -265,7 +290,7 @@ int keyring_erase(struct credential *c)
c->port,
&entries);
- free(object);
+ g_free(object);
if (result == GNOME_KEYRING_RESULT_NO_MATCH)
return EXIT_SUCCESS;
@@ -275,7 +300,7 @@ int keyring_erase(struct credential *c)
if (result != GNOME_KEYRING_RESULT_OK)
{
- error("%s",gnome_keyring_result_to_message(result));
+ g_critical("%s", gnome_keyring_result_to_message(result));
return EXIT_FAILURE;
}
@@ -289,7 +314,7 @@ int keyring_erase(struct credential *c)
if (result != GNOME_KEYRING_RESULT_OK)
{
- error("%s",gnome_keyring_result_to_message(result));
+ g_critical("%s", gnome_keyring_result_to_message(result));
return EXIT_FAILURE;
}
@@ -300,7 +325,7 @@ int keyring_erase(struct credential *c)
* Table with helper operation callbacks, used by generic
* credential helper main function.
*/
-struct credential_operation const credential_helper_ops[] =
+static struct credential_operation const credential_helper_ops[] =
{
{ "get", keyring_get },
{ "store", keyring_store },
@@ -310,66 +335,69 @@ struct credential_operation const credential_helper_ops[] =
/* ------------------ credential functions ------------------ */
-void credential_init(struct credential *c)
+static void credential_init(struct credential *c)
{
memset(c, 0, sizeof(*c));
}
-void credential_clear(struct credential *c)
+static void credential_clear(struct credential *c)
{
- free(c->protocol);
- free(c->host);
- free(c->path);
- free(c->username);
- free_password(c->password);
+ g_free(c->protocol);
+ g_free(c->host);
+ g_free(c->path);
+ g_free(c->username);
+ gnome_keyring_memory_free(c->password);
credential_init(c);
}
-int credential_read(struct credential *c)
+static int credential_read(struct credential *c)
{
- char buf[1024];
- ssize_t line_len = 0;
- char *key = buf;
+ char *buf;
+ size_t line_len;
+ char *key;
char *value;
- while (fgets(buf, sizeof(buf), stdin))
+ key = buf = gnome_keyring_memory_alloc(1024);
+
+ while (fgets(buf, 1024, stdin))
{
line_len = strlen(buf);
- if(buf[line_len-1]=='\n')
+ if (line_len && buf[line_len-1] == '\n')
buf[--line_len]='\0';
- if(!line_len)
+ if (!line_len)
break;
value = strchr(buf,'=');
- if(!value) {
- warning("invalid credential line: %s", key);
+ if (!value) {
+ g_warning("invalid credential line: %s", key);
+ gnome_keyring_memory_free(buf);
return -1;
}
*value++ = '\0';
if (!strcmp(key, "protocol")) {
- free(c->protocol);
- c->protocol = xstrdup(value);
+ g_free(c->protocol);
+ c->protocol = g_strdup(value);
} else if (!strcmp(key, "host")) {
- free(c->host);
- c->host = xstrdup(value);
+ g_free(c->host);
+ c->host = g_strdup(value);
value = strrchr(c->host,':');
if (value) {
*value++ = '\0';
c->port = atoi(value);
}
} else if (!strcmp(key, "path")) {
- free(c->path);
- c->path = xstrdup(value);
+ g_free(c->path);
+ c->path = g_strdup(value);
} else if (!strcmp(key, "username")) {
- free(c->username);
- c->username = xstrdup(value);
+ g_free(c->username);
+ c->username = g_strdup(value);
} else if (!strcmp(key, "password")) {
- free_password(c->password);
- c->password = xstrdup(value);
+ gnome_keyring_memory_free(c->password);
+ c->password = gnome_keyring_memory_strdup(value);
while (*value) *value++ = '\0';
}
/*
@@ -378,17 +406,20 @@ int credential_read(struct credential *c)
* learn new lines, and the helpers are updated to match.
*/
}
+
+ gnome_keyring_memory_free(buf);
+
return 0;
}
-void credential_write_item(FILE *fp, const char *key, const char *value)
+static void credential_write_item(FILE *fp, const char *key, const char *value)
{
if (!value)
return;
fprintf(fp, "%s=%s\n", key, value);
}
-void credential_write(const struct credential *c)
+static void credential_write(const struct credential *c)
{
/* only write username/password, if set */
credential_write_item(stdout, "username", c->username);
@@ -402,9 +433,9 @@ static void usage(const char *name)
basename = (basename) ? basename + 1 : name;
fprintf(stderr, "usage: %s <", basename);
- while(try_op->name) {
+ while (try_op->name) {
fprintf(stderr,"%s",(try_op++)->name);
- if(try_op->name)
+ if (try_op->name)
fprintf(stderr,"%s","|");
}
fprintf(stderr,"%s",">\n");
@@ -419,19 +450,21 @@ int main(int argc, char *argv[])
if (!argv[1]) {
usage(argv[0]);
- goto out;
+ exit(EXIT_FAILURE);
}
+ g_set_application_name("Git Credential Helper");
+
/* lookup operation callback */
- while(try_op->name && strcmp(argv[1], try_op->name))
+ while (try_op->name && strcmp(argv[1], try_op->name))
try_op++;
/* unsupported operation given -- ignore silently */
- if(!try_op->name || !try_op->op)
+ if (!try_op->name || !try_op->op)
goto out;
ret = credential_read(&cred);
- if(ret)
+ if (ret)
goto out;
/* perform credential operation */
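
The reworked credential_read() above still reads Git's credential-helper
protocol: "key=value" lines on stdin, terminated by a blank line.  Assuming
the helper has been built in this directory, a by-hand check could look like
the following (host and username are made-up values):

	# hypothetical values; the key names come from credential_read()
	printf 'protocol=https\nhost=example.com\nusername=alice\n\n' |
	./git-credential-gnome-keyring get

"store" and "erase" are accepted in place of "get", per credential_helper_ops[].
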
diff --git a/contrib/credential/netrc/Makefile b/contrib/credential/netrc/Makefile
new file mode 100644
index 0000000000..51b76138a5
--- /dev/null
+++ b/contrib/credential/netrc/Makefile
@@ -0,0 +1,5 @@
+test:
+ ./test.pl
+
+testverbose:
+ ./test.pl -d -v
diff --git a/contrib/credential/netrc/git-credential-netrc b/contrib/credential/netrc/git-credential-netrc
new file mode 100755
index 0000000000..1571a7b269
--- /dev/null
+++ b/contrib/credential/netrc/git-credential-netrc
@@ -0,0 +1,423 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use File::Basename;
+
+my $VERSION = "0.1";
+
+my %options = (
+ help => 0,
+ debug => 0,
+ verbose => 0,
+ insecure => 0,
+ file => [],
+
+ # identical token maps, e.g. host -> host, will be inserted later
+ tmap => {
+ port => 'protocol',
+ machine => 'host',
+ path => 'path',
+ login => 'username',
+ user => 'username',
+ password => 'password',
+ }
+ );
+
+# Map each credential protocol token to itself on the netrc side.
+foreach (values %{$options{tmap}}) {
+ $options{tmap}->{$_} = $_;
+}
+
+# Now, $options{tmap} has a mapping from the netrc format to the Git credential
+# helper protocol.
+
+# Next, we build the reverse token map.
+
+# When $rmap{foo} contains 'bar', that means that what the Git credential helper
+# protocol calls 'bar' is found as 'foo' in the netrc/authinfo file. Keys in
+# %rmap are what we expect to read from the netrc/authinfo file.
+
+my %rmap;
+foreach my $k (keys %{$options{tmap}}) {
+ push @{$rmap{$options{tmap}->{$k}}}, $k;
+}
+
+Getopt::Long::Configure("bundling");
+
+# TODO: maybe allow the token map $options{tmap} to be configurable.
+GetOptions(\%options,
+ "help|h",
+ "debug|d",
+ "insecure|k",
+ "verbose|v",
+ "file|f=s@",
+ );
+
+if ($options{help}) {
+ my $shortname = basename($0);
+ $shortname =~ s/git-credential-//;
+
+ print <<EOHIPPUS;
+
+$0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] [-v] [-k] get
+
+Version $VERSION by tzz\@lifelogs.com. License: BSD.
+
+Options:
+
+ -f|--file AUTHFILE : specify netrc-style files. Files with the .gpg extension
+ will be decrypted by GPG before parsing. Multiple -f
+ arguments are OK. They are processed in order, and the
+ first matching entry found is returned via the credential
+ helper protocol (see below).
+
+ When no -f option is given, .authinfo.gpg, .netrc.gpg,
+ .authinfo, and .netrc files in your home directory are used
+ in this order.
+
+ -k|--insecure : ignore bad file ownership or permissions
+
+ -d|--debug : turn on debugging (developer info)
+
+ -v|--verbose : be more verbose (show files and information found)
+
+To enable this credential helper:
+
+ git config credential.helper '$shortname -f AUTHFILE1 -f AUTHFILE2'
+
+(Note that Git will prepend "git-credential-" to the helper name and look for it
+in the path.)
+
+...and if you want lots of debugging info:
+
+ git config credential.helper '$shortname -f AUTHFILE -d'
+
+...or to see the files opened and data found:
+
+ git config credential.helper '$shortname -f AUTHFILE -v'
+
+Only "get" mode is supported by this credential helper. It opens every AUTHFILE
+and looks for the first entry that matches the requested search criteria:
+
+ 'port|protocol':
+ The protocol that will be used (e.g., https). (protocol=X)
+
+ 'machine|host':
+ The remote hostname for a network credential. (host=X)
+
+ 'path':
+ The path with which the credential will be used. (path=X)
+
+ 'login|user|username':
+ The credential's username, if we already have one. (username=X)
+
+Thus, when we get this query on STDIN:
+
+host=github.com
+protocol=https
+username=tzz
+
+this credential helper will look for the first entry in every AUTHFILE that
+matches
+
+machine github.com port https login tzz
+
+OR
+
+machine github.com protocol https login tzz
+
+OR... and so on for the other acceptable tokens listed above. Any unknown
+tokens are simply ignored.
+
+Then, the helper will print out whatever tokens it got from the entry, including
+"password" tokens, mapping back to Git's helper protocol; e.g. "port" is mapped
+back to "protocol". Any redundant entry tokens (part of the original query) are
+skipped.
+
+Again, note that only the first matching entry from all the AUTHFILEs, processed
+in the sequence given on the command line, is used.
+
+Netrc/authinfo tokens can be quoted as 'STRING' or "STRING".
+
+No caching is performed by this credential helper.
+
+EOHIPPUS
+
+ exit 0;
+}
+
+my $mode = shift @ARGV;
+
+# Credentials must get a parameter, so die if it's missing.
+die "Syntax: $0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] get" unless defined $mode;
+
+# Only support 'get' mode; with any other unsupported ones we just exit.
+exit 0 unless $mode eq 'get';
+
+my $files = $options{file};
+
+# if no files were given, use a predefined list.
+# note that .gpg files come first
+unless (scalar @$files) {
+ my @candidates = qw[
+ ~/.authinfo.gpg
+ ~/.netrc.gpg
+ ~/.authinfo
+ ~/.netrc
+ ];
+
+ $files = $options{file} = [ map { glob $_ } @candidates ];
+}
+
+my $query = read_credential_data_from_stdin();
+
+FILE:
+foreach my $file (@$files) {
+ my $gpgmode = $file =~ m/\.gpg$/;
+ unless (-r $file) {
+ log_verbose("Unable to read $file; skipping it");
+ next FILE;
+ }
+
+ # the following check is copied from Net::Netrc, for non-GPG files
+ # OS/2 and Win32 do not handle stat in a way compatible with this check :-(
+ unless ($gpgmode || $options{insecure} ||
+ $^O eq 'os2'
+ || $^O eq 'MSWin32'
+ || $^O eq 'MacOS'
+ || $^O =~ /^cygwin/) {
+ my @stat = stat($file);
+
+ if (@stat) {
+ if ($stat[2] & 077) {
+ log_verbose("Insecure $file (mode=%04o); skipping it",
+ $stat[2] & 07777);
+ next FILE;
+ }
+
+ if ($stat[4] != $<) {
+ log_verbose("Not owner of $file; skipping it");
+ next FILE;
+ }
+ }
+ }
+
+ my @entries = load_netrc($file, $gpgmode);
+
+ unless (scalar @entries) {
+ if ($!) {
+ log_verbose("Unable to open $file: $!");
+ } else {
+ log_verbose("No netrc entries found in $file");
+ }
+
+ next FILE;
+ }
+
+ my $entry = find_netrc_entry($query, @entries);
+ if ($entry) {
+ print_credential_data($entry, $query);
+ # we're done!
+ last FILE;
+ }
+}
+
+exit 0;
+
+sub load_netrc {
+ my $file = shift @_;
+ my $gpgmode = shift @_;
+
+ my $io;
+ if ($gpgmode) {
+ my @cmd = (qw(gpg --decrypt), $file);
+ log_verbose("Using GPG to open $file: [@cmd]");
+ open $io, "-|", @cmd;
+ } else {
+ log_verbose("Opening $file...");
+ open $io, '<', $file;
+ }
+
+ # nothing to do if the open failed (we log the error later)
+ return unless $io;
+
+ # Net::Netrc does this, but the functionality is merged with the file
+ # detection logic, so we have to extract just the part we need
+ my @netrc_entries = net_netrc_loader($io);
+
+ # these entries will use the credential helper protocol token names
+ my @entries;
+
+ foreach my $nentry (@netrc_entries) {
+ my %entry;
+ my $num_port;
+
+ if (!defined $nentry->{machine}) {
+ next;
+ }
+ if (defined $nentry->{port} && $nentry->{port} =~ m/^\d+$/) {
+ $num_port = $nentry->{port};
+ delete $nentry->{port};
+ }
+
+ # create the new entry for the credential helper protocol
+ $entry{$options{tmap}->{$_}} = $nentry->{$_} foreach keys %$nentry;
+
+ # for "host X port Y" where Y is an integer (captured by
+ # $num_port above), set the host to "X:Y"
+ if (defined $entry{host} && defined $num_port) {
+ $entry{host} = join(':', $entry{host}, $num_port);
+ }
+
+ push @entries, \%entry;
+ }
+
+ return @entries;
+}
+
+sub net_netrc_loader {
+ my $fh = shift @_;
+ my @entries;
+ my ($mach, $macdef, $tok, @tok);
+
+ LINE:
+ while (<$fh>) {
+ undef $macdef if /\A\n\Z/;
+
+ if ($macdef) {
+ next LINE;
+ }
+
+ s/^\s*//;
+ chomp;
+
+ while (length && s/^("((?:[^"]+|\\.)*)"|((?:[^\\\s]+|\\.)*))\s*//) {
+ (my $tok = $+) =~ s/\\(.)/$1/g;
+ push(@tok, $tok);
+ }
+
+ TOKEN:
+ while (@tok) {
+ if ($tok[0] eq "default") {
+ shift(@tok);
+ $mach = { machine => undef };
+ next TOKEN;
+ }
+
+ $tok = shift(@tok);
+
+ if ($tok eq "machine") {
+ my $host = shift @tok;
+ $mach = { machine => $host };
+ push @entries, $mach;
+ } elsif (exists $options{tmap}->{$tok}) {
+ unless ($mach) {
+ log_debug("Skipping token $tok because no machine was given");
+ next TOKEN;
+ }
+
+ my $value = shift @tok;
+ unless (defined $value) {
+ log_debug("Token $tok had no value, skipping it.");
+ next TOKEN;
+ }
+
+ # Following line added by rmerrell to remove '/' escape char in .netrc
+ $value =~ s/\/\\/\\/g;
+ $mach->{$tok} = $value;
+ } elsif ($tok eq "macdef") { # we ignore macros
+ next TOKEN unless $mach;
+ my $value = shift @tok;
+ $macdef = 1;
+ }
+ }
+ }
+
+ return @entries;
+}
+
+sub read_credential_data_from_stdin {
+ # the query: start with every token with no value
+ my %q = map { $_ => undef } values(%{$options{tmap}});
+
+ while (<STDIN>) {
+ next unless m/^([^=]+)=(.+)/;
+
+ my ($token, $value) = ($1, $2);
+ die "Unknown search token $token" unless exists $q{$token};
+ $q{$token} = $value;
+ log_debug("We were given search token $token and value $value");
+ }
+
+ foreach (sort keys %q) {
+ log_debug("Searching for %s = %s", $_, $q{$_} || '(any value)');
+ }
+
+ return \%q;
+}
+
+# takes the search tokens and then a list of entries
+# each entry is a hash reference
+sub find_netrc_entry {
+ my $query = shift @_;
+
+ ENTRY:
+ foreach my $entry (@_)
+ {
+ my $entry_text = join ', ', map { "$_=$entry->{$_}" } keys %$entry;
+ foreach my $check (sort keys %$query) {
+ if (!defined $entry->{$check}) {
+ log_debug("OK: entry has no $check token, so any value satisfies check $check");
+ } elsif (defined $query->{$check}) {
+ log_debug("compare %s [%s] to [%s] (entry: %s)",
+ $check,
+ $entry->{$check},
+ $query->{$check},
+ $entry_text);
+ unless ($query->{$check} eq $entry->{$check}) {
+ next ENTRY;
+ }
+ } else {
+ log_debug("OK: any value satisfies check $check");
+ }
+ }
+
+ return $entry;
+ }
+
+ # nothing was found
+ return;
+}
+
+sub print_credential_data {
+ my $entry = shift @_;
+ my $query = shift @_;
+
+ log_debug("entry has passed all the search checks");
+ TOKEN:
+ foreach my $git_token (sort keys %$entry) {
+ log_debug("looking for useful token $git_token");
+ # don't print unknown (to the credential helper protocol) tokens
+ next TOKEN unless exists $query->{$git_token};
+
+ # don't print things asked in the query (the entry matches them)
+ next TOKEN if defined $query->{$git_token};
+
+ log_debug("FOUND: $git_token=$entry->{$git_token}");
+ printf "%s=%s\n", $git_token, $entry->{$git_token};
+ }
+}
+sub log_verbose {
+ return unless $options{verbose};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
+
+sub log_debug {
+ return unless $options{debug};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
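
With the sample test.netrc added below, the helper can be exercised by hand
much like test.pl does; -k skips the ownership/permission check described in
the help text, and the query mirrors its github.com example:

	# assumes a checkout where test.netrc sits next to the script
	printf 'host=github.com\nprotocol=https\n\n' |
	./git-credential-netrc -k -f test.netrc -v get

which should answer with the carol entry's username and password.
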
diff --git a/contrib/credential/netrc/test.netrc b/contrib/credential/netrc/test.netrc
new file mode 100644
index 0000000000..ba119a937f
--- /dev/null
+++ b/contrib/credential/netrc/test.netrc
@@ -0,0 +1,13 @@
+machine imap login tzz@lifelogs.com port imaps password letmeknow
+machine imap login bob port imaps password bobwillknow
+
+# comment test
+
+machine imap2 login tzz port 1099 password tzzknow
+machine imap2 login bob password bobwillknow
+
+# another command
+
+machine github.com
+ multilinetoken anothervalue
+ login carol password carolknows
diff --git a/contrib/credential/netrc/test.pl b/contrib/credential/netrc/test.pl
new file mode 100755
index 0000000000..169b6463c3
--- /dev/null
+++ b/contrib/credential/netrc/test.pl
@@ -0,0 +1,106 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+use Test;
+use IPC::Open2;
+
+BEGIN { plan tests => 15 }
+
+my @global_credential_args = @ARGV;
+my $netrc = './test.netrc';
+print "# Testing insecure file, nothing should be found\n";
+chmod 0644, $netrc;
+my $cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from insecure file");
+
+print "# Testing missing file, nothing should be found\n";
+chmod 0644, $netrc;
+$cred = run_credential(['-f', '///nosuchfile///', 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from missing file");
+
+chmod 0600, $netrc;
+
+print "# Testing with invalid data\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ "bad data");
+ok(scalar keys %$cred, 4, "Got first found keys with bad data");
+
+print "# Testing netrc file for a missing corovamilkbar entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'corovamilkbar' });
+
+ok(scalar keys %$cred, 0, "Got no corovamilkbar keys");
+
+print "# Testing netrc file for a github.com entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 2, "Got 2 Github keys");
+
+ok($cred->{password}, 'carolknows', "Got correct Github password");
+ok($cred->{username}, 'carol', "Got correct Github username");
+
+print "# Testing netrc file for a username-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap', username => 'bob' });
+
+ok(scalar keys %$cred, 2, "Got 2 username-specific keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct user-specific password");
+ok($cred->{protocol}, 'imaps', "Got correct user-specific protocol");
+
+print "# Testing netrc file for a host:port-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2:1099' });
+
+ok(scalar keys %$cred, 2, "Got 2 host:port-specific keys");
+
+ok($cred->{password}, 'tzzknow', "Got correct host:port-specific password");
+ok($cred->{username}, 'tzz', "Got correct host:port-specific username");
+
+print "# Testing netrc file that 'host:port kills host' entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2' });
+
+ok(scalar keys %$cred, 2, "Got 2 'host:port kills host' keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct 'host:port kills host' password");
+ok($cred->{username}, 'bob', "Got correct 'host:port kills host' username");
+
+sub run_credential
+{
+ my $args = shift @_;
+ my $data = shift @_;
+ my $pid = open2(my $chld_out, my $chld_in,
+ './git-credential-netrc', @global_credential_args,
+ @$args);
+
+ die "Couldn't open pipe to netrc credential helper: $!" unless $pid;
+
+ if (ref $data eq 'HASH')
+ {
+ print $chld_in "$_=$data->{$_}\n" foreach sort keys %$data;
+ }
+ else
+ {
+ print $chld_in "$data\n";
+ }
+
+ close $chld_in;
+ my %ret;
+
+ while (<$chld_out>)
+ {
+ chomp;
+ next unless m/^([^=]+)=(.+)/;
+
+ $ret{$1} = $2;
+ }
+
+ return \%ret;
+}
diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
index 3940202b36..bcd3f575a3 100644
--- a/contrib/credential/osxkeychain/git-credential-osxkeychain.c
+++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
@@ -127,10 +127,20 @@ static void read_credential(void)
*v++ = '\0';
if (!strcmp(buf, "protocol")) {
- if (!strcmp(v, "https"))
+ if (!strcmp(v, "imap"))
+ protocol = kSecProtocolTypeIMAP;
+ else if (!strcmp(v, "imaps"))
+ protocol = kSecProtocolTypeIMAPS;
+ else if (!strcmp(v, "ftp"))
+ protocol = kSecProtocolTypeFTP;
+ else if (!strcmp(v, "ftps"))
+ protocol = kSecProtocolTypeFTPS;
+ else if (!strcmp(v, "https"))
protocol = kSecProtocolTypeHTTPS;
else if (!strcmp(v, "http"))
protocol = kSecProtocolTypeHTTP;
+ else if (!strcmp(v, "smtp"))
+ protocol = kSecProtocolTypeSMTP;
else /* we don't yet handle other protocols */
exit(0);
}
diff --git a/contrib/examples/git-log.sh b/contrib/examples/git-log.sh
new file mode 100755
index 0000000000..c2ea71cf14
--- /dev/null
+++ b/contrib/examples/git-log.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+#
+
+USAGE='[--max-count=<n>] [<since>..<limit>] [--pretty=<format>] [git-rev-list options]'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+revs=$(git-rev-parse --revs-only --no-flags --default HEAD "$@") || exit
+[ "$revs" ] || {
+ die "No HEAD ref"
+}
+git-rev-list --pretty $(git-rev-parse --default HEAD "$@") |
+LESS=-S ${PAGER:-less}
diff --git a/contrib/examples/git-merge.sh b/contrib/examples/git-merge.sh
index 7b922c3948..a5e42a9f01 100755
--- a/contrib/examples/git-merge.sh
+++ b/contrib/examples/git-merge.sh
@@ -263,7 +263,7 @@ fi
# This could be traditional "merge <msg> HEAD <commit>..." and the
# way we can tell it is to see if the second token is HEAD, but some
-# people might have misused the interface and used a committish that
+# people might have misused the interface and used a commit-ish that
# is the same as HEAD there instead. Traditional format never would
# have "-m" so it is an additional safety measure to check for it.
diff --git a/contrib/examples/git-repack.sh b/contrib/examples/git-repack.sh
new file mode 100755
index 0000000000..757933174e
--- /dev/null
+++ b/contrib/examples/git-repack.sh
@@ -0,0 +1,194 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+#
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_SPEC="\
+git repack [options]
+--
+a pack everything in a single pack
+A same as -a, and turn unreachable objects loose
+d remove redundant packs, and run git-prune-packed
+f pass --no-reuse-delta to git-pack-objects
+F pass --no-reuse-object to git-pack-objects
+n do not run git-update-server-info
+q,quiet be quiet
+l pass --local to git-pack-objects
+unpack-unreachable= with -A, do not loosen objects older than this
+ Packing constraints
+window= size of the window used for delta compression
+window-memory= same as the above, but limit memory size instead of entries count
+depth= limits the maximum delta depth
+max-pack-size= maximum size of each packfile
+"
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+no_update_info= all_into_one= remove_redundant= unpack_unreachable=
+local= no_reuse= extra=
+while test $# != 0
+do
+ case "$1" in
+ -n) no_update_info=t ;;
+ -a) all_into_one=t ;;
+ -A) all_into_one=t
+ unpack_unreachable=--unpack-unreachable ;;
+ --unpack-unreachable)
+ unpack_unreachable="--unpack-unreachable=$2"; shift ;;
+ -d) remove_redundant=t ;;
+ -q) GIT_QUIET=t ;;
+ -f) no_reuse=--no-reuse-delta ;;
+ -F) no_reuse=--no-reuse-object ;;
+ -l) local=--local ;;
+ --max-pack-size|--window|--window-memory|--depth)
+ extra="$extra $1=$2"; shift ;;
+ --) shift; break;;
+ *) usage ;;
+ esac
+ shift
+done
+
+case "`git config --bool repack.usedeltabaseoffset || echo true`" in
+true)
+ extra="$extra --delta-base-offset" ;;
+esac
+
+PACKDIR="$GIT_OBJECT_DIRECTORY/pack"
+PACKTMP="$PACKDIR/.tmp-$$-pack"
+rm -f "$PACKTMP"-*
+trap 'rm -f "$PACKTMP"-*' 0 1 2 3 15
+
+# There will be more repacking strategies to come...
+case ",$all_into_one," in
+,,)
+ args='--unpacked --incremental'
+ ;;
+,t,)
+ args= existing=
+ if [ -d "$PACKDIR" ]; then
+ for e in `cd "$PACKDIR" && find . -type f -name '*.pack' \
+ | sed -e 's/^\.\///' -e 's/\.pack$//'`
+ do
+ if [ -e "$PACKDIR/$e.keep" ]; then
+ : keep
+ else
+ existing="$existing $e"
+ fi
+ done
+ if test -n "$existing" -a -n "$unpack_unreachable" -a \
+ -n "$remove_redundant"
+ then
+ # This may have arbitrary user arguments, so we
+ # have to protect it against whitespace splitting
+ # when it gets run as "pack-objects $args" later.
+ # Fortunately, we know it's an approxidate, so we
+ # can just use dots instead.
+ args="$args $(echo "$unpack_unreachable" | tr ' ' .)"
+ fi
+ fi
+ ;;
+esac
+
+mkdir -p "$PACKDIR" || exit
+
+args="$args $local ${GIT_QUIET:+-q} $no_reuse$extra"
+names=$(git pack-objects --keep-true-parents --honor-pack-keep --non-empty --all --reflog $args </dev/null "$PACKTMP") ||
+ exit 1
+if [ -z "$names" ]; then
+ say Nothing new to pack.
+fi
+
+# Ok we have prepared all new packfiles.
+
+# First see if there are packs of the same name and, if so,
+# whether we can move them out of the way (this can happen if we
+# repacked immediately after packing fully).
+rollback=
+failed=
+for name in $names
+do
+ for sfx in pack idx
+ do
+ file=pack-$name.$sfx
+ test -f "$PACKDIR/$file" || continue
+ rm -f "$PACKDIR/old-$file" &&
+ mv "$PACKDIR/$file" "$PACKDIR/old-$file" || {
+ failed=t
+ break
+ }
+ rollback="$rollback $file"
+ done
+ test -z "$failed" || break
+done
+
+# If renaming failed for any of them, roll the ones we have
+# already renamed back to their original names.
+if test -n "$failed"
+then
+ rollback_failure=
+ for file in $rollback
+ do
+ mv "$PACKDIR/old-$file" "$PACKDIR/$file" ||
+ rollback_failure="$rollback_failure $file"
+ done
+ if test -n "$rollback_failure"
+ then
+ echo >&2 "WARNING: Some packs in use have been renamed by"
+ echo >&2 "WARNING: prefixing old- to their name, in order to"
+ echo >&2 "WARNING: replace them with the new version of the"
+ echo >&2 "WARNING: file. But the operation failed, and"
+ echo >&2 "WARNING: attempt to rename them back to their"
+ echo >&2 "WARNING: original names also failed."
+ echo >&2 "WARNING: Please rename them in $PACKDIR manually:"
+ for file in $rollback_failure
+ do
+ echo >&2 "WARNING: old-$file -> $file"
+ done
+ fi
+ exit 1
+fi
+
+# Now the ones with the same name are out of the way...
+fullbases=
+for name in $names
+do
+ fullbases="$fullbases pack-$name"
+ chmod a-w "$PACKTMP-$name.pack"
+ chmod a-w "$PACKTMP-$name.idx"
+ mv -f "$PACKTMP-$name.pack" "$PACKDIR/pack-$name.pack" &&
+ mv -f "$PACKTMP-$name.idx" "$PACKDIR/pack-$name.idx" ||
+ exit
+done
+
+# Remove the "old-" files
+for name in $names
+do
+ rm -f "$PACKDIR/old-pack-$name.idx"
+ rm -f "$PACKDIR/old-pack-$name.pack"
+done
+
+# End of pack replacement.
+
+if test "$remove_redundant" = t
+then
+ # We know $existing are all redundant.
+ if [ -n "$existing" ]
+ then
+ ( cd "$PACKDIR" &&
+ for e in $existing
+ do
+ case " $fullbases " in
+ *" $e "*) ;;
+ *) rm -f "$e.pack" "$e.idx" "$e.keep" ;;
+ esac
+ done
+ )
+ fi
+ git prune-packed ${GIT_QUIET:+-q}
+fi
+
+case "$no_update_info" in
+t) : ;;
+*) git update-server-info ;;
+esac
diff --git a/contrib/examples/git-whatchanged.sh b/contrib/examples/git-whatchanged.sh
new file mode 100755
index 0000000000..1fb9feb348
--- /dev/null
+++ b/contrib/examples/git-whatchanged.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+USAGE='[-p] [--max-count=<n>] [<since>..<limit>] [--pretty=<format>] [-m] [git-diff-tree options] [git-rev-list options]'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+diff_tree_flags=$(git-rev-parse --sq --no-revs --flags "$@") || exit
+case "$0" in
+*whatchanged)
+ count=
+ test -z "$diff_tree_flags" &&
+ diff_tree_flags=$(git-repo-config --get whatchanged.difftree)
+ diff_tree_default_flags='-c -M --abbrev' ;;
+*show)
+ count=-n1
+ test -z "$diff_tree_flags" &&
+ diff_tree_flags=$(git-repo-config --get show.difftree)
+ diff_tree_default_flags='--cc --always' ;;
+esac
+test -z "$diff_tree_flags" &&
+ diff_tree_flags="$diff_tree_default_flags"
+
+rev_list_args=$(git-rev-parse --sq --default HEAD --revs-only "$@") &&
+diff_tree_args=$(git-rev-parse --sq --no-revs --no-flags "$@") &&
+
+eval "git-rev-list $count $rev_list_args" |
+eval "git-diff-tree --stdin --pretty -r $diff_tree_flags $diff_tree_args" |
+LESS="$LESS -S" ${PAGER:-less}
diff --git a/contrib/gitview/gitview b/contrib/gitview/gitview
index 4c99dfb903..4e23c650fe 100755
--- a/contrib/gitview/gitview
+++ b/contrib/gitview/gitview
@@ -1205,7 +1205,7 @@ class GitView(object):
#The first parent always continue on the same line
try:
- # check we alreay have the value
+ # check we already have the value
tmp_node_pos = self.nodepos[commit.parent_sha1[0]]
except KeyError:
self.colours[commit.parent_sha1[0]] = colour
diff --git a/contrib/hg-to-git/hg-to-git.py b/contrib/hg-to-git/hg-to-git.py
index 232625a7b7..60dec86d37 100755
--- a/contrib/hg-to-git/hg-to-git.py
+++ b/contrib/hg-to-git/hg-to-git.py
@@ -225,7 +225,7 @@ for cset in range(int(tip) + 1):
os.system('git ls-files -x .hg --deleted | git update-index --remove --stdin')
# commit
- os.system(getgitenv(user, date) + 'git commit --allow-empty -a -F %s' % filecomment)
+ os.system(getgitenv(user, date) + 'git commit --allow-empty --allow-empty-message -a -F %s' % filecomment)
os.unlink(filecomment)
# tag
diff --git a/contrib/hooks/multimail/README b/contrib/hooks/multimail/README
new file mode 100644
index 0000000000..9904396710
--- /dev/null
+++ b/contrib/hooks/multimail/README
@@ -0,0 +1,486 @@
+ git-multimail
+ =============
+
+git-multimail is a tool for sending notification emails on pushes to a
+Git repository. It includes a Python module called git_multimail.py,
+which can either be used as a hook script directly or can be imported
+as a Python module into another script.
+
+git-multimail is derived from the Git project's old
+contrib/hooks/post-receive-email, and is mostly compatible with that
+script. See README.migrate-from-post-receive-email for details about
+the differences and for how to migrate from post-receive-email to
+git-multimail.
+
+git-multimail, like the rest of the Git project, is licensed under
+GPLv2 (see the COPYING file for details).
+
+Please note: although, as a convenience, git-multimail may be
+distributed along with the main Git project, development of
+git-multimail takes place in its own, separate project. See section
+"Getting involved" below for more information.
+
+
+By default, for each push received by the repository, git-multimail:
+
+1. Outputs one email summarizing each reference that was changed.
+ These "reference change" (called "refchange" below) emails describe
+ the nature of the change (e.g., was the reference created, deleted,
+ fast-forwarded, etc.) and include a one-line summary of each commit
+ that was added to the reference.
+
+2. Outputs one email for each new commit that was introduced by the
+ reference change. These "commit" emails include a list of the
+ files changed by the commit, followed by the diffs of files
+ modified by the commit. The commit emails are threaded to the
+ corresponding reference change email via "In-Reply-To". This style
+ (similar to the "git format-patch" style used on the Git mailing
+ list) makes it easy to scan through the emails, jump to patches
+ that need further attention, and write comments about specific
+ commits. Commits are handled in reverse topological order (i.e.,
+ parents shown before children). For example,
+
+ [git] branch master updated
+ + [git] 01/08: doc: fix xref link from api docs to manual pages
+ + [git] 02/08: api-credentials.txt: show the big picture first
+ + [git] 03/08: api-credentials.txt: mention credential.helper explicitly
+ + [git] 04/08: api-credentials.txt: add "see also" section
+ + [git] 05/08: t3510 (cherry-pick-sequence): add missing '&&'
+ + [git] 06/08: Merge branch 'rr/maint-t3510-cascade-fix'
+ + [git] 07/08: Merge branch 'mm/api-credentials-doc'
+ + [git] 08/08: Git 1.7.11-rc2
+
+ Each commit appears in exactly one commit email, the first time
+ that it is pushed to the repository. If a commit is later merged
+ into another branch, then a one-line summary of the commit is
+ included in the reference change email (as usual), but no
+ additional commit email is generated.
+
+ By default, reference change emails have their "Reply-To" field set
+ to the person who pushed the change, and commit emails have their
+ "Reply-To" field set to the author of the commit.
+
+3. Outputs one "announce" email for each new annotated tag, including
+ information about the tag and optionally a shortlog describing the
+ changes since the previous tag. Such emails might be useful if you
+ use annotated tags to mark releases of your project.
+
+
+Requirements
+------------
+
+* Python 2.x, version 2.4 or later. No non-standard Python modules
+ are required. git-multimail does *not* currently work with Python
+ 3.x.
+
+ The example scripts invoke Python using the following shebang line
+ (following PEP 394 [1]):
+
+ #! /usr/bin/env python2
+
+ If your system's Python2 interpreter is not in your PATH or is not
+ called "python2", you can change the lines accordingly. Or you can
+ invoke the Python interpreter explicitly, for example via a tiny
+ shell script like
+
+ #! /bin/sh
+ /usr/local/bin/python /path/to/git_multimail.py "$@"
+
+* The "git" command must be in your PATH. git-multimail is known to
+ work with Git versions back to 1.7.1. (Earlier versions have not
+ been tested; if you do so, please report your results.)
+
+* To send emails using the default configuration, a standard sendmail
+ program must be located at '/usr/sbin/sendmail' and configured
+ correctly to send emails. If this is not the case, see the
+ multimailhook.mailer configuration variable below for how to
+ configure git-multimail to send emails via an SMTP server.
+
+
+Invocation
+----------
+
+git_multimail.py is designed to be used as a "post-receive" hook in a
+Git repository (see githooks(5)). Link or copy it to
+$GIT_DIR/hooks/post-receive within the repository for which email
+notifications are desired. Usually it should be installed on the
+central repository for a project, to which all commits are eventually
+pushed.
+
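+For example, a minimal installation might look like the following
+sketch (the repository path and the location of the git-multimail
+checkout are assumptions; adjust them for your site):
+
+    cd /srv/git/project.git        # hypothetical bare repository
+    cp /path/to/git-multimail/git_multimail.py hooks/post-receive
+    chmod +x hooks/post-receive
+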
+For use on pre-v1.5.1 Git servers, git_multimail.py can also work as
+an "update" hook, taking its arguments on the command line. To use
+this script in this manner, link or copy it to $GIT_DIR/hooks/update.
+Please note that the script is not completely reliable in this mode
+[2].
+
+Alternatively, git_multimail.py can be imported as a Python module
+into your own Python post-receive script. This method is a bit more
+work, but allows the behavior of the hook to be customized using
+arbitrary Python code. For example, you can use a custom environment
+(perhaps inheriting from GenericEnvironment or GitoliteEnvironment) to
+
+* change how the user who did the push is determined
+
+* read users' email addresses from an LDAP server or from a database
+
+* decide which users should be notified about which commits based on
+ the contents of the commits (e.g., for users who want to be notified
+ only about changes affecting particular files or subdirectories)
+
+Or you can change how emails are sent by writing your own Mailer
+class. The "post-receive" script in this directory demonstrates how
+to use git_multimail.py as a Python module. (If you make interesting
+changes of this type, please consider sharing them with the
+community.)
+
+
+Configuration
+-------------
+
+By default, git-multimail mostly takes its configuration from the
+following "git config" settings:
+
+multimailhook.environment
+
+ This describes the general environment of the repository.
+ Currently supported values:
+
+ "generic" -- the username of the pusher is read from $USER and the
+ repository name is derived from the repository's path.
+
+ "gitolite" -- the username of the pusher is read from $GL_USER and
+ the repository name from $GL_REPO.
+
+ If neither of these environments is suitable for your setup, then
+ you can implement a Python class that inherits from Environment
+ and instantiate it via a script that looks like the example
+ post-receive script.
+
+ The environment value can be specified on the command line using
+ the --environment option. If it is not specified on the command
+ line or by multimailhook.environment, then it defaults to
+ "gitolite" if the environment contains variables $GL_USER and
+ $GL_REPO; otherwise "generic".
+
+multimailhook.repoName
+
+ A short name of this Git repository, to be used in various places
+ in the notification email text. The default is to use $GL_REPO
+ for gitolite repositories, or otherwise to derive this value from
+ the repository path name.
+
+multimailhook.mailinglist
+
+ The list of email addresses to which notification emails should be
+ sent, as RFC 2822 email addresses separated by commas. This
+ configuration option can be multivalued. Leave it unset or set it
+ to the empty string to not send emails by default. The next few
+ settings can be used to configure specific address lists for
+ specific types of notification email.
+
+multimailhook.refchangeList
+
+ The list of email addresses to which summary emails about
+ reference changes should be sent, as RFC 2822 email addresses
+ separated by commas. This configuration option can be
+ multivalued. The default is the value in
+ multimailhook.mailinglist. Set this value to the empty string to
+ prevent reference change emails from being sent.
+
+multimailhook.announceList
+
+ The list of email addresses to which emails about new annotated
+ tags should be sent, as RFC 2822 email addresses separated by
+ commas. This configuration option can be multivalued. The
+ default is the value in multimailhook.refchangelist or
+ multimailhook.mailinglist. Set this value to the empty string to
+ prevent annotated tag announcement emails from being sent.
+
+multimailhook.commitList
+
+ The list of email addresses to which emails about individual new
+ commits should be sent, as RFC 2822 email addresses separated by
+ commas. This configuration option can be multivalued. The
+ default is the value in multimailhook.mailinglist. Set this value
+ to the empty string to prevent notification emails about
+ individual commits from being sent.
+
+multimailhook.announceShortlog
+
+ If this option is set to true, then emails about changes to
+ annotated tags include a shortlog of changes since the previous
+ tag. This can be useful if the annotated tags represent releases;
+ then the shortlog will be a kind of rough summary of what has
+ happened since the last release. But if your tagging policy is
+ not so straightforward, then the shortlog might be confusing
+ rather than useful. Default is false.
+
+multimailhook.refchangeShowLog
+
+ If this option is set to true, then summary emails about reference
+ changes will include a detailed log of the added commits in
+ addition to the one-line summary. The log is generated by running
+ "git log" with the options specified in multimailhook.logOpts.
+ Default is false.
+
+multimailhook.mailer
+
+ This option changes the way emails are sent. Accepted values are:
+
+ - sendmail (the default): use the command /usr/sbin/sendmail or
+ /usr/lib/sendmail (or sendmailCommand, if configured). This
+ mode can be further customized via the following options:
+
+ multimailhook.sendmailCommand
+
+ The command used by mailer "sendmail" to send emails. Shell
+ quoting is allowed in the value of this setting, but remember that
+ Git requires double-quotes to be escaped; e.g.,
+
+ git config multimailhook.sendmailcommand '/usr/sbin/sendmail -t -F \"Git Repo\"'
+
+ Default is '/usr/sbin/sendmail -t' or '/usr/lib/sendmail
+ -t' (depending on which file is present and executable).
+
+ multimailhook.envelopeSender
+
+ If set then pass this value to sendmail via the -f option to set
+ the envelope sender address.
+
+ - smtp: use Python's smtplib. This is useful when the sendmail
+ command is not available on the system. This mode can be
+ further customized via the following options:
+
+ multimailhook.smtpServer
+
+ The name of the SMTP server to connect to. The value can
+ also include a colon and a port number; e.g.,
+ "mail.example.com:25". Default is 'localhost' using port
+ 25.
+
+ multimailhook.envelopeSender
+
+ The sender address to be passed to the SMTP server. If
+ unset, then the value of multimailhook.from is used.
+
+multimailhook.from
+
+ If set then use this value in the From: field of generated emails.
+ If unset, then use the repository's user configuration (user.name
+ and user.email). If user.email is also unset, then use
+ multimailhook.envelopeSender.
+
+multimailhook.administrator
+
+ The name and/or email address of the administrator of the Git
+ repository; used in FOOTER_TEMPLATE. Default is
+ multimailhook.envelopesender if it is set; otherwise a generic
+ string is used.
+
+multimailhook.emailPrefix
+
+ All emails have this string prepended to their subjects, to aid
+ email filtering (though filtering based on the X-Git-* email
+ headers is probably more robust). Default is the short name of
+ the repository in square brackets; e.g., "[myrepo]".
+
+multimailhook.emailMaxLines
+
+ The maximum number of lines that should be included in the body of
+ a generated email. If not specified, there is no limit. Lines
+ beyond the limit are suppressed and counted, and a final line is
+ added indicating the number of suppressed lines.
+
+multimailhook.emailMaxLineLength
+
+ The maximum length of a line in the email body. Lines longer than
+ this limit are truncated to this length with a trailing " [...]"
+ added to indicate the missing text. The default is 500, because
+ (a) diffs with longer lines are probably from binary files, for
+ which a diff is useless, and (b) even if a text file has such long
+ lines, the diffs are probably unreadable anyway. To disable line
+ truncation, set this option to 0.
+
+multimailhook.maxCommitEmails
+
+ The maximum number of commit emails to send for a given change.
+ When the number of patches is larger than this value, only the
+ summary refchange email is sent. This can avoid accidental
+ mailbombing, for example on an initial push. To disable the limit
+ on commit emails, set this option to 0. The default is 500.
+
+multimailhook.emailStrictUTF8
+
+ If this boolean option is set to "true", then the main part of the
+ email body is forced to be valid UTF-8. Any characters that are
+ not valid UTF-8 are converted to the Unicode replacement
+ character, U+FFFD. The default is "true".
+
+multimailhook.diffOpts
+
+ Options passed to "git diff-tree" when generating the summary
+ information for ReferenceChange emails. Default is "--stat
+ --summary --find-copies-harder". Add -p to those options to
+ include a unified diff of changes in addition to the usual summary
+ output. Shell quoting is allowed; see multimailhook.logOpts for
+ details.
+
+multimailhook.logOpts
+
+ Options passed to "git log" to generate additional info for
+ reference change emails (used only if refchangeShowLog is set).
+ For example, adding --graph will show the graph of revisions, -p
+ will show the complete diff, etc. The default is empty.
+
+ Shell quoting is allowed; for example, a log format that contains
+ spaces can be specified using something like:
+
+ git config multimailhook.logopts '--pretty=format:"%h %aN <%aE>%n%s%n%n%b%n"'
+
+ If you want to set this by editing your configuration file
+ directly, remember that Git requires double-quotes to be escaped
+ (see git-config(1) for more information):
+
+ [multimailhook]
+ logopts = --pretty=format:\"%h %aN <%aE>%n%s%n%n%b%n\"
+
+multimailhook.emailDomain
+
+ Domain name appended to the username of the person doing the push
+ to convert it into an email address (via "%s@%s" % (username,
+ emaildomain)). More complicated schemes can be implemented by
+ subclassing Environment and overriding its get_pusher_email()
+ method.
+
+multimailhook.replyTo
+multimailhook.replyToCommit
+multimailhook.replyToRefchange
+
+ Addresses to use in the Reply-To: field for commit emails
+ (replyToCommit) and refchange emails (replyToRefchange).
+ multimailhook.replyTo is used as the default when replyToCommit or
+ replyToRefchange is not set. The value for these variables can be
+ either:
+
+ - An email address, which will be used directly.
+
+ - The value "pusher", in which case the pusher's address (if
+ available) will be used. This is the default for refchange
+ emails.
+
+ - The value "author" (meaningful only for replyToCommit), in which
+ case the commit author's address will be used. This is the
+ default for commit emails.
+
+ - The value "none", in which case the Reply-To: field will be
+ omitted.
+
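+As an illustration only (the addresses and repository name below are
+assumptions, not defaults), a setup that sends all notifications to a
+single list, adds a shortlog to tag announcements, and delivers mail
+through a local SMTP server could be configured like this:
+
+    git config multimailhook.mailinglist 'commits@example.com'
+    git config multimailhook.announceShortlog true
+    git config multimailhook.mailer smtp
+    git config multimailhook.smtpServer 'mail.example.com:25'
+    git config multimailhook.emailPrefix '[myrepo]'
+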
+
+Email filtering aids
+--------------------
+
+All emails include extra headers to enable fine-tuned filtering and
+to provide information for debugging. All emails include the headers
+"X-Git-Repo", "X-Git-Refname", and "X-Git-Reftype". ReferenceChange
+emails also include headers "X-Git-Oldrev" and "X-Git-Newrev";
+Revision emails also include header "X-Git-Rev".
+
+
+Customizing email contents
+--------------------------
+
+git-multimail mostly generates emails by expanding templates. The
+templates can be customized. To avoid the need to edit
+git_multimail.py directly, the preferred way to change the templates
+is to write a separate Python script that imports git_multimail.py as
+a module, then replaces the templates in place. See the provided
+post-receive script for an example of how this is done.
+
+
+Customizing git-multimail for your environment
+----------------------------------------------
+
+git-multimail is mostly customized via an "environment" that describes
+the local environment in which Git is running. Two types of
+environment are built in:
+
+* GenericEnvironment: a stand-alone Git repository.
+
+* GitoliteEnvironment: a Git repository that is managed by gitolite
+ [3]. For such repositories, the identity of the pusher is read from
+ environment variable $GL_USER, and the name of the repository is
+ read from $GL_REPO (if it is not overridden by
+ multimailhook.reponame).
+
+By default, git-multimail assumes GitoliteEnvironment if $GL_USER and
+$GL_REPO are set, and otherwise assumes GenericEnvironment.
+Alternatively, you can choose one of these two environments explicitly
+by setting a "multimailhook.environment" config setting (which can
+have the value "generic" or "gitolite") or by passing an --environment
+option to the script.
+
+If you need to customize the script in ways that are not supported by
+the existing environments, you can define your own environment class
+using arbitrary Python code. To do so, you need to import
+git_multimail.py as a Python module, as demonstrated by the example
+post-receive script. Then implement your environment class; it should
+usually inherit from one of the existing Environment classes and
+possibly one or more of the EnvironmentMixin classes. Then set the
+"environment" variable to an instance of your own environment class
+and pass it to run_as_post_receive_hook().
+
+The standard environment classes, GenericEnvironment and
+GitoliteEnvironment, are in fact themselves put together out of a
+number of mixin classes, each of which handles one aspect of the
+customization. For the finest control over your configuration, you
+can specify exactly which mixin classes your own environment class
+should inherit from, and override individual methods (or even add your
+own mixin classes) to implement entirely new behaviors. If you
+implement any mixins that might be useful to other people, please
+consider sharing them with the community!
+
+
+Getting involved
+----------------
+
+git-multimail is an open-source project, built by volunteers. We
+would welcome your help!
+
+The current maintainer is Michael Haggerty <mhagger@alum.mit.edu>.
+
+General discussion of git-multimail takes place on the main Git
+mailing list,
+
+ git@vger.kernel.org
+
+Please CC emails regarding git-multimail to me so that I don't
+overlook them.
+
+The git-multimail project itself is currently hosted on GitHub:
+
+ https://github.com/mhagger/git-multimail
+
+We use the GitHub issue tracker to keep track of bugs and feature
+requests, and GitHub pull requests to exchange patches (though, if you
+prefer, you can send patches via the Git mailing list with cc to me).
+
+Please note that although a copy of git-multimail will probably be
+distributed in the "contrib" section of the main Git project,
+development takes place in the separate git-multimail repository on
+GitHub! (Whenever enough changes to git-multimail have accumulated, a
+new code-drop of git-multimail will be submitted for inclusion in the
+Git project.)
+
+
+Footnotes
+---------
+
+[1] http://www.python.org/dev/peps/pep-0394/
+
+[2] Because of the way information is passed to update hooks, the
+ script's method of determining whether a commit has already been
+ seen does not work when it is used as an "update" script. In
+ particular, no notification email will be generated for a new
+ commit that is added to multiple references in the same push.
+
+[3] https://github.com/sitaramc/gitolite
diff --git a/contrib/hooks/multimail/README.Git b/contrib/hooks/multimail/README.Git
new file mode 100644
index 0000000000..9c2e66a69a
--- /dev/null
+++ b/contrib/hooks/multimail/README.Git
@@ -0,0 +1,15 @@
+This copy of git-multimail is distributed as part of the "contrib"
+section of the Git project as a convenience to Git users.
+git-multimail is developed as an independent project at the following
+website:
+
+ https://github.com/mhagger/git-multimail
+
+The version in this directory was obtained from the upstream project
+on 2013-07-14 and consists of the "git-multimail" subdirectory from
+revision
+
+ 1a5cb09c698a74d15a715a86b09ead5f56bf4b06
+
+Please see the README file in this directory for information about how
+to report bugs or contribute to git-multimail.
diff --git a/contrib/hooks/multimail/README.migrate-from-post-receive-email b/contrib/hooks/multimail/README.migrate-from-post-receive-email
new file mode 100644
index 0000000000..1e6a976699
--- /dev/null
+++ b/contrib/hooks/multimail/README.migrate-from-post-receive-email
@@ -0,0 +1,145 @@
+git-multimail is close to, but not exactly, a plug-in replacement for
+the old Git project script contrib/hooks/post-receive-email. This
+document describes the differences and explains how to configure
+git-multimail to get behavior closest to that of post-receive-email.
+
+If you are in a hurry
+=====================
+
+A script called migrate-mailhook-config is included with
+git-multimail. If you run this script within a Git repository that is
+configured to use post-receive-email, it will convert the
+configuration settings into the approximate equivalent settings for
+git-multimail. For more information, run
+
+ migrate-mailhook-config --help
+
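+For instance (a sketch only; both paths are assumptions), the
+conversion can be run in place like this:
+
+    cd /srv/git/project.git        # hypothetical repository still using post-receive-email
+    /path/to/git-multimail/migrate-mailhook-config
+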
+
+Configuration differences
+=========================
+
+* The names of the config options for git-multimail are in namespace
+ "multimailhook.*" instead of "hooks.*". (Editorial comment:
+ post-receive-email should never have used such a generic top-level
+ namespace.)
+
+* In emails about new annotated tags, post-receive-email includes a
+ shortlog of all changes since the previous annotated tag. To get
+ this behavior with git-multimail, you need to set
+ multimailhook.announceshortlog to true:
+
+ git config multimailhook.announceshortlog true
+
+* multimailhook.commitlist -- This is a new configuration variable.
+ Recipients listed here will receive a separate email for each new
+ commit. However, if this variable is *not* set, it defaults to the
+ value of multimailhook.mailinglist. Therefore, if you *don't* want
+ the members of multimailhook.mailinglist to receive one email per
+ commit, then set this value to the empty string:
+
+ git config multimailhook.commitlist ''
+
+* multimailhook.emailprefix -- If this value is not set, then the
+ subjects of generated emails are prefixed with the short name of the
+ repository enclosed in square brackets; e.g., "[myrepo]".
+ post-receive-email defaults to prefix "[SCM]" if this option is not
+ set. So if you were using the old default and want to retain it
+ (for example, to avoid having to change your email filters), set
+ this variable explicitly to the old value:
+
+ git config multimailhook.emailprefix "[SCM]"
+
+* The "multimailhook.showrev" configuration option is not supported.
+ Its main use is obsoleted by the one-email-per-commit feature of
+ git-multimail.
+
+
+Other differences
+=================
+
+This section describes other differences in the behavior of
+git-multimail vs. post-receive-email. For full details, please refer
+to the main README file:
+
+* One email per commit. For each reference change, the script first
+ outputs one email summarizing the reference change (including
+ one-line summaries of the new commits), then it outputs a separate
+ email for each new commit that was introduced, including patches.
+ These one-email-per-commit emails go to the addresses listed in
+ multimailhook.commitlist. post-receive-email sends only one email
+ for each *reference* that is changed, no matter how many commits
+ were added to the reference.
+
+* Better algorithm for detecting new commits. post-receive-email
+ processes one reference change at a time, which causes it to fail to
+ describe new commits that were included in multiple branches. For
+ example, if a single push adds the "*" commits in the diagram below,
+ then post-receive-email would never include the details of the two
+ commits that are common to "master" and "branch" in its
+ notifications.
+
+ o---o---o---*---*---* <-- master
+ \
+ *---* <-- branch
+
+ git-multimail analyzes all reference modifications to determine
+ which commits were not present before the change, therefore avoiding
+ that error.
+
+* In reference change emails, git-multimail distinguishes commits
+ that were merely added to the reference from those that are entirely
+ new to the repository, and commits that were merely omitted from the
+ reference from those that were entirely discarded from the
+ repository.
+
+* The environment in which Git is running can be configured via an
+ "Environment" abstraction.
+
+* Built-in support for Gitolite-managed repositories.
+
+* Instead of using full SHA1 object names in emails, git-multimail
+ mostly uses abbreviated SHA1s, plus one-line log message summaries
+ where appropriate.
+
+* In the schematic diagrams that explain non-fast-forward commits,
+ git-multimail shows the names of the branches involved.
+
+* The emails generated by git-multimail include the name of the Git
+ repository that was modified; this is convenient for recipients who
+ are monitoring multiple repositories.
+
+* git-multimail allows the email "From" addresses to be configured.
+
+* The recipients lists (multimailhook.mailinglist,
+ multimailhook.refchangelist, multimailhook.announcelist, and
+ multimailhook.commitlist) can be comma-separated values and/or
+ multivalued settings in the config file; e.g.,
+
+ [multimailhook]
+ mailinglist = mr.brown@example.com, mr.black@example.com
+ announcelist = Him <him@example.com>
+ announcelist = Jim <jim@example.com>
+ announcelist = pop@example.com
+
+ This might make it easier to maintain short recipients lists without
+ requiring full-fledged mailing list software.
+
+* By default, git-multimail sets email "Reply-To" headers to reply to
+ the pusher (for reference updates) and to the author (for commit
+ notifications). By default, the pusher's email address is
+ constructed by appending "multimailhook.emaildomain" to the pusher's
+ username.
+
+* The generated emails contain a configurable footer. By default, it
+ lists the name of the administrator who should be contacted to
+ unsubscribe from notification emails.
+
+* New option multimailhook.emailmaxlinelength to limit the length of
+ lines in the main part of the email body. The default limit is 500
+ characters.
+
+* New option multimailhook.emailstrictutf8 to ensure that the main
+ part of the email body is valid UTF-8. Invalid characters are
+ turned into the Unicode replacement character, U+FFFD. By default
+ this option is turned on.
+
+* Written in Python. Easier to add new features.
diff --git a/contrib/hooks/multimail/git_multimail.py b/contrib/hooks/multimail/git_multimail.py
new file mode 100755
index 0000000000..81c6a51706
--- /dev/null
+++ b/contrib/hooks/multimail/git_multimail.py
@@ -0,0 +1,2393 @@
+#! /usr/bin/env python2
+
+# Copyright (c) 2012,2013 Michael Haggerty
+# Derived from contrib/hooks/post-receive-email, which is
+# Copyright (c) 2007 Andy Parkins
+# and also includes contributions by other authors.
+#
+# This file is part of git-multimail.
+#
+# git-multimail is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License version
+# 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see
+# <http://www.gnu.org/licenses/>.
+
+"""Generate notification emails for pushes to a git repository.
+
+This hook sends emails describing changes introduced by pushes to a
+git repository. For each reference that was changed, it emits one
+ReferenceChange email summarizing how the reference was changed,
+followed by one Revision email for each new commit that was introduced
+by the reference change.
+
+Each commit is announced in exactly one Revision email. If the same
+commit is merged into another branch in the same or a later push, then
+the ReferenceChange email will list the commit's SHA1 and its one-line
+summary, but no new Revision email will be generated.
+
+This script is designed to be used as a "post-receive" hook in a git
+repository (see githooks(5)). It can also be used as an "update"
+script, but this usage is not completely reliable and is deprecated.
+
+To help with debugging, this script accepts a --stdout option, which
+causes the emails to be written to standard output rather than sent
+using sendmail.
+
+See the accompanying README file for the complete documentation.
+
+"""
+
+import sys
+import os
+import re
+import bisect
+import subprocess
+import shlex
+import optparse
+import smtplib
+
+try:
+ from email.utils import make_msgid
+ from email.utils import getaddresses
+ from email.utils import formataddr
+ from email.header import Header
+except ImportError:
+ # Prior to Python 2.5, the email module used different names:
+ from email.Utils import make_msgid
+ from email.Utils import getaddresses
+ from email.Utils import formataddr
+ from email.Header import Header
+
+
+DEBUG = False
+
+ZEROS = '0' * 40
+LOGBEGIN = '- Log -----------------------------------------------------------------\n'
+LOGEND = '-----------------------------------------------------------------------\n'
+
+
+# It is assumed in many places that the encoding is uniformly UTF-8,
+# so changing these constants is unsupported. But define them here
+# anyway, to make it easier to find (at least most of) the places
+# where the encoding is important.
+(ENCODING, CHARSET) = ('UTF-8', 'utf-8')
+
+
+REF_CREATED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s created'
+ ' (now %(newrev_short)s)'
+ )
+REF_UPDATED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s updated'
+ ' (%(oldrev_short)s -> %(newrev_short)s)'
+ )
+REF_DELETED_SUBJECT_TEMPLATE = (
+ '%(emailprefix)s%(refname_type)s %(short_refname)s deleted'
+ ' (was %(oldrev_short)s)'
+ )
+
+REFCHANGE_HEADER_TEMPLATE = """\
+To: %(recipients)s
+Subject: %(subject)s
+MIME-Version: 1.0
+Content-Type: text/plain; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+Message-ID: %(msgid)s
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Oldrev: %(oldrev)s
+X-Git-Newrev: %(newrev)s
+Auto-Submitted: auto-generated
+"""
+
+REFCHANGE_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a change to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+
+FOOTER_TEMPLATE = """\
+
+-- \n\
+To stop receiving notification emails like this one, please contact
+%(administrator)s.
+"""
+
+
+REWIND_ONLY_TEMPLATE = """\
+This update removed existing revisions from the reference, leaving the
+reference pointing at a previous point in the repository history.
+
+ * -- * -- N %(refname)s (%(newrev_short)s)
+ \\
+ O -- O -- O (%(oldrev_short)s)
+
+Any revisions marked "omits" are not gone; other references still
+refer to them. Any revisions marked "discards" are gone forever.
+"""
+
+
+NON_FF_TEMPLATE = """\
+This update added new revisions after undoing existing revisions.
+That is to say, some revisions that were in the old version of the
+%(refname_type)s are not in the new version. This situation occurs
+when a user --force pushes a change and generates a repository
+containing something like this:
+
+ * -- * -- B -- O -- O -- O (%(oldrev_short)s)
+ \\
+ N -- N -- N %(refname)s (%(newrev_short)s)
+
+You should already have received notification emails for all of the O
+revisions, and so the following emails describe only the N revisions
+from the common base, B.
+
+Any revisions marked "omits" are not gone; other references still
+refer to them. Any revisions marked "discards" are gone forever.
+"""
+
+
+NO_NEW_REVISIONS_TEMPLATE = """\
+No new revisions were added by this update.
+"""
+
+
+DISCARDED_REVISIONS_TEMPLATE = """\
+This change permanently discards the following revisions:
+"""
+
+
+NO_DISCARDED_REVISIONS_TEMPLATE = """\
+The revisions that were on this %(refname_type)s are still contained in
+other references; therefore, this change does not discard any commits
+from the repository.
+"""
+
+
+NEW_REVISIONS_TEMPLATE = """\
+The %(tot)s revisions listed above as "new" are entirely new to this
+repository and will be described in separate emails. The revisions
+listed as "adds" were already present in the repository and have only
+been added to this reference.
+
+"""
+
+
+TAG_CREATED_TEMPLATE = """\
+ at %(newrev_short)-9s (%(newrev_type)s)
+"""
+
+
+TAG_UPDATED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was modified! ***
+
+ from %(oldrev_short)-9s (%(oldrev_type)s)
+ to %(newrev_short)-9s (%(newrev_type)s)
+"""
+
+
+TAG_DELETED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was deleted! ***
+
+"""
+
+
+# The template used in summary tables. It looks best if this uses the
+# same alignment as TAG_CREATED_TEMPLATE and TAG_UPDATED_TEMPLATE.
+BRIEF_SUMMARY_TEMPLATE = """\
+%(action)10s %(rev_short)-9s %(text)s
+"""
+
+
+NON_COMMIT_UPDATE_TEMPLATE = """\
+This is an unusual reference change because the reference did not
+refer to a commit either before or after the change. We do not know
+how to provide full information about this reference change.
+"""
+
+
+REVISION_HEADER_TEMPLATE = """\
+To: %(recipients)s
+Subject: %(emailprefix)s%(num)02d/%(tot)02d: %(oneline)s
+MIME-Version: 1.0
+Content-Type: text/plain; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+In-Reply-To: %(reply_to_msgid)s
+References: %(reply_to_msgid)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Rev: %(rev)s
+Auto-Submitted: auto-generated
+"""
+
+REVISION_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a commit to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+
+REVISION_FOOTER_TEMPLATE = FOOTER_TEMPLATE
+
+
+class CommandError(Exception):
+ def __init__(self, cmd, retcode):
+ self.cmd = cmd
+ self.retcode = retcode
+ Exception.__init__(
+ self,
+ 'Command "%s" failed with retcode %s' % (' '.join(cmd), retcode,)
+ )
+
+
+class ConfigurationException(Exception):
+ pass
+
+
+def read_git_output(args, input=None, keepends=False, **kw):
+ """Read the output of a Git command."""
+
+ return read_output(
+ ['git', '-c', 'i18n.logoutputencoding=%s' % (ENCODING,)] + args,
+ input=input, keepends=keepends, **kw
+ )
+
+
+def read_output(cmd, input=None, keepends=False, **kw):
+ if input:
+ stdin = subprocess.PIPE
+ else:
+ stdin = None
+ p = subprocess.Popen(
+ cmd, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw
+ )
+ (out, err) = p.communicate(input)
+ retcode = p.wait()
+ if retcode:
+ raise CommandError(cmd, retcode)
+ if not keepends:
+ out = out.rstrip('\n\r')
+ return out
+
+
+def read_git_lines(args, keepends=False, **kw):
+ """Return the lines output by Git command.
+
+ Return as single lines, with newlines stripped off."""
+
+ return read_git_output(args, keepends=True, **kw).splitlines(keepends)
+
+
+class Config(object):
+ def __init__(self, section, git_config=None):
+ """Represent a section of the git configuration.
+
+ If git_config is specified, it is passed to "git config" in
+ the GIT_CONFIG environment variable, meaning that "git config"
+ will read the specified path rather than the Git default
+ config paths."""
+
+ self.section = section
+ if git_config:
+ self.env = os.environ.copy()
+ self.env['GIT_CONFIG'] = git_config
+ else:
+ self.env = None
+
+ @staticmethod
+ def _split(s):
+ """Split NUL-terminated values."""
+
+ words = s.split('\0')
+ assert words[-1] == ''
+ return words[:-1]
+
+ def get(self, name, default=None):
+ try:
+ values = self._split(read_git_output(
+ ['config', '--get', '--null', '%s.%s' % (self.section, name)],
+ env=self.env, keepends=True,
+ ))
+ assert len(values) == 1
+ return values[0]
+ except CommandError:
+ return default
+
+ def get_bool(self, name, default=None):
+ try:
+ value = read_git_output(
+ ['config', '--get', '--bool', '%s.%s' % (self.section, name)],
+ env=self.env,
+ )
+ except CommandError:
+ return default
+ return value == 'true'
+
+ def get_all(self, name, default=None):
+ """Read a (possibly multivalued) setting from the configuration.
+
+ Return the result as a list of values, or default if the name
+ is unset."""
+
+ try:
+ return self._split(read_git_output(
+ ['config', '--get-all', '--null', '%s.%s' % (self.section, name)],
+ env=self.env, keepends=True,
+ ))
+ except CommandError, e:
+ if e.retcode == 1:
+ # "the section or key is invalid"; i.e., there is no
+ # value for the specified key.
+ return default
+ else:
+ raise
+
+ def get_recipients(self, name, default=None):
+ """Read a recipients list from the configuration.
+
+ Return the result as a comma-separated list of email
+ addresses, or default if the option is unset. If the setting
+ has multiple values, concatenate them with comma separators."""
+
+ lines = self.get_all(name, default=None)
+ if lines is None:
+ return default
+ return ', '.join(line.strip() for line in lines)
+
+ def set(self, name, value):
+ read_git_output(
+ ['config', '%s.%s' % (self.section, name), value],
+ env=self.env,
+ )
+
+ def add(self, name, value):
+ read_git_output(
+ ['config', '--add', '%s.%s' % (self.section, name), value],
+ env=self.env,
+ )
+
+ def has_key(self, name):
+ return self.get_all(name, default=None) is not None
+
+ def unset_all(self, name):
+ try:
+ read_git_output(
+ ['config', '--unset-all', '%s.%s' % (self.section, name)],
+ env=self.env,
+ )
+ except CommandError, e:
+ if e.retcode == 5:
+ # The name doesn't exist, which is what we wanted anyway...
+ pass
+ else:
+ raise
+
+ def set_recipients(self, name, value):
+ self.unset_all(name)
+ for pair in getaddresses([value]):
+ self.add(name, formataddr(pair))
+
+
+def generate_summaries(*log_args):
+ """Generate a brief summary for each revision requested.
+
+ log_args are strings that will be passed directly to "git log" as
+ revision selectors. Iterate over (sha1_short, subject) for each
+ commit specified by log_args (subject is the first line of the
+ commit message as a string without EOLs)."""
+
+ cmd = [
+ 'log', '--abbrev', '--format=%h %s',
+ ] + list(log_args) + ['--']
+ for line in read_git_lines(cmd):
+ yield tuple(line.split(' ', 1))
+
+
+def limit_lines(lines, max_lines):
+ for (index, line) in enumerate(lines):
+ if index < max_lines:
+ yield line
+
+ if index >= max_lines:
+ yield '... %d lines suppressed ...\n' % (index + 1 - max_lines,)
+
+
+def limit_linelength(lines, max_linelength):
+ for line in lines:
+ # Don't forget that lines always include a trailing newline.
+ if len(line) > max_linelength + 1:
+ line = line[:max_linelength - 7] + ' [...]\n'
+ yield line
+
+
+class CommitSet(object):
+ """A (constant) set of object names.
+
+ The set should be initialized with full SHA1 object names. The
+ __contains__() method returns True iff its argument is an
+ abbreviation of any of the names in the set."""
+
+ def __init__(self, names):
+ self._names = sorted(names)
+
+ def __len__(self):
+ return len(self._names)
+
+ def __contains__(self, sha1_abbrev):
+ """Return True iff this set contains sha1_abbrev (which might be abbreviated)."""
+
+ i = bisect.bisect_left(self._names, sha1_abbrev)
+ return i < len(self) and self._names[i].startswith(sha1_abbrev)
+
+
+class GitObject(object):
+ def __init__(self, sha1, type=None):
+ if sha1 == ZEROS:
+ self.sha1 = self.type = self.commit_sha1 = None
+ else:
+ self.sha1 = sha1
+ self.type = type or read_git_output(['cat-file', '-t', self.sha1])
+
+ if self.type == 'commit':
+ self.commit_sha1 = self.sha1
+ elif self.type == 'tag':
+ try:
+ self.commit_sha1 = read_git_output(
+ ['rev-parse', '--verify', '%s^0' % (self.sha1,)]
+ )
+ except CommandError:
+ # Cannot deref tag to determine commit_sha1
+ self.commit_sha1 = None
+ else:
+ self.commit_sha1 = None
+
+ self.short = read_git_output(['rev-parse', '--short', sha1])
+
+ def get_summary(self):
+ """Return (sha1_short, subject) for this commit."""
+
+ if not self.sha1:
+ raise ValueError('Empty commit has no summary')
+
+ return iter(generate_summaries('--no-walk', self.sha1)).next()
+
+ def __eq__(self, other):
+ return isinstance(other, GitObject) and self.sha1 == other.sha1
+
+ def __hash__(self):
+ return hash(self.sha1)
+
+ def __nonzero__(self):
+ return bool(self.sha1)
+
+ def __str__(self):
+ return self.sha1 or ZEROS
+
+
+class Change(object):
+ """A Change that has been made to the Git repository.
+
+ Abstract class from which both Revisions and ReferenceChanges are
+ derived. A Change knows how to generate a notification email
+ describing itself."""
+
+ def __init__(self, environment):
+ self.environment = environment
+ self._values = None
+
+ def _compute_values(self):
+ """Return a dictionary {keyword : expansion} for this Change.
+
+ Derived classes overload this method to add more entries to
+ the return value. This method is used internally by
+ get_values(). The return value should always be a new
+ dictionary."""
+
+ return self.environment.get_values()
+
+ def get_values(self, **extra_values):
+ """Return a dictionary {keyword : expansion} for this Change.
+
+ Return a dictionary mapping keywords to the values that they
+ should be expanded to for this Change (used when interpolating
+ template strings). If any keyword arguments are supplied, add
+ those to the return value as well. The return value is always
+ a new dictionary."""
+
+ if self._values is None:
+ self._values = self._compute_values()
+
+ values = self._values.copy()
+ if extra_values:
+ values.update(extra_values)
+ return values
+
+ def expand(self, template, **extra_values):
+ """Expand template.
+
+ Expand the template (which should be a string) using string
+ interpolation of the values for this Change. If any keyword
+ arguments are provided, also include those in the keywords
+ available for interpolation."""
+
+ return template % self.get_values(**extra_values)
+
+ def expand_lines(self, template, **extra_values):
+ """Break template into lines and expand each line."""
+
+ values = self.get_values(**extra_values)
+ for line in template.splitlines(True):
+ yield line % values
+
+ def expand_header_lines(self, template, **extra_values):
+ """Break template into lines and expand each line as an RFC 2822 header.
+
+ Encode values and split up lines that are too long. Silently
+ skip lines that contain references to unknown variables."""
+
+ values = self.get_values(**extra_values)
+ for line in template.splitlines():
+ (name, value) = line.split(':', 1)
+
+ try:
+ value = value % values
+ except KeyError, e:
+ if DEBUG:
+ sys.stderr.write(
+ 'Warning: unknown variable %r in the following line; line skipped:\n'
+ ' %s\n'
+ % (e.args[0], line,)
+ )
+ else:
+ try:
+ h = Header(value, header_name=name)
+ except UnicodeDecodeError:
+ h = Header(value, header_name=name, charset=CHARSET, errors='replace')
+ for splitline in ('%s: %s\n' % (name, h.encode(),)).splitlines(True):
+ yield splitline
+
+ def generate_email_header(self):
+ """Generate the RFC 2822 email headers for this Change, a line at a time.
+
+ The output should not include the trailing blank line."""
+
+ raise NotImplementedError()
+
+ def generate_email_intro(self):
+ """Generate the email intro for this Change, a line at a time.
+
+ The output will be used as the standard boilerplate at the top
+ of the email body."""
+
+ raise NotImplementedError()
+
+ def generate_email_body(self):
+ """Generate the main part of the email body, a line at a time.
+
+ The text in the body might be truncated after a specified
+ number of lines (see multimailhook.emailmaxlines)."""
+
+ raise NotImplementedError()
+
+ def generate_email_footer(self):
+ """Generate the footer of the email, a line at a time.
+
+ The footer is always included, irrespective of
+ multimailhook.emailmaxlines."""
+
+ raise NotImplementedError()
+
+ def generate_email(self, push, body_filter=None):
+ """Generate an email describing this change.
+
+ Iterate over the lines (including the header lines) of an
+ email describing this change. If body_filter is not None,
+ then use it to filter the lines that are intended for the
+ email body."""
+
+ for line in self.generate_email_header():
+ yield line
+ yield '\n'
+ for line in self.generate_email_intro():
+ yield line
+
+ body = self.generate_email_body(push)
+ if body_filter is not None:
+ body = body_filter(body)
+ for line in body:
+ yield line
+
+ for line in self.generate_email_footer():
+ yield line
+
+
+class Revision(Change):
+ """A Change consisting of a single git commit."""
+
+ def __init__(self, reference_change, rev, num, tot):
+ Change.__init__(self, reference_change.environment)
+ self.reference_change = reference_change
+ self.rev = rev
+ self.change_type = self.reference_change.change_type
+ self.refname = self.reference_change.refname
+ self.num = num
+ self.tot = tot
+ self.author = read_git_output(['log', '--no-walk', '--format=%aN <%aE>', self.rev.sha1])
+ self.recipients = self.environment.get_revision_recipients(self)
+
+ def _compute_values(self):
+ values = Change._compute_values(self)
+
+ oneline = read_git_output(
+ ['log', '--format=%s', '--no-walk', self.rev.sha1]
+ )
+
+ values['rev'] = self.rev.sha1
+ values['rev_short'] = self.rev.short
+ values['change_type'] = self.change_type
+ values['refname'] = self.refname
+ values['short_refname'] = self.reference_change.short_refname
+ values['refname_type'] = self.reference_change.refname_type
+ values['reply_to_msgid'] = self.reference_change.msgid
+ values['num'] = self.num
+ values['tot'] = self.tot
+ values['recipients'] = self.recipients
+ values['oneline'] = oneline
+ values['author'] = self.author
+
+ reply_to = self.environment.get_reply_to_commit(self)
+ if reply_to:
+ values['reply_to'] = reply_to
+
+ return values
+
+ def generate_email_header(self):
+ for line in self.expand_header_lines(REVISION_HEADER_TEMPLATE):
+ yield line
+
+ def generate_email_intro(self):
+ for line in self.expand_lines(REVISION_INTRO_TEMPLATE):
+ yield line
+
+ def generate_email_body(self, push):
+ """Show this revision."""
+
+ return read_git_lines(
+ [
+ 'log', '-C',
+ '--stat', '-p', '--cc',
+ '-1', self.rev.sha1,
+ ],
+ keepends=True,
+ )
+
+ def generate_email_footer(self):
+ return self.expand_lines(REVISION_FOOTER_TEMPLATE)
+
+
+class ReferenceChange(Change):
+ """A Change to a Git reference.
+
+ An abstract class representing a create, update, or delete of a
+ Git reference. Derived classes handle specific types of reference
+ (e.g., tags vs. branches). These classes generate the main
+ reference change email summarizing the reference change and
+ whether it caused any commits to be added or removed.
+
+ ReferenceChange objects are usually created using the static
+ create() method, which has the logic to decide which derived class
+ to instantiate."""
+
+ REF_RE = re.compile(r'^refs\/(?P<area>[^\/]+)\/(?P<shortname>.*)$')
+
+ @staticmethod
+ def create(environment, oldrev, newrev, refname):
+ """Return a ReferenceChange object representing the change.
+
+ Return an object that represents the type of change that is being
+ made. oldrev and newrev should be SHA1s or ZEROS."""
+
+ old = GitObject(oldrev)
+ new = GitObject(newrev)
+ rev = new or old
+
+ # The revision type tells us what type the commit is; combined
+ # with the location of the ref, we can decide between
+ # - working branch
+ # - tracking branch
+ # - unannotated tag
+ # - annotated tag
+ m = ReferenceChange.REF_RE.match(refname)
+ if m:
+ area = m.group('area')
+ short_refname = m.group('shortname')
+ else:
+ area = ''
+ short_refname = refname
+
+ if rev.type == 'tag':
+ # Annotated tag:
+ klass = AnnotatedTagChange
+ elif rev.type == 'commit':
+ if area == 'tags':
+ # Non-annotated tag:
+ klass = NonAnnotatedTagChange
+ elif area == 'heads':
+ # Branch:
+ klass = BranchChange
+ elif area == 'remotes':
+ # Tracking branch:
+ sys.stderr.write(
+ '*** Push-update of tracking branch %r\n'
+ '*** - incomplete email generated.\n'
+ % (refname,)
+ )
+ klass = OtherReferenceChange
+ else:
+ # Some other reference namespace:
+ sys.stderr.write(
+ '*** Push-update of strange reference %r\n'
+ '*** - incomplete email generated.\n'
+ % (refname,)
+ )
+ klass = OtherReferenceChange
+ else:
+ # Anything else (is there anything else?)
+ sys.stderr.write(
+ '*** Unknown type of update to %r (%s)\n'
+ '*** - incomplete email generated.\n'
+ % (refname, rev.type,)
+ )
+ klass = OtherReferenceChange
+
+ return klass(
+ environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ Change.__init__(self, environment)
+ self.change_type = {
+ (False, True) : 'create',
+ (True, True) : 'update',
+ (True, False) : 'delete',
+ }[bool(old), bool(new)]
+ self.refname = refname
+ self.short_refname = short_refname
+ self.old = old
+ self.new = new
+ self.rev = rev
+ self.msgid = make_msgid()
+ self.diffopts = environment.diffopts
+ self.logopts = environment.logopts
+ self.showlog = environment.refchange_showlog
+
+ def _compute_values(self):
+ values = Change._compute_values(self)
+
+ values['change_type'] = self.change_type
+ values['refname_type'] = self.refname_type
+ values['refname'] = self.refname
+ values['short_refname'] = self.short_refname
+ values['msgid'] = self.msgid
+ values['recipients'] = self.recipients
+ values['oldrev'] = str(self.old)
+ values['oldrev_short'] = self.old.short
+ values['newrev'] = str(self.new)
+ values['newrev_short'] = self.new.short
+
+ if self.old:
+ values['oldrev_type'] = self.old.type
+ if self.new:
+ values['newrev_type'] = self.new.type
+
+ reply_to = self.environment.get_reply_to_refchange(self)
+ if reply_to:
+ values['reply_to'] = reply_to
+
+ return values
+
+ def get_subject(self):
+ template = {
+ 'create' : REF_CREATED_SUBJECT_TEMPLATE,
+ 'update' : REF_UPDATED_SUBJECT_TEMPLATE,
+ 'delete' : REF_DELETED_SUBJECT_TEMPLATE,
+ }[self.change_type]
+ return self.expand(template)
+
+ def generate_email_header(self):
+ for line in self.expand_header_lines(
+ REFCHANGE_HEADER_TEMPLATE, subject=self.get_subject(),
+ ):
+ yield line
+
+ def generate_email_intro(self):
+ for line in self.expand_lines(REFCHANGE_INTRO_TEMPLATE):
+ yield line
+
+ def generate_email_body(self, push):
+ """Call the appropriate body-generation routine.
+
+ Call one of generate_create_summary() /
+ generate_update_summary() / generate_delete_summary()."""
+
+ change_summary = {
+ 'create' : self.generate_create_summary,
+ 'delete' : self.generate_delete_summary,
+ 'update' : self.generate_update_summary,
+ }[self.change_type](push)
+ for line in change_summary:
+ yield line
+
+ for line in self.generate_revision_change_summary(push):
+ yield line
+
+ def generate_email_footer(self):
+ return self.expand_lines(FOOTER_TEMPLATE)
+
+ def generate_revision_change_log(self, new_commits_list):
+ if self.showlog:
+ yield '\n'
+ yield 'Detailed log of new commits:\n\n'
+ for line in read_git_lines(
+ ['log', '--no-walk']
+ + self.logopts
+ + new_commits_list
+ + ['--'],
+ keepends=True,
+ ):
+ yield line
+
+ def generate_revision_change_summary(self, push):
+ """Generate a summary of the revisions added/removed by this change."""
+
+ if self.new.commit_sha1 and not self.old.commit_sha1:
+ # A new reference was created. List the new revisions
+ # brought by the new reference (i.e., those revisions that
+ # were not in the repository before this reference
+ # change).
+ sha1s = list(push.get_new_commits(self))
+ sha1s.reverse()
+ tot = len(sha1s)
+ new_revisions = [
+ Revision(self, GitObject(sha1), num=i+1, tot=tot)
+ for (i, sha1) in enumerate(sha1s)
+ ]
+
+ if new_revisions:
+ yield self.expand('This %(refname_type)s includes the following new commits:\n')
+ yield '\n'
+ for r in new_revisions:
+ (sha1, subject) = r.rev.get_summary()
+ yield r.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='new', text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=tot):
+ yield line
+ for line in self.generate_revision_change_log([r.rev.sha1 for r in new_revisions]):
+ yield line
+ else:
+ for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+ yield line
+
+ elif self.new.commit_sha1 and self.old.commit_sha1:
+ # A reference was changed to point at a different commit.
+ # List the revisions that were removed and/or added *from
+ # that reference* by this reference change, along with a
+ # diff between the trees for its old and new values.
+
+ # List of the revisions that were added to the branch by
+ # this update. Note this list can include revisions that
+ # have already had notification emails; we want such
+ # revisions in the summary even though we will not send
+ # new notification emails for them.
+ adds = list(generate_summaries(
+ '--topo-order', '--reverse', '%s..%s'
+ % (self.old.commit_sha1, self.new.commit_sha1,)
+ ))
+
+ # List of the revisions that were removed from the branch
+ # by this update. This will be empty except for
+ # non-fast-forward updates.
+ discards = list(generate_summaries(
+ '%s..%s' % (self.new.commit_sha1, self.old.commit_sha1,)
+ ))
+
+ if adds:
+ new_commits_list = push.get_new_commits(self)
+ else:
+ new_commits_list = []
+ new_commits = CommitSet(new_commits_list)
+
+ if discards:
+ discarded_commits = CommitSet(push.get_discarded_commits(self))
+ else:
+ discarded_commits = CommitSet([])
+
+ if discards and adds:
+ for (sha1, subject) in discards:
+ if sha1 in discarded_commits:
+ action = 'discards'
+ else:
+ action = 'omits'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ for (sha1, subject) in adds:
+ if sha1 in new_commits:
+ action = 'new'
+ else:
+ action = 'adds'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(NON_FF_TEMPLATE):
+ yield line
+
+ elif discards:
+ for (sha1, subject) in discards:
+ if sha1 in discarded_commits:
+ action = 'discards'
+ else:
+ action = 'omits'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+ for line in self.expand_lines(REWIND_ONLY_TEMPLATE):
+ yield line
+
+ elif adds:
+ (sha1, subject) = self.old.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='from',
+ rev_short=sha1, text=subject,
+ )
+ for (sha1, subject) in adds:
+ if sha1 in new_commits:
+ action = 'new'
+ else:
+ action = 'adds'
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action=action,
+ rev_short=sha1, text=subject,
+ )
+
+ yield '\n'
+
+ if new_commits:
+ for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=len(new_commits)):
+ yield line
+ for line in self.generate_revision_change_log(new_commits_list):
+ yield line
+ else:
+ for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+ yield line
+
+ # The diffstat is shown from the old revision to the new
+ # revision. This is to show the truth of what happened in
+ # this change. There's no point showing the stat from the
+ # base to the new revision because the base is effectively a
+ # random revision at this point - the user will be interested
+ # in what this revision changed - including the undoing of
+ # previous revisions in the case of non-fast-forward updates.
+ yield '\n'
+ yield 'Summary of changes:\n'
+ for line in read_git_lines(
+ ['diff-tree']
+ + self.diffopts
+ + ['%s..%s' % (self.old.commit_sha1, self.new.commit_sha1,)],
+ keepends=True,
+ ):
+ yield line
+
+ elif self.old.commit_sha1 and not self.new.commit_sha1:
+ # A reference was deleted. List the revisions that were
+ # removed from the repository by this reference change.
+
+ sha1s = list(push.get_discarded_commits(self))
+ tot = len(sha1s)
+ discarded_revisions = [
+ Revision(self, GitObject(sha1), num=i+1, tot=tot)
+ for (i, sha1) in enumerate(sha1s)
+ ]
+
+ if discarded_revisions:
+ for line in self.expand_lines(DISCARDED_REVISIONS_TEMPLATE):
+ yield line
+ yield '\n'
+ for r in discarded_revisions:
+ (sha1, subject) = r.rev.get_summary()
+ yield r.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='discards', text=subject,
+ )
+ else:
+ for line in self.expand_lines(NO_DISCARDED_REVISIONS_TEMPLATE):
+ yield line
+
+ elif not self.old.commit_sha1 and not self.new.commit_sha1:
+ for line in self.expand_lines(NON_COMMIT_UPDATE_TEMPLATE):
+ yield line
+
+ def generate_create_summary(self, push):
+ """Called for the creation of a reference."""
+
+ # This is a new reference and so oldrev is not valid
+ (sha1, subject) = self.new.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='at',
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+
+ def generate_update_summary(self, push):
+ """Called for the change of a pre-existing branch."""
+
+ return iter([])
+
+ def generate_delete_summary(self, push):
+ """Called for the deletion of any type of reference."""
+
+ (sha1, subject) = self.old.get_summary()
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='was',
+ rev_short=sha1, text=subject,
+ )
+ yield '\n'
+
+
+class BranchChange(ReferenceChange):
+ refname_type = 'branch'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+
+class AnnotatedTagChange(ReferenceChange):
+ refname_type = 'annotated tag'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_announce_recipients(self)
+ self.show_shortlog = environment.announce_show_shortlog
+
+ ANNOTATED_TAG_FORMAT = (
+ '%(*objectname)\n'
+ '%(*objecttype)\n'
+ '%(taggername)\n'
+ '%(taggerdate)'
+ )
+
+ def describe_tag(self, push):
+ """Describe the new value of an annotated tag."""
+
+ # Use git for-each-ref to pull out the individual fields from
+ # the tag
+ [tagobject, tagtype, tagger, tagged] = read_git_lines(
+ ['for-each-ref', '--format=%s' % (self.ANNOTATED_TAG_FORMAT,), self.refname],
+ )
+
+ yield self.expand(
+ BRIEF_SUMMARY_TEMPLATE, action='tagging',
+ rev_short=tagobject, text='(%s)' % (tagtype,),
+ )
+ if tagtype == 'commit':
+ # If the tagged object is a commit, then we assume this is a
+ # release, and so we calculate which tag this tag is
+ # replacing
+ try:
+ prevtag = read_git_output(['describe', '--abbrev=0', '%s^' % (self.new,)])
+ except CommandError:
+ prevtag = None
+ if prevtag:
+ yield ' replaces %s\n' % (prevtag,)
+ else:
+ prevtag = None
+ yield ' length %s bytes\n' % (read_git_output(['cat-file', '-s', tagobject]),)
+
+ yield ' tagged by %s\n' % (tagger,)
+ yield ' on %s\n' % (tagged,)
+ yield '\n'
+
+ # Show the content of the tag message; this might contain a
+ # change log or release notes so is worth displaying.
+ yield LOGBEGIN
+ contents = list(read_git_lines(['cat-file', 'tag', self.new.sha1], keepends=True))
+ contents = contents[contents.index('\n') + 1:]
+ if contents and contents[-1][-1:] != '\n':
+ contents.append('\n')
+ for line in contents:
+ yield line
+
+ if self.show_shortlog and tagtype == 'commit':
+ # Only commit tags make sense to have rev-list operations
+ # performed on them
+ yield '\n'
+ if prevtag:
+ # Show changes since the previous release
+ revlist = read_git_output(
+ ['rev-list', '--pretty=short', '%s..%s' % (prevtag, self.new,)],
+ keepends=True,
+ )
+ else:
+ # No previous tag, show all the changes since time
+ # began
+ revlist = read_git_output(
+ ['rev-list', '--pretty=short', '%s' % (self.new,)],
+ keepends=True,
+ )
+ for line in read_git_lines(['shortlog'], input=revlist, keepends=True):
+ yield line
+
+ yield LOGEND
+ yield '\n'
+
+ def generate_create_summary(self, push):
+ """Called for the creation of an annotated tag."""
+
+ for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+ yield line
+
+ for line in self.describe_tag(push):
+ yield line
+
+ def generate_update_summary(self, push):
+ """Called for the update of an annotated tag.
+
+ This is probably a rare event and may not even be allowed."""
+
+ for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+ yield line
+
+ for line in self.describe_tag(push):
+ yield line
+
+ def generate_delete_summary(self, push):
+ """Called when a non-annotated reference is updated."""
+
+ for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+ yield line
+
+ yield self.expand(' tag was %(oldrev_short)s\n')
+ yield '\n'
+
+
+class NonAnnotatedTagChange(ReferenceChange):
+ refname_type = 'tag'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=short_refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+ def generate_create_summary(self, push):
+ """Called for the creation of an annotated tag."""
+
+ for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+ yield line
+
+ def generate_update_summary(self, push):
+ """Called when a non-annotated reference is updated."""
+
+ for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+ yield line
+
+ def generate_delete_summary(self, push):
+ """Called when a non-annotated reference is updated."""
+
+ for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+ yield line
+
+ for line in ReferenceChange.generate_delete_summary(self, push):
+ yield line
+
+
+class OtherReferenceChange(ReferenceChange):
+ refname_type = 'reference'
+
+ def __init__(self, environment, refname, short_refname, old, new, rev):
+ # We use the full refname as short_refname, because otherwise
+ # the full name of the reference would not be obvious from the
+ # text of the email.
+ ReferenceChange.__init__(
+ self, environment,
+ refname=refname, short_refname=refname,
+ old=old, new=new, rev=rev,
+ )
+ self.recipients = environment.get_refchange_recipients(self)
+
+
+class Mailer(object):
+ """An object that can send emails."""
+
+ def send(self, lines, to_addrs):
+ """Send an email consisting of lines.
+
+ lines must be an iterable over the lines constituting the
+ header and body of the email. to_addrs is a list of recipient
+        addresses (it may be needed even if lines already contains a
+ "To:" field). It can be either a string (comma-separated list
+ of email addresses) or a Python list of individual email
+ addresses.
+
+ """
+
+ raise NotImplementedError()
+
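+# A minimal sketch (not part of this hook) of how the Mailer interface
+# above can be implemented; the class name is hypothetical and the code
+# is only illustrative:
+#
+#     class ListMailer(Mailer):
+#         """Collect outgoing emails in a list instead of sending them."""
+#
+#         def __init__(self):
+#             self.messages = []
+#
+#         def send(self, lines, to_addrs):
+#             self.messages.append((to_addrs, ''.join(lines)))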
+
+class SendMailer(Mailer):
+ """Send emails using 'sendmail -t'."""
+
+ SENDMAIL_CANDIDATES = [
+ '/usr/sbin/sendmail',
+ '/usr/lib/sendmail',
+ ]
+
+ @staticmethod
+ def find_sendmail():
+ for path in SendMailer.SENDMAIL_CANDIDATES:
+ if os.access(path, os.X_OK):
+ return path
+ else:
+ raise ConfigurationException(
+ 'No sendmail executable found. '
+ 'Try setting multimailhook.sendmailCommand.'
+ )
+
+ def __init__(self, command=None, envelopesender=None):
+ """Construct a SendMailer instance.
+
+ command should be the command and arguments used to invoke
+ sendmail, as a list of strings. If an envelopesender is
+ provided, it will also be passed to the command, via '-f
+ envelopesender'."""
+
+ if command:
+ self.command = command[:]
+ else:
+ self.command = [self.find_sendmail(), '-t']
+
+ if envelopesender:
+ self.command.extend(['-f', envelopesender])
+
+ def send(self, lines, to_addrs):
+ try:
+ p = subprocess.Popen(self.command, stdin=subprocess.PIPE)
+ except OSError, e:
+ sys.stderr.write(
+ '*** Cannot execute command: %s\n' % ' '.join(self.command)
+ + '*** %s\n' % str(e)
+ + '*** Try setting multimailhook.mailer to "smtp"\n'
+ '*** to send emails without using the sendmail command.\n'
+ )
+ sys.exit(1)
+ try:
+ p.stdin.writelines(lines)
+ except:
+ sys.stderr.write(
+ '*** Error while generating commit email\n'
+ '*** - mail sending aborted.\n'
+ )
+ p.terminate()
+ raise
+ else:
+ p.stdin.close()
+ retcode = p.wait()
+ if retcode:
+ raise CommandError(self.command, retcode)
+
+
+class SMTPMailer(Mailer):
+ """Send emails using Python's smtplib."""
+
+ def __init__(self, envelopesender, smtpserver):
+ if not envelopesender:
+ sys.stderr.write(
+ 'fatal: git_multimail: cannot use SMTPMailer without a sender address.\n'
+ 'please set either multimailhook.envelopeSender or user.email\n'
+ )
+ sys.exit(1)
+ self.envelopesender = envelopesender
+ self.smtpserver = smtpserver
+ try:
+ self.smtp = smtplib.SMTP(self.smtpserver)
+ except Exception, e:
+ sys.stderr.write('*** Error establishing SMTP connection to %s***\n' % self.smtpserver)
+ sys.stderr.write('*** %s\n' % str(e))
+ sys.exit(1)
+
+ def __del__(self):
+ self.smtp.quit()
+
+ def send(self, lines, to_addrs):
+ try:
+ msg = ''.join(lines)
+ # turn comma-separated list into Python list if needed.
+ if isinstance(to_addrs, basestring):
+ to_addrs = [email for (name, email) in getaddresses([to_addrs])]
+ self.smtp.sendmail(self.envelopesender, to_addrs, msg)
+ except Exception, e:
+ sys.stderr.write('*** Error sending email***\n')
+ sys.stderr.write('*** %s\n' % str(e))
+ self.smtp.quit()
+ sys.exit(1)
+
+
+class OutputMailer(Mailer):
+ """Write emails to an output stream, bracketed by lines of '=' characters.
+
+ This is intended for debugging purposes."""
+
+ SEPARATOR = '=' * 75 + '\n'
+
+ def __init__(self, f):
+ self.f = f
+
+ def send(self, lines, to_addrs):
+ self.f.write(self.SEPARATOR)
+ self.f.writelines(lines)
+ self.f.write(self.SEPARATOR)
+
+
+def get_git_dir():
+ """Determine GIT_DIR.
+
+ Determine GIT_DIR either from the GIT_DIR environment variable or
+ from the working directory, using Git's usual rules."""
+
+ try:
+ return read_git_output(['rev-parse', '--git-dir'])
+ except CommandError:
+ sys.stderr.write('fatal: git_multimail: not in a git directory\n')
+ sys.exit(1)
+
+
+class Environment(object):
+ """Describes the environment in which the push is occurring.
+
+ An Environment object encapsulates information about the local
+ environment. For example, it knows how to determine:
+
+ * the name of the repository to which the push occurred
+
+ * what user did the push
+
+ * what users want to be informed about various types of changes.
+
+ An Environment object is expected to have the following methods:
+
+ get_repo_shortname()
+
+ Return a short name for the repository, for display
+ purposes.
+
+ get_repo_path()
+
+ Return the absolute path to the Git repository.
+
+ get_emailprefix()
+
+ Return a string that will be prefixed to every email's
+ subject.
+
+ get_pusher()
+
+ Return the username of the person who pushed the changes.
+ This value is used in the email body to indicate who
+ pushed the change.
+
+ get_pusher_email() (may return None)
+
+ Return the email address of the person who pushed the
+ changes. The value should be a single RFC 2822 email
+ address as a string; e.g., "Joe User <user@example.com>"
+ if available, otherwise "user@example.com". If set, the
+ value is used as the Reply-To address for refchange
+ emails. If it is impossible to determine the pusher's
+ email, this attribute should be set to None (in which case
+ no Reply-To header will be output).
+
+ get_sender()
+
+ Return the address to be used as the 'From' email address
+ in the email envelope.
+
+ get_fromaddr()
+
+ Return the 'From' email address used in the email 'From:'
+ headers. (May be a full RFC 2822 email address like 'Joe
+ User <user@example.com>'.)
+
+ get_administrator()
+
+ Return the name and/or email of the repository
+ administrator. This value is used in the footer as the
+ person to whom requests to be removed from the
+ notification list should be sent. Ideally, it should
+ include a valid email address.
+
+ get_reply_to_refchange()
+ get_reply_to_commit()
+
+ Return the address to use in the email "Reply-To" header,
+ as a string. These can be an RFC 2822 email address, or
+ None to omit the "Reply-To" header.
+ get_reply_to_refchange() is used for refchange emails;
+ get_reply_to_commit() is used for individual commit
+ emails.
+
+ They should also define the following attributes:
+
+ announce_show_shortlog (bool)
+
+ True iff announce emails should include a shortlog.
+
+ refchange_showlog (bool)
+
+        True iff refchange emails should include a detailed log.
+
+ diffopts (list of strings)
+
+ The options that should be passed to 'git diff' for the
+ summary email. The value should be a list of strings
+ representing words to be passed to the command.
+
+ logopts (list of strings)
+
+ Analogous to diffopts, but contains options passed to
+ 'git log' when generating the detailed log for a set of
+ commits (see refchange_showlog)
+
+ """
+
+ REPO_NAME_RE = re.compile(r'^(?P<name>.+?)(?:\.git)$')
+
+ def __init__(self, osenv=None):
+ self.osenv = osenv or os.environ
+ self.announce_show_shortlog = False
+ self.maxcommitemails = 500
+ self.diffopts = ['--stat', '--summary', '--find-copies-harder']
+ self.logopts = []
+ self.refchange_showlog = False
+
+ self.COMPUTED_KEYS = [
+ 'administrator',
+ 'charset',
+ 'emailprefix',
+ 'fromaddr',
+ 'pusher',
+ 'pusher_email',
+ 'repo_path',
+ 'repo_shortname',
+ 'sender',
+ ]
+
+ self._values = None
+
+ def get_repo_shortname(self):
+ """Use the last part of the repo path, with ".git" stripped off if present."""
+
+ basename = os.path.basename(os.path.abspath(self.get_repo_path()))
+ m = self.REPO_NAME_RE.match(basename)
+ if m:
+ return m.group('name')
+ else:
+ return basename
+
+ def get_pusher(self):
+ raise NotImplementedError()
+
+ def get_pusher_email(self):
+ return None
+
+ def get_administrator(self):
+ return 'the administrator of this repository'
+
+ def get_emailprefix(self):
+ return ''
+
+ def get_repo_path(self):
+ if read_git_output(['rev-parse', '--is-bare-repository']) == 'true':
+ path = get_git_dir()
+ else:
+ path = read_git_output(['rev-parse', '--show-toplevel'])
+ return os.path.abspath(path)
+
+ def get_charset(self):
+ return CHARSET
+
+ def get_values(self):
+ """Return a dictionary {keyword : expansion} for this Environment.
+
+ This method is called by Change._compute_values(). The keys
+ in the returned dictionary are available to be used in any of
+ the templates. The dictionary is created by calling
+ self.get_NAME() for each of the attributes named in
+ COMPUTED_KEYS and recording those that do not return None.
+ The return value is always a new dictionary."""
+
+ if self._values is None:
+ values = {}
+
+ for key in self.COMPUTED_KEYS:
+ value = getattr(self, 'get_%s' % (key,))()
+ if value is not None:
+ values[key] = value
+
+ self._values = values
+
+ return self._values.copy()
+
+ def get_refchange_recipients(self, refchange):
+ """Return the recipients for notifications about refchange.
+
+ Return the list of email addresses to which notifications
+ about the specified ReferenceChange should be sent."""
+
+ raise NotImplementedError()
+
+ def get_announce_recipients(self, annotated_tag_change):
+ """Return the recipients for notifications about annotated_tag_change.
+
+ Return the list of email addresses to which notifications
+ about the specified AnnotatedTagChange should be sent."""
+
+ raise NotImplementedError()
+
+ def get_reply_to_refchange(self, refchange):
+ return self.get_pusher_email()
+
+ def get_revision_recipients(self, revision):
+ """Return the recipients for messages about revision.
+
+ Return the list of email addresses to which notifications
+ about the specified Revision should be sent. This method
+ could be overridden, for example, to take into account the
+ contents of the revision when deciding whom to notify about
+ it. For example, there could be a scheme for users to express
+ interest in particular files or subdirectories, and only
+        receive notification emails for revisions affecting those
+ files."""
+
+ raise NotImplementedError()
+
+ def get_reply_to_commit(self, revision):
+ return revision.author
+
+ def filter_body(self, lines):
+ """Filter the lines intended for an email body.
+
+ lines is an iterable over the lines that would go into the
+ email body. Filter it (e.g., limit the number of lines, the
+ line length, character set, etc.), returning another iterable.
+ See FilterLinesEnvironmentMixin and MaxlinesEnvironmentMixin
+ for classes implementing this functionality."""
+
+ return lines
+
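+# A minimal sketch (not part of this hook) of a custom mixin following
+# the Environment API documented above; the class name and the
+# environment variable it reads are hypothetical:
+#
+#     class CIEnvironmentMixin(Environment):
+#         def get_pusher(self):
+#             return self.osenv.get('CI_TRIGGER_USER', 'unknown user')
+#
+# Such a mixin can be combined with the configuration mixins defined
+# below, in the same way as GenericEnvironment and GitoliteEnvironment.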
+
+class ConfigEnvironmentMixin(Environment):
+ """A mixin that sets self.config to its constructor's config argument.
+
+ This class's constructor consumes the "config" argument.
+
+ Mixins that need to inspect the config should inherit from this
+ class (1) to make sure that "config" is still in the constructor
+    arguments when its own constructor runs and/or (2) to be sure that
+ self.config is set after construction."""
+
+ def __init__(self, config, **kw):
+ super(ConfigEnvironmentMixin, self).__init__(**kw)
+ self.config = config
+
+
+class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin):
+ """An Environment that reads most of its information from "git config"."""
+
+ def __init__(self, config, **kw):
+ super(ConfigOptionsEnvironmentMixin, self).__init__(
+ config=config, **kw
+ )
+
+ self.announce_show_shortlog = config.get_bool(
+ 'announceshortlog', default=self.announce_show_shortlog
+ )
+
+ self.refchange_showlog = config.get_bool(
+ 'refchangeshowlog', default=self.refchange_showlog
+ )
+
+ maxcommitemails = config.get('maxcommitemails')
+ if maxcommitemails is not None:
+ try:
+ self.maxcommitemails = int(maxcommitemails)
+ except ValueError:
+ sys.stderr.write(
+ '*** Malformed value for multimailhook.maxCommitEmails: %s\n' % maxcommitemails
+ + '*** Expected a number. Ignoring.\n'
+ )
+
+ diffopts = config.get('diffopts')
+ if diffopts is not None:
+ self.diffopts = shlex.split(diffopts)
+
+ logopts = config.get('logopts')
+ if logopts is not None:
+ self.logopts = shlex.split(logopts)
+
+ reply_to = config.get('replyTo')
+ self.__reply_to_refchange = config.get('replyToRefchange', default=reply_to)
+ if (
+ self.__reply_to_refchange is not None
+ and self.__reply_to_refchange.lower() == 'author'
+ ):
+ raise ConfigurationException(
+ '"author" is not an allowed setting for replyToRefchange'
+ )
+ self.__reply_to_commit = config.get('replyToCommit', default=reply_to)
+
+ def get_administrator(self):
+ return (
+ self.config.get('administrator')
+ or self.get_sender()
+ or super(ConfigOptionsEnvironmentMixin, self).get_administrator()
+ )
+
+ def get_repo_shortname(self):
+ return (
+ self.config.get('reponame')
+ or super(ConfigOptionsEnvironmentMixin, self).get_repo_shortname()
+ )
+
+ def get_emailprefix(self):
+ emailprefix = self.config.get('emailprefix')
+ if emailprefix and emailprefix.strip():
+ return emailprefix.strip() + ' '
+ else:
+ return '[%s] ' % (self.get_repo_shortname(),)
+
+ def get_sender(self):
+ return self.config.get('envelopesender')
+
+ def get_fromaddr(self):
+ fromaddr = self.config.get('from')
+ if fromaddr:
+ return fromaddr
+ else:
+ config = Config('user')
+ fromname = config.get('name', default='')
+ fromemail = config.get('email', default='')
+ if fromemail:
+ return formataddr([fromname, fromemail])
+ else:
+ return self.get_sender()
+
+ def get_reply_to_refchange(self, refchange):
+ if self.__reply_to_refchange is None:
+ return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_refchange(refchange)
+ elif self.__reply_to_refchange.lower() == 'pusher':
+ return self.get_pusher_email()
+ elif self.__reply_to_refchange.lower() == 'none':
+ return None
+ else:
+ return self.__reply_to_refchange
+
+ def get_reply_to_commit(self, revision):
+ if self.__reply_to_commit is None:
+ return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_commit(revision)
+ elif self.__reply_to_commit.lower() == 'author':
+ return revision.get_author()
+ elif self.__reply_to_commit.lower() == 'pusher':
+ return self.get_pusher_email()
+ elif self.__reply_to_commit.lower() == 'none':
+ return None
+ else:
+ return self.__reply_to_commit
+
+
+class FilterLinesEnvironmentMixin(Environment):
+ """Handle encoding and maximum line length of body lines.
+
+ emailmaxlinelength (int or None)
+
+ The maximum length of any single line in the email body.
+ Longer lines are truncated at that length with ' [...]'
+ appended.
+
+ strict_utf8 (bool)
+
+ If this field is set to True, then the email body text is
+ expected to be UTF-8. Any invalid characters are
+ converted to U+FFFD, the Unicode replacement character
+ (encoded as UTF-8, of course).
+
+ """
+
+ def __init__(self, strict_utf8=True, emailmaxlinelength=500, **kw):
+ super(FilterLinesEnvironmentMixin, self).__init__(**kw)
+ self.__strict_utf8 = strict_utf8
+ self.__emailmaxlinelength = emailmaxlinelength
+
+ def filter_body(self, lines):
+ lines = super(FilterLinesEnvironmentMixin, self).filter_body(lines)
+ if self.__strict_utf8:
+ lines = (line.decode(ENCODING, 'replace') for line in lines)
+ # Limit the line length in Unicode-space to avoid
+ # splitting characters:
+ if self.__emailmaxlinelength:
+ lines = limit_linelength(lines, self.__emailmaxlinelength)
+ lines = (line.encode(ENCODING, 'replace') for line in lines)
+ elif self.__emailmaxlinelength:
+ lines = limit_linelength(lines, self.__emailmaxlinelength)
+
+ return lines
+
+
+class ConfigFilterLinesEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ FilterLinesEnvironmentMixin,
+ ):
+ """Handle encoding and maximum line length based on config."""
+
+ def __init__(self, config, **kw):
+ strict_utf8 = config.get_bool('emailstrictutf8', default=None)
+ if strict_utf8 is not None:
+ kw['strict_utf8'] = strict_utf8
+
+ emailmaxlinelength = config.get('emailmaxlinelength')
+ if emailmaxlinelength is not None:
+ kw['emailmaxlinelength'] = int(emailmaxlinelength)
+
+ super(ConfigFilterLinesEnvironmentMixin, self).__init__(
+ config=config, **kw
+ )
+
+
+class MaxlinesEnvironmentMixin(Environment):
+ """Limit the email body to a specified number of lines."""
+
+ def __init__(self, emailmaxlines, **kw):
+ super(MaxlinesEnvironmentMixin, self).__init__(**kw)
+ self.__emailmaxlines = emailmaxlines
+
+ def filter_body(self, lines):
+ lines = super(MaxlinesEnvironmentMixin, self).filter_body(lines)
+ if self.__emailmaxlines:
+ lines = limit_lines(lines, self.__emailmaxlines)
+ return lines
+
+
+class ConfigMaxlinesEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ MaxlinesEnvironmentMixin,
+ ):
+ """Limit the email body to the number of lines specified in config."""
+
+ def __init__(self, config, **kw):
+ emailmaxlines = int(config.get('emailmaxlines', default='0'))
+ super(ConfigMaxlinesEnvironmentMixin, self).__init__(
+ config=config,
+ emailmaxlines=emailmaxlines,
+ **kw
+ )
+
+
+class PusherDomainEnvironmentMixin(ConfigEnvironmentMixin):
+ """Deduce pusher_email from pusher by appending an emaildomain."""
+
+ def __init__(self, **kw):
+ super(PusherDomainEnvironmentMixin, self).__init__(**kw)
+ self.__emaildomain = self.config.get('emaildomain')
+
+ def get_pusher_email(self):
+ if self.__emaildomain:
+ # Derive the pusher's full email address in the default way:
+ return '%s@%s' % (self.get_pusher(), self.__emaildomain)
+ else:
+ return super(PusherDomainEnvironmentMixin, self).get_pusher_email()
+
+
+class StaticRecipientsEnvironmentMixin(Environment):
+ """Set recipients statically based on constructor parameters."""
+
+ def __init__(
+ self,
+ refchange_recipients, announce_recipients, revision_recipients,
+ **kw
+ ):
+ super(StaticRecipientsEnvironmentMixin, self).__init__(**kw)
+
+ # The recipients for various types of notification emails, as
+ # RFC 2822 email addresses separated by commas (or the empty
+ # string if no recipients are configured). Although there is
+        # a mechanism to choose the recipient lists based on the
+ # actual *contents* of the change being reported, we only
+ # choose based on the *type* of the change. Therefore we can
+ # compute them once and for all:
+ self.__refchange_recipients = refchange_recipients
+ self.__announce_recipients = announce_recipients
+ self.__revision_recipients = revision_recipients
+
+ def get_refchange_recipients(self, refchange):
+ return self.__refchange_recipients
+
+ def get_announce_recipients(self, annotated_tag_change):
+ return self.__announce_recipients
+
+ def get_revision_recipients(self, revision):
+ return self.__revision_recipients
+
+
+class ConfigRecipientsEnvironmentMixin(
+ ConfigEnvironmentMixin,
+ StaticRecipientsEnvironmentMixin
+ ):
+ """Determine recipients statically based on config."""
+
+ def __init__(self, config, **kw):
+ super(ConfigRecipientsEnvironmentMixin, self).__init__(
+ config=config,
+ refchange_recipients=self._get_recipients(
+ config, 'refchangelist', 'mailinglist',
+ ),
+ announce_recipients=self._get_recipients(
+ config, 'announcelist', 'refchangelist', 'mailinglist',
+ ),
+ revision_recipients=self._get_recipients(
+ config, 'commitlist', 'mailinglist',
+ ),
+ **kw
+ )
+
+ def _get_recipients(self, config, *names):
+ """Return the recipients for a particular type of message.
+
+ Return the list of email addresses to which a particular type
+ of notification email should be sent, by looking at the config
+ value for "multimailhook.$name" for each of names. Use the
+ value from the first name that is configured. The return
+ value is a (possibly empty) string containing RFC 2822 email
+ addresses separated by commas. If no configuration could be
+ found, raise a ConfigurationException."""
+
+ for name in names:
+ retval = config.get_recipients(name)
+ if retval is not None:
+ return retval
+ if len(names) == 1:
+ hint = 'Please set "%s.%s"' % (config.section, name)
+ else:
+ hint = (
+ 'Please set one of the following:\n "%s"'
+ % ('"\n "'.join('%s.%s' % (config.section, name) for name in names))
+ )
+
+ raise ConfigurationException(
+ 'The list of recipients for %s is not configured.\n%s' % (names[0], hint)
+ )
+
+
+class ProjectdescEnvironmentMixin(Environment):
+ """Make a "projectdesc" value available for templates.
+
+ By default, it is set to the first line of $GIT_DIR/description
+ (if that file is present and appears to be set meaningfully)."""
+
+ def __init__(self, **kw):
+ super(ProjectdescEnvironmentMixin, self).__init__(**kw)
+ self.COMPUTED_KEYS += ['projectdesc']
+
+ def get_projectdesc(self):
+ """Return a one-line descripition of the project."""
+
+ git_dir = get_git_dir()
+ try:
+ projectdesc = open(os.path.join(git_dir, 'description')).readline().strip()
+ if projectdesc and not projectdesc.startswith('Unnamed repository'):
+ return projectdesc
+ except IOError:
+ pass
+
+ return 'UNNAMED PROJECT'
+
+
+class GenericEnvironmentMixin(Environment):
+ def get_pusher(self):
+ return self.osenv.get('USER', 'unknown user')
+
+
+class GenericEnvironment(
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ ConfigRecipientsEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ GenericEnvironmentMixin,
+ Environment,
+ ):
+ pass
+
+
+class GitoliteEnvironmentMixin(Environment):
+ def get_repo_shortname(self):
+ # The gitolite environment variable $GL_REPO is a pretty good
+ # repo_shortname (though it's probably not as good as a value
+ # the user might have explicitly put in his config).
+ return (
+ self.osenv.get('GL_REPO', None)
+ or super(GitoliteEnvironmentMixin, self).get_repo_shortname()
+ )
+
+ def get_pusher(self):
+ return self.osenv.get('GL_USER', 'unknown user')
+
+
+class GitoliteEnvironment(
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ ConfigRecipientsEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ GitoliteEnvironmentMixin,
+ Environment,
+ ):
+ pass
+
+
+class Push(object):
+ """Represent an entire push (i.e., a group of ReferenceChanges).
+
+ It is easy to figure out what commits were added to a *branch* by
+ a Reference change:
+
+ git rev-list change.old..change.new
+
+ or removed from a *branch*:
+
+ git rev-list change.new..change.old
+
+ But it is not quite so trivial to determine which entirely new
+ commits were added to the *repository* by a push and which old
+ commits were discarded by a push. A big part of the job of this
+ class is to figure out these things, and to make sure that new
+ commits are only detailed once even if they were added to multiple
+ references.
+
+ The first step is to determine the "other" references--those
+ unaffected by the current push. They are computed by
+ Push._compute_other_ref_sha1s() by listing all references then
+ removing any affected by this push.
+
+ The commits contained in the repository before this push were
+
+ git rev-list other1 other2 other3 ... change1.old change2.old ...
+
+ Where "changeN.old" is the old value of one of the references
+ affected by this push.
+
+ The commits contained in the repository after this push are
+
+ git rev-list other1 other2 other3 ... change1.new change2.new ...
+
+ The commits added by this push are the difference between these
+ two sets, which can be written
+
+ git rev-list \
+ ^other1 ^other2 ... \
+ ^change1.old ^change2.old ... \
+ change1.new change2.new ...
+
+ The commits removed by this push can be computed by
+
+ git rev-list \
+ ^other1 ^other2 ... \
+ ^change1.new ^change2.new ... \
+ change1.old change2.old ...
+
+ The last point is that it is possible that other pushes are
+    occurring simultaneously with this one, so reference values can
+ change at any time. It is impossible to eliminate all race
+ conditions, but we reduce the window of time during which problems
+ can occur by translating reference names to SHA1s as soon as
+ possible and working with SHA1s thereafter (because SHA1s are
+ immutable)."""
+
+ # A map {(changeclass, changetype) : integer} specifying the order
+ # that reference changes will be processed if multiple reference
+ # changes are included in a single push. The order is significant
+ # mostly because new commit notifications are threaded together
+ # with the first reference change that includes the commit. The
+ # following order thus causes commits to be grouped with branch
+ # changes (as opposed to tag changes) if possible.
+ SORT_ORDER = dict(
+ (value, i) for (i, value) in enumerate([
+ (BranchChange, 'update'),
+ (BranchChange, 'create'),
+ (AnnotatedTagChange, 'update'),
+ (AnnotatedTagChange, 'create'),
+ (NonAnnotatedTagChange, 'update'),
+ (NonAnnotatedTagChange, 'create'),
+ (BranchChange, 'delete'),
+ (AnnotatedTagChange, 'delete'),
+ (NonAnnotatedTagChange, 'delete'),
+ (OtherReferenceChange, 'update'),
+ (OtherReferenceChange, 'create'),
+ (OtherReferenceChange, 'delete'),
+ ])
+ )
+
+ def __init__(self, changes):
+ self.changes = sorted(changes, key=self._sort_key)
+
+ # The SHA-1s of commits referred to by references unaffected
+ # by this push:
+ other_ref_sha1s = self._compute_other_ref_sha1s()
+
+ self._old_rev_exclusion_spec = self._compute_rev_exclusion_spec(
+ other_ref_sha1s.union(
+ change.old.sha1
+ for change in self.changes
+ if change.old.type in ['commit', 'tag']
+ )
+ )
+ self._new_rev_exclusion_spec = self._compute_rev_exclusion_spec(
+ other_ref_sha1s.union(
+ change.new.sha1
+ for change in self.changes
+ if change.new.type in ['commit', 'tag']
+ )
+ )
+
+ @classmethod
+ def _sort_key(klass, change):
+ return (klass.SORT_ORDER[change.__class__, change.change_type], change.refname,)
+
+ def _compute_other_ref_sha1s(self):
+ """Return the GitObjects referred to by references unaffected by this push."""
+
+ # The refnames being changed by this push:
+ updated_refs = set(
+ change.refname
+ for change in self.changes
+ )
+
+ # The SHA-1s of commits referred to by all references in this
+ # repository *except* updated_refs:
+ sha1s = set()
+ fmt = (
+ '%(objectname) %(objecttype) %(refname)\n'
+ '%(*objectname) %(*objecttype) %(refname)'
+ )
+ for line in read_git_lines(['for-each-ref', '--format=%s' % (fmt,)]):
+ (sha1, type, name) = line.split(' ', 2)
+ if sha1 and type == 'commit' and name not in updated_refs:
+ sha1s.add(sha1)
+
+ return sha1s
+
+ def _compute_rev_exclusion_spec(self, sha1s):
+ """Return an exclusion specification for 'git rev-list'.
+
+        sha1s is an iterable over commit sha1s.  Return a string that
+        can be passed to the standard input of 'git rev-list --stdin'
+        to exclude all of the commits reachable from those sha1s."""
+
+ return ''.join(
+ ['^%s\n' % (sha1,) for sha1 in sorted(sha1s)]
+ )
+
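+    # For example, for two excluded commits the string returned by
+    # _compute_rev_exclusion_spec() looks like
+    #
+    #     ^1111111111111111111111111111111111111111
+    #     ^2222222222222222222222222222222222222222
+    #
+    # and is fed to 'git rev-list --stdin' by the two methods below, so
+    # that commits already reachable from the excluded sha1s are left
+    # out of the result.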
+ def get_new_commits(self, reference_change=None):
+ """Return a list of commits added by this push.
+
+ Return a list of the object names of commits that were added
+ by the part of this push represented by reference_change. If
+ reference_change is None, then return a list of *all* commits
+ added by this push."""
+
+ if not reference_change:
+ new_revs = sorted(
+ change.new.sha1
+ for change in self.changes
+ if change.new
+ )
+ elif not reference_change.new.commit_sha1:
+ return []
+ else:
+ new_revs = [reference_change.new.commit_sha1]
+
+ cmd = ['rev-list', '--stdin'] + new_revs
+ return read_git_lines(cmd, input=self._old_rev_exclusion_spec)
+
+ def get_discarded_commits(self, reference_change):
+ """Return a list of commits discarded by this push.
+
+ Return a list of the object names of commits that were
+ entirely discarded from the repository by the part of this
+ push represented by reference_change."""
+
+ if not reference_change.old.commit_sha1:
+ return []
+ else:
+ old_revs = [reference_change.old.commit_sha1]
+
+ cmd = ['rev-list', '--stdin'] + old_revs
+ return read_git_lines(cmd, input=self._new_rev_exclusion_spec)
+
+ def send_emails(self, mailer, body_filter=None):
+ """Use send all of the notification emails needed for this push.
+
+ Use send all of the notification emails (including reference
+ change emails and commit emails) needed for this push. Send
+ the emails using mailer. If body_filter is not None, then use
+ it to filter the lines that are intended for the email
+ body."""
+
+ # The sha1s of commits that were introduced by this push.
+ # They will be removed from this set as they are processed, to
+ # guarantee that one (and only one) email is generated for
+ # each new commit.
+ unhandled_sha1s = set(self.get_new_commits())
+ for change in self.changes:
+ # Check if we've got anyone to send to
+ if not change.recipients:
+ sys.stderr.write(
+ '*** no recipients configured so no email will be sent\n'
+ '*** for %r update %s->%s\n'
+ % (change.refname, change.old.sha1, change.new.sha1,)
+ )
+ else:
+ sys.stderr.write('Sending notification emails to: %s\n' % (change.recipients,))
+ mailer.send(change.generate_email(self, body_filter), change.recipients)
+
+ sha1s = []
+ for sha1 in reversed(list(self.get_new_commits(change))):
+ if sha1 in unhandled_sha1s:
+ sha1s.append(sha1)
+ unhandled_sha1s.remove(sha1)
+
+ max_emails = change.environment.maxcommitemails
+ if max_emails and len(sha1s) > max_emails:
+ sys.stderr.write(
+ '*** Too many new commits (%d), not sending commit emails.\n' % len(sha1s)
+ + '*** Try setting multimailhook.maxCommitEmails to a greater value\n'
+ + '*** Currently, multimailhook.maxCommitEmails=%d\n' % max_emails
+ )
+ return
+
+ for (num, sha1) in enumerate(sha1s):
+ rev = Revision(change, GitObject(sha1), num=num+1, tot=len(sha1s))
+ if rev.recipients:
+ mailer.send(rev.generate_email(self, body_filter), rev.recipients)
+
+ # Consistency check:
+ if unhandled_sha1s:
+ sys.stderr.write(
+ 'ERROR: No emails were sent for the following new commits:\n'
+ ' %s\n'
+ % ('\n '.join(sorted(unhandled_sha1s)),)
+ )
+
+
+def run_as_post_receive_hook(environment, mailer):
+ changes = []
+ for line in sys.stdin:
+ (oldrev, newrev, refname) = line.strip().split(' ', 2)
+ changes.append(
+ ReferenceChange.create(environment, oldrev, newrev, refname)
+ )
+ push = Push(changes)
+ push.send_emails(mailer, body_filter=environment.filter_body)
+
+
+def run_as_update_hook(environment, mailer, refname, oldrev, newrev):
+ changes = [
+ ReferenceChange.create(
+ environment,
+ read_git_output(['rev-parse', '--verify', oldrev]),
+ read_git_output(['rev-parse', '--verify', newrev]),
+ refname,
+ ),
+ ]
+ push = Push(changes)
+ push.send_emails(mailer, body_filter=environment.filter_body)
+
+
+def choose_mailer(config, environment):
+ mailer = config.get('mailer', default='sendmail')
+
+ if mailer == 'smtp':
+ smtpserver = config.get('smtpserver', default='localhost')
+ mailer = SMTPMailer(
+ envelopesender=(environment.get_sender() or environment.get_fromaddr()),
+ smtpserver=smtpserver,
+ )
+ elif mailer == 'sendmail':
+ command = config.get('sendmailcommand')
+ if command:
+ command = shlex.split(command)
+ mailer = SendMailer(command=command, envelopesender=environment.get_sender())
+ else:
+ sys.stderr.write(
+ 'fatal: multimailhook.mailer is set to an incorrect value: "%s"\n' % mailer
+ + 'please use one of "smtp" or "sendmail".\n'
+ )
+ sys.exit(1)
+ return mailer
+
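+# For example (illustrative values only), the mailer chosen above can be
+# configured with either
+#
+#     git config multimailhook.mailer smtp
+#     git config multimailhook.smtpserver mail.example.com
+#
+# or
+#
+#     git config multimailhook.mailer sendmail
+#     git config multimailhook.sendmailCommand '/usr/sbin/sendmail -t'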
+
+KNOWN_ENVIRONMENTS = {
+ 'generic' : GenericEnvironmentMixin,
+ 'gitolite' : GitoliteEnvironmentMixin,
+ }
+
+
+def choose_environment(config, osenv=None, env=None, recipients=None):
+ if not osenv:
+ osenv = os.environ
+
+ environment_mixins = [
+ ProjectdescEnvironmentMixin,
+ ConfigMaxlinesEnvironmentMixin,
+ ConfigFilterLinesEnvironmentMixin,
+ PusherDomainEnvironmentMixin,
+ ConfigOptionsEnvironmentMixin,
+ ]
+ environment_kw = {
+ 'osenv' : osenv,
+ 'config' : config,
+ }
+
+ if not env:
+ env = config.get('environment')
+
+ if not env:
+ if 'GL_USER' in osenv and 'GL_REPO' in osenv:
+ env = 'gitolite'
+ else:
+ env = 'generic'
+
+ environment_mixins.append(KNOWN_ENVIRONMENTS[env])
+
+ if recipients:
+ environment_mixins.insert(0, StaticRecipientsEnvironmentMixin)
+ environment_kw['refchange_recipients'] = recipients
+ environment_kw['announce_recipients'] = recipients
+ environment_kw['revision_recipients'] = recipients
+ else:
+ environment_mixins.insert(0, ConfigRecipientsEnvironmentMixin)
+
+ environment_klass = type(
+ 'EffectiveEnvironment',
+ tuple(environment_mixins) + (Environment,),
+ {},
+ )
+ return environment_klass(**environment_kw)
+
+
+def main(args):
+ parser = optparse.OptionParser(
+ description=__doc__,
+ usage='%prog [OPTIONS]\n or: %prog [OPTIONS] REFNAME OLDREV NEWREV',
+ )
+
+ parser.add_option(
+ '--environment', '--env', action='store', type='choice',
+ choices=['generic', 'gitolite'], default=None,
+ help=(
+            'Choose the type of environment that is in use.  Default is taken from '
+ 'multimailhook.environment if set; otherwise "generic".'
+ ),
+ )
+ parser.add_option(
+ '--stdout', action='store_true', default=False,
+ help='Output emails to stdout rather than sending them.',
+ )
+ parser.add_option(
+ '--recipients', action='store', default=None,
+ help='Set list of email recipients for all types of emails.',
+ )
+ parser.add_option(
+ '--show-env', action='store_true', default=False,
+ help=(
+ 'Write to stderr the values determined for the environment '
+ '(intended for debugging purposes).'
+ ),
+ )
+
+ (options, args) = parser.parse_args(args)
+
+ config = Config('multimailhook')
+
+ try:
+ environment = choose_environment(
+ config, osenv=os.environ,
+ env=options.environment,
+ recipients=options.recipients,
+ )
+
+ if options.show_env:
+ sys.stderr.write('Environment values:\n')
+ for (k,v) in sorted(environment.get_values().items()):
+ sys.stderr.write(' %s : %r\n' % (k,v))
+ sys.stderr.write('\n')
+
+ if options.stdout:
+ mailer = OutputMailer(sys.stdout)
+ else:
+ mailer = choose_mailer(config, environment)
+
+ # Dual mode: if arguments were specified on the command line, run
+ # like an update hook; otherwise, run as a post-receive hook.
+ if args:
+ if len(args) != 3:
+ parser.error('Need zero or three non-option arguments')
+ (refname, oldrev, newrev) = args
+ run_as_update_hook(environment, mailer, refname, oldrev, newrev)
+ else:
+ run_as_post_receive_hook(environment, mailer)
+ except ConfigurationException, e:
+ sys.exit(str(e))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/contrib/hooks/multimail/migrate-mailhook-config b/contrib/hooks/multimail/migrate-mailhook-config
new file mode 100755
index 0000000000..04eeaac413
--- /dev/null
+++ b/contrib/hooks/multimail/migrate-mailhook-config
@@ -0,0 +1,269 @@
+#! /usr/bin/env python2
+
+"""Migrate a post-receive-email configuration to be usable with git_multimail.py.
+
+See README.migrate-from-post-receive-email for more information.
+
+"""
+
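+# Illustrative usage (the paths are placeholders; see the README named
+# above for details): run this script from inside the repository whose
+# configuration should be migrated, e.g.
+#
+#     cd /srv/git/project.git
+#     /path/to/migrate-mailhook-config
+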
+import sys
+import optparse
+
+from git_multimail import CommandError
+from git_multimail import Config
+from git_multimail import read_output
+
+
+OLD_NAMES = [
+ 'mailinglist',
+ 'announcelist',
+ 'envelopesender',
+ 'emailprefix',
+ 'showrev',
+ 'emailmaxlines',
+ 'diffopts',
+ ]
+
+NEW_NAMES = [
+ 'environment',
+ 'reponame',
+ 'mailinglist',
+ 'refchangelist',
+ 'commitlist',
+ 'announcelist',
+ 'announceshortlog',
+ 'envelopesender',
+ 'administrator',
+ 'emailprefix',
+ 'emailmaxlines',
+ 'diffopts',
+ 'emaildomain',
+ ]
+
+
+INFO = """\
+
+SUCCESS!
+
+Your post-receive-email configuration has been converted to
+git-multimail format. Please see README and
+README.migrate-from-post-receive-email to learn about other
+git-multimail configuration possibilities.
+
+For example, git-multimail has the following new options with no
+equivalent in post-receive-email. You might want to read about them
+to see if they would be useful in your situation:
+
+"""
+
+
+def _check_old_config_exists(old):
+ """Check that at least one old configuration value is set."""
+
+ for name in OLD_NAMES:
+ if old.has_key(name):
+ return True
+
+ return False
+
+
+def _check_new_config_clear(new):
+ """Check that none of the new configuration names are set."""
+
+ retval = True
+ for name in NEW_NAMES:
+ if new.has_key(name):
+ if retval:
+ sys.stderr.write('INFO: The following configuration values already exist:\n\n')
+ sys.stderr.write(' "%s.%s"\n' % (new.section, name))
+ retval = False
+
+ return retval
+
+
+def erase_values(config, names):
+ for name in names:
+ if config.has_key(name):
+ try:
+ sys.stderr.write('...unsetting "%s.%s"\n' % (config.section, name))
+ config.unset_all(name)
+ except CommandError:
+ sys.stderr.write(
+ '\nWARNING: could not unset "%s.%s". '
+ 'Perhaps it is not set at the --local level?\n\n'
+ % (config.section, name)
+ )
+
+
+def is_section_empty(section, local):
+ """Return True iff the specified configuration section is empty.
+
+    If local is True, use the --local option when invoking 'git
+ config'."""
+
+ if local:
+ local_option = ['--local']
+ else:
+ local_option = []
+
+ try:
+ read_output(
+ ['git', 'config']
+ + local_option
+ + ['--get-regexp', '^%s\.' % (section,)]
+ )
+ except CommandError, e:
+ if e.retcode == 1:
+ # This means that no settings were found.
+ return True
+ else:
+ raise
+ else:
+ return False
+
+
+def remove_section_if_empty(section):
+ """If the specified configuration section is empty, delete it."""
+
+ try:
+ empty = is_section_empty(section, local=True)
+ except CommandError:
+ # Older versions of git do not support the --local option, so
+ # if the first attempt fails, try without --local.
+ try:
+ empty = is_section_empty(section, local=False)
+ except CommandError:
+ sys.stderr.write(
+ '\nINFO: If configuration section "%s.*" is empty, you might want '
+ 'to delete it.\n\n'
+ % (section,)
+ )
+ return
+
+ if empty:
+ sys.stderr.write('...removing section "%s.*"\n' % (section,))
+ read_output(['git', 'config', '--remove-section', section])
+ else:
+ sys.stderr.write(
+ '\nINFO: Configuration section "%s.*" still has contents. '
+ 'It will not be deleted.\n\n'
+ % (section,)
+ )
+
+
+def migrate_config(strict=False, retain=False, overwrite=False):
+ old = Config('hooks')
+ new = Config('multimailhook')
+ if not _check_old_config_exists(old):
+ sys.exit(
+ 'Your repository has no post-receive-email configuration. '
+ 'Nothing to do.'
+ )
+ if not _check_new_config_clear(new):
+ if overwrite:
+ sys.stderr.write('\nWARNING: Erasing the above values...\n\n')
+ erase_values(new, NEW_NAMES)
+ else:
+ sys.exit(
+ '\nERROR: Refusing to overwrite existing values. Use the --overwrite\n'
+ 'option to continue anyway.'
+ )
+
+ name = 'showrev'
+ if old.has_key(name):
+ msg = 'git-multimail does not support "%s.%s"' % (old.section, name,)
+ if strict:
+ sys.exit(
+ 'ERROR: %s.\n'
+ 'Please unset that value then try again, or run without --strict.'
+ % (msg,)
+ )
+ else:
+ sys.stderr.write('\nWARNING: %s (ignoring).\n\n' % (msg,))
+
+ for name in ['mailinglist', 'announcelist']:
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set_recipients(name, old.get_recipients(name))
+
+ if strict:
+ sys.stderr.write(
+ '...setting "%s.commitlist" to the empty string\n' % (new.section,)
+ )
+ new.set_recipients('commitlist', '')
+ sys.stderr.write(
+ '...setting "%s.announceshortlog" to "true"\n' % (new.section,)
+ )
+ new.set('announceshortlog', 'true')
+
+ for name in ['envelopesender', 'emailmaxlines', 'diffopts']:
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set(name, old.get(name))
+
+ name = 'emailprefix'
+ if old.has_key(name):
+ sys.stderr.write(
+ '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+ )
+ new.set(name, old.get(name))
+ elif strict:
+ sys.stderr.write(
+ '...setting "%s.%s" to "[SCM]" to preserve old subject lines\n'
+ % (new.section, name)
+ )
+ new.set(name, '[SCM]')
+
+ if not retain:
+ erase_values(old, OLD_NAMES)
+ remove_section_if_empty(old.section)
+
+ sys.stderr.write(INFO)
+ for name in NEW_NAMES:
+ if name not in OLD_NAMES:
+ sys.stderr.write(' "%s.%s"\n' % (new.section, name,))
+ sys.stderr.write('\n')
+
+
+def main(args):
+ parser = optparse.OptionParser(
+ description=__doc__,
+ usage='%prog [OPTIONS]',
+ )
+
+ parser.add_option(
+ '--strict', action='store_true', default=False,
+ help=(
+ 'Slavishly configure git-multimail as closely as possible to '
+ 'the post-receive-email configuration. Default is to turn '
+ 'on some new features that have no equivalent in post-receive-email.'
+ ),
+ )
+ parser.add_option(
+ '--retain', action='store_true', default=False,
+ help=(
+ 'Retain the post-receive-email configuration values. '
+ 'Default is to delete them after the new values are set.'
+ ),
+ )
+ parser.add_option(
+ '--overwrite', action='store_true', default=False,
+ help=(
+ 'Overwrite any existing git-multimail configuration settings. '
+ 'Default is to abort if such settings already exist.'
+ ),
+ )
+
+ (options, args) = parser.parse_args(args)
+
+ if args:
+ parser.error('Unexpected arguments: %s' % (' '.join(args),))
+
+ migrate_config(strict=options.strict, retain=options.retain, overwrite=options.overwrite)
+
+
+main(sys.argv[1:])
diff --git a/contrib/hooks/multimail/post-receive b/contrib/hooks/multimail/post-receive
new file mode 100755
index 0000000000..93ebb437d1
--- /dev/null
+++ b/contrib/hooks/multimail/post-receive
@@ -0,0 +1,90 @@
+#! /usr/bin/env python2
+
+"""Example post-receive hook based on git-multimail.
+
+This script is a simple example of a post-receive hook implemented
+using git_multimail.py as a Python module. It is intended to be
+customized before use; see the comments in the script to help you get
+started.
+
+It is possible to use git_multimail.py itself as a post-receive or
+update hook, configured via git config settings and/or command-line
+parameters. But for more flexibility, it can also be imported as a
+Python module by a custom post-receive script as done here. The
+latter has the following advantages:
+
+* The tool's behavior can be customized using arbitrary Python code,
+ without having to edit git_multimail.py.
+
+* Configuration settings can be read from other sources; for example,
+ user names and email addresses could be read from LDAP or from a
+ database. Or the settings can even be hardcoded in the importing
+ Python script, if this is preferred.
+
+This script is a very basic example of how to use git_multimail.py as
+a module. The comments below explain some of the points at which the
+script's behavior could be changed or customized.
+
+"""
+
+import sys
+import os
+
+# If necessary, add the path to the directory containing
+# git_multimail.py to the Python path as follows. (This is not
+# necessary if git_multimail.py is in the same directory as this
+# script):
+
+#LIBDIR = 'path/to/directory/containing/module'
+#sys.path.insert(0, LIBDIR)
+
+import git_multimail
+
+
+# It is possible to modify the output templates here; e.g.:
+
+#git_multimail.FOOTER_TEMPLATE = """\
+#
+#-- \n\
+#This email was generated by the wonderful git-multimail tool.
+#"""
+
+
+# Specify which "git config" section contains the configuration for
+# git-multimail:
+config = git_multimail.Config('multimailhook')
+
+
+# Select the type of environment:
+environment = git_multimail.GenericEnvironment(config=config)
+#environment = git_multimail.GitoliteEnvironment(config=config)
+
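+# As a further, purely illustrative example of the customization
+# mentioned above, the environment could be subclassed to compute some
+# values itself (the class name and return value are hypothetical):
+#
+#class MyEnvironment(git_multimail.GenericEnvironment):
+#    def get_pusher_email(self):
+#        # e.g., look the pusher up in LDAP or a database here
+#        return 'pusher@example.com'
+#
+#environment = MyEnvironment(config=config)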
+
+# Choose the method of sending emails based on the git config:
+mailer = git_multimail.choose_mailer(config, environment)
+
+# Alternatively, you may hardcode the mailer using code like one of
+# the following:
+
+# Use "/usr/sbin/sendmail -t" to send emails. The envelopesender
+# argument is optional:
+#mailer = git_multimail.SendMailer(
+# command=['/usr/sbin/sendmail', '-t'],
+# envelopesender='git-repo@example.com',
+# )
+
+# Use Python's smtplib to send emails. Both arguments are required.
+#mailer = git_multimail.SMTPMailer(
+# envelopesender='git-repo@example.com',
+# # The smtpserver argument can also include a port number; e.g.,
+# # smtpserver='mail.example.com:25'
+# smtpserver='mail.example.com',
+# )
+
+# OutputMailer is intended only for testing; it writes the emails to
+# the specified file stream.
+#mailer = git_multimail.OutputMailer(sys.stdout)
+
+
+# Read changes from stdin and send notification emails:
+git_multimail.run_as_post_receive_hook(environment, mailer)
diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email
index 0e5b72d7f1..8ee410f843 100755
--- a/contrib/hooks/post-receive-email
+++ b/contrib/hooks/post-receive-email
@@ -2,10 +2,19 @@
#
# Copyright (c) 2007 Andy Parkins
#
-# An example hook script to mail out commit update information. This hook
-# sends emails listing new revisions to the repository introduced by the
-# change being reported. The rule is that (for branch updates) each commit
-# will appear on one email and one email only.
+# An example hook script to mail out commit update information.
+#
+# NOTE: This script is no longer under active development. There
+# is another script, git-multimail, which is more capable and
+# configurable and is largely backwards-compatible with this script;
+# please see "contrib/hooks/multimail/". For instructions on how to
+# migrate from post-receive-email to git-multimail, please see
+# "README.migrate-from-post-receive-email" in that directory.
+#
+# This hook sends emails listing new revisions to the repository
+# introduced by the change being reported. The rule is that (for
+# branch updates) each commit will appear on one email and one email
+# only.
#
# This hook is stored in the contrib/hooks directory. Your distribution
# will have put this somewhere standard. You should make this script
@@ -233,6 +242,9 @@ generate_email_header()
cat <<-EOF
To: $recipients
Subject: ${emailprefix}$projectdesc $refname_type $short_refname ${change_type}d. $describe
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset=utf-8
+ Content-Transfer-Encoding: 8bit
X-Git-Refname: $refname
X-Git-Reftype: $refname_type
X-Git-Oldrev: $oldrev
@@ -462,7 +474,7 @@ generate_delete_branch_email()
echo " was $oldrev"
echo ""
echo $LOGBEGIN
- git show -s --pretty=oneline $oldrev
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
@@ -538,11 +550,11 @@ generate_atag_email()
# performed on them
if [ -n "$prevtag" ]; then
# Show changes since the previous release
- git rev-list --pretty=short "$prevtag..$newrev" | git shortlog
+ git shortlog "$prevtag..$newrev"
else
# No previous tag, show all the changes since time
# began
- git rev-list --pretty=short $newrev | git shortlog
+ git shortlog $newrev
fi
;;
*)
@@ -562,7 +574,7 @@ generate_delete_atag_email()
echo " was $oldrev"
echo ""
echo $LOGBEGIN
- git show -s --pretty=oneline $oldrev
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
@@ -608,7 +620,7 @@ generate_general_email()
echo ""
if [ "$newrev_type" = "commit" ]; then
echo $LOGBEGIN
- git show --no-color --root -s --pretty=medium $newrev
+ git diff-tree -s --always --encoding=UTF-8 --pretty=medium $newrev
echo $LOGEND
else
# What can we do here? The tag marks an object that is not
@@ -627,7 +639,7 @@ generate_delete_general_email()
echo " was $oldrev"
echo ""
echo $LOGBEGIN
- git show -s --pretty=oneline $oldrev
+ git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
echo $LOGEND
}
diff --git a/contrib/mw-to-git/.gitignore b/contrib/mw-to-git/.gitignore
index b9196555e5..ae545b013d 100644
--- a/contrib/mw-to-git/.gitignore
+++ b/contrib/mw-to-git/.gitignore
@@ -1 +1,2 @@
git-remote-mediawiki
+git-mw
diff --git a/contrib/mw-to-git/.perlcriticrc b/contrib/mw-to-git/.perlcriticrc
new file mode 100644
index 0000000000..5a9955d757
--- /dev/null
+++ b/contrib/mw-to-git/.perlcriticrc
@@ -0,0 +1,28 @@
+# These 3 rules demand adding the s, m and x flags to *every* regexp. This is
+# overkill and would harm readability.
+[-RegularExpressions::RequireExtendedFormatting]
+[-RegularExpressions::RequireDotMatchAnything]
+[-RegularExpressions::RequireLineBoundaryMatching]
+
+# This rule says that builtin functions should not be called with parentheses
+# e.g.: (taken from CPAN's documentation)
+# open($handle, '>', $filename); #not ok
+# open $handle, '>', $filename; #ok
+# Applying such a rule would mean modifying a huge number of lines for a
+# question of style.
+[-CodeLayout::ProhibitParensWithBuiltins]
+
+# This rule states that each system call should have its return value checked.
+# The problem is that it includes the print call. Checking every print call's
+# return value would harm the code's readability.
+# This configuration keeps all default functions but print.
+[InputOutput::RequireCheckedSyscalls]
+functions = open say close
+
+# This rule demands adding a dependency on the Readonly module. This is not
+# desired.
+[-ValuesAndExpressions::ProhibitConstantPragma]
+
+# This rule is not really useful (rather a question of style) and produces many
+# warnings throughout the code.
+[-ValuesAndExpressions::ProhibitNoisyQuotes]
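
As an illustration of the override above: with RequireCheckedSyscalls restricted to open, say and close, a hypothetical snippet like the following satisfies the policy as configured; open and close are checked, while print's return value is deliberately ignored.

    #!/usr/bin/perl
    use strict;
    use warnings;

    # open and close must be checked under this .perlcriticrc;
    # print's return value may be ignored.
    open my $fh, '>', 'out.txt' or die "cannot open out.txt: $!\n";
    print {$fh} "hello\n";    # unchecked on purpose
    close $fh or die "cannot close out.txt: $!\n";
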
diff --git a/contrib/mw-to-git/Git/Mediawiki.pm b/contrib/mw-to-git/Git/Mediawiki.pm
new file mode 100644
index 0000000000..d13c4dfa7d
--- /dev/null
+++ b/contrib/mw-to-git/Git/Mediawiki.pm
@@ -0,0 +1,100 @@
+package Git::Mediawiki;
+
+use 5.008;
+use strict;
+use Git;
+
+BEGIN {
+
+our ($VERSION, @ISA, @EXPORT, @EXPORT_OK);
+
+# Totally unstable API.
+$VERSION = '0.01';
+
+require Exporter;
+
+@ISA = qw(Exporter);
+
+@EXPORT = ();
+
+# Methods which can be called as standalone functions as well:
+@EXPORT_OK = qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK HTTP_CODE_PAGE_NOT_FOUND);
+}
+
+# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
+use constant SLASH_REPLACEMENT => '%2F';
+
+# Used to test for empty strings
+use constant EMPTY => q{};
+
+# HTTP codes
+use constant HTTP_CODE_OK => 200;
+use constant HTTP_CODE_PAGE_NOT_FOUND => 404;
+
+sub clean_filename {
+ my $filename = shift;
+ $filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. looks like URL-encoding,
+ # but with _ added to prevent MediaWiki from thinking this is
+ # an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+ # If we use the uri escape before
+ # we should unescape here, before anything
+
+ return $filename;
+}
+
+sub smudge_filename {
+ my $filename = shift;
+ $filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
+ return $filename;
+}
+
+sub connect_maybe {
+ my $wiki = shift;
+ if ($wiki) {
+ return $wiki;
+ }
+
+ my $remote_name = shift;
+ my $remote_url = shift;
+ my ($wiki_login, $wiki_password, $wiki_domain);
+
+ $wiki_login = Git::config("remote.${remote_name}.mwLogin");
+ $wiki_password = Git::config("remote.${remote_name}.mwPassword");
+ $wiki_domain = Git::config("remote.${remote_name}.mwDomain");
+
+ $wiki = MediaWiki::API->new;
+ $wiki->{config}->{api_url} = "${remote_url}/api.php";
+ if ($wiki_login) {
+ my %credential = (
+ 'url' => $remote_url,
+ 'username' => $wiki_login,
+ 'password' => $wiki_password
+ );
+ Git::credential(\%credential);
+ my $request = {lgname => $credential{username},
+ lgpassword => $credential{password},
+ lgdomain => $wiki_domain};
+ if ($wiki->login($request)) {
+ Git::credential(\%credential, 'approve');
+ print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
+ } else {
+ print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${remote_url}\n);
+ print {*STDERR} ' (error ' .
+ $wiki->{error}->{code} . ': ' .
+ $wiki->{error}->{details} . ")\n";
+ Git::credential(\%credential, 'reject');
+ exit 1;
+ }
+ }
+
+ return $wiki;
+}
+
+1; # Famous last words
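
A minimal usage sketch of the module above (illustrative only; the remote name 'origin' and the wiki URL are placeholders, and it assumes MediaWiki::API and Git.pm are available and that Git/Mediawiki.pm is on @INC, e.g. via GITPERLLIB):

    #!/usr/bin/perl
    use strict;
    use warnings;
    use MediaWiki::API;
    use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe);

    # '|' is forbidden in MediaWiki titles, so clean_filename() escapes it
    # as _%_7c on the way to the wiki and smudge_filename() decodes it
    # again on the way back; '/' becomes %2F on the Git side.
    my $git_name   = 'Foo|Bar';
    my $wiki_title = clean_filename($git_name);      # 'Foo_%_7cBar'
    my $round_trip = smudge_filename($wiki_title);   # 'Foo|Bar'

    # Lazily create an API session; passing the same $wiki back in on
    # later calls reuses the existing connection instead of reconnecting.
    my $wiki;
    $wiki = connect_maybe($wiki, 'origin', 'http://wiki.example.org');
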
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
index f14971987c..f206f9655b 100644
--- a/contrib/mw-to-git/Makefile
+++ b/contrib/mw-to-git/Makefile
@@ -2,16 +2,51 @@
# Copyright (C) 2013
# Matthieu Moy <Matthieu.Moy@imag.fr>
#
-## Build git-remote-mediawiki
+# To build and test:
+#
+# make
+# bin-wrapper/git mw preview Some_page.mw
+# bin-wrapper/git clone mediawiki::http://example.com/wiki/
+#
+# To install, run Git's toplevel 'make install' then run:
+#
+# make install
+GIT_MEDIAWIKI_PM=Git/Mediawiki.pm
SCRIPT_PERL=git-remote-mediawiki.perl
+SCRIPT_PERL+=git-mw.perl
GIT_ROOT_DIR=../..
HERE=contrib/mw-to-git/
SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+INSTLIBDIR=$(shell $(MAKE) -C $(GIT_ROOT_DIR)/perl \
+ -s --no-print-directory instlibdir)
all: build
-build install clean:
- $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL=$(SCRIPT_PERL_FULL) \
- $@-perl-script
+test: all
+ $(MAKE) -C t
+
+check: perlcritic test
+
+install_pm:
+ install $(GIT_MEDIAWIKI_PM) $(INSTLIBDIR)/$(GIT_MEDIAWIKI_PM)
+
+build:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ build-perl-script
+
+install: install_pm
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ install-perl-script
+
+clean:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ clean-perl-script
+ rm $(INSTLIBDIR)/$(GIT_MEDIAWIKI_PM)
+
+perlcritic:
+ perlcritic -5 $(SCRIPT_PERL)
+ -perlcritic -2 $(SCRIPT_PERL)
+
+.PHONY: all test check install_pm install clean perlcritic
diff --git a/contrib/mw-to-git/bin-wrapper/git b/contrib/mw-to-git/bin-wrapper/git
new file mode 100755
index 0000000000..6663ae57e8
--- /dev/null
+++ b/contrib/mw-to-git/bin-wrapper/git
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# git executable wrapper script for Git-Mediawiki to run tests without
+# installing all the scripts and perl packages.
+
+GIT_ROOT_DIR=../../..
+GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd ${GIT_ROOT_DIR} && pwd)
+
+GITPERLLIB="$GIT_EXEC_PATH"'/contrib/mw-to-git'"${GITPERLLIB:+:$GITPERLLIB}"
+PATH="$GIT_EXEC_PATH"'/contrib/mw-to-git:'"$PATH"
+
+export GITPERLLIB PATH
+
+exec "${GIT_EXEC_PATH}/bin-wrappers/git" "$@"
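
The wrapper relies on GITPERLLIB so that the uninstalled Git/Mediawiki.pm is found before any system-wide copy; Git's build normally injects an equivalent 'use lib' line into its generated Perl scripts. A rough sketch of how a script can honor the variable (illustrative only, and it assumes Git.pm is also reachable through @INC):

    #!/usr/bin/perl
    # Prepend GITPERLLIB entries to @INC so Git::Mediawiki is resolved
    # from the contrib directory before any installed copy.
    use strict;
    use warnings;
    use lib (split(/:/, $ENV{GITPERLLIB} || ''));
    use Git::Mediawiki qw(EMPTY);

    print 'Git::Mediawiki loaded from ', $INC{'Git/Mediawiki.pm'}, "\n";
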
diff --git a/contrib/mw-to-git/git-mw.perl b/contrib/mw-to-git/git-mw.perl
new file mode 100755
index 0000000000..28df3ee321
--- /dev/null
+++ b/contrib/mw-to-git/git-mw.perl
@@ -0,0 +1,368 @@
+#!/usr/bin/perl
+
+# Copyright (C) 2013
+# Benoit Person <benoit.person@ensimag.imag.fr>
+# Celestin Matte <celestin.matte@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Set of tools for git repo with a mediawiki remote.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use URI::URL qw(url);
+use LWP::UserAgent;
+use HTML::TreeBuilder;
+
+use Git;
+use MediaWiki::API;
+use Git::Mediawiki qw(clean_filename connect_maybe
+ EMPTY HTTP_CODE_PAGE_NOT_FOUND);
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+# Global parameters
+my $verbose = 0;
+sub v_print {
+ if ($verbose) {
+ return print {*STDERR} @_;
+ }
+ return;
+}
+
+# Preview parameters
+my $file_name = EMPTY;
+my $remote_name = EMPTY;
+my $preview_file_name = EMPTY;
+my $autoload = 0;
+sub file {
+ $file_name = shift;
+ return $file_name;
+}
+
+my %commands = (
+ 'help' =>
+ [\&help, {}, \&help],
+ 'preview' =>
+ [\&preview, {
+ '<>' => \&file,
+ 'output|o=s' => \$preview_file_name,
+ 'remote|r=s' => \$remote_name,
+ 'autoload|a' => \$autoload
+ }, \&preview_help]
+);
+
+# Search for sub-command
+my $cmd = $commands{'help'};
+for (0..@ARGV-1) {
+ if (defined $commands{$ARGV[$_]}) {
+ $cmd = $commands{$ARGV[$_]};
+ splice @ARGV, $_, 1;
+ last;
+ }
+};
+GetOptions( %{$cmd->[1]},
+ 'help|h' => \&{$cmd->[2]},
+ 'verbose|v' => \$verbose);
+
+# Launch command
+&{$cmd->[0]};
+
+############################# Preview Functions ################################
+
+sub preview_help {
+ print {*STDOUT} <<'END';
+USAGE: git mw preview [--remote|-r <remote name>] [--autoload|-a]
+ [--output|-o <output filename>] [--verbose|-v]
+ <blob> | <filename>
+
+DESCRIPTION:
+Preview is a utility to preview local content of a mediawiki repo as if it were
+pushed to the remote.
+
+For that, preview searches for the remote name of the current branch's
+upstream if --remote is not set. If that remote is not found or if it
+is not a mediawiki, it lists all mediawiki remotes configured and asks
+you to replay your command with the --remote option set properly.
+
+Then, it searches for a file named 'filename'. If it's not found in
+the current dir, it will assume it's a blob.
+
+The content retrieved in the file (or in the blob) will then be parsed
+by the remote mediawiki and combined with a template retrieved from
+the mediawiki.
+
+Finally, preview will save the HTML result in a file, and autoload it
+in your default web browser if the option --autoload is present.
+
+OPTIONS:
+ -r <remote name>, --remote <remote name>
+ If the remote is a mediawiki, the template and the parse engine
+ used for the preview will be those of that remote.
+ If not, a list of valid remotes will be shown.
+
+ -a, --autoload
+ Try to load the HTML output in a new tab (or new window) of your
+ default web browser.
+
+ -o <output filename>, --output <output filename>
+ Change the HTML output filename. Default filename is based on the
+ input filename with its extension replaced by '.html'.
+
+ -v, --verbose
+ Show more information on what's going on under the hood.
+END
+ exit;
+}
+
+sub preview {
+ my $wiki;
+ my ($remote_url, $wiki_page_name);
+ my ($new_content, $template);
+ my $file_content;
+
+ if ($file_name eq EMPTY) {
+ die "Missing file argument, see `git mw help`\n";
+ }
+
+ v_print("### Selecting remote\n");
+ if ($remote_name eq EMPTY) {
+ $remote_name = find_upstream_remote_name();
+ if ($remote_name) {
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ }
+
+ if (! $remote_url) {
+ my @valid_remotes = find_mediawiki_remotes();
+
+ if (!@valid_remotes) {
+ print {*STDERR} "No mediawiki remote in this repo. \n";
+ exit 1;
+ } else {
+ my $remotes_list = join("\n\t", @valid_remotes);
+ print {*STDERR} <<"MESSAGE";
+There are multiple mediawiki remotes, which of:
+ ${remotes_list}
+do you want? Use the -r option to specify the remote.
+MESSAGE
+ }
+
+ exit 1;
+ }
+ } else {
+ if (!is_valid_remote($remote_name)) {
+ die "${remote_name} is not a remote\n";
+ }
+
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ if (! $remote_url) {
+ die "${remote_name} is not a mediawiki remote\n";
+ }
+ }
+ v_print("selected remote:\n\tname: ${remote_name}\n\turl: ${remote_url}\n");
+
+ $wiki = connect_maybe($wiki, $remote_name, $remote_url);
+
+ # Read file content
+ if (! -e $file_name) {
+ $file_content = git_cmd_try {
+ Git::command('cat-file', 'blob', $file_name); }
+ "%s failed w/ code %d";
+
+ if ($file_name =~ /(.+):(.+)/) {
+ $file_name = $2;
+ }
+ } else {
+ open my $read_fh, "<", $file_name
+ or die "could not open ${file_name}: $!\n";
+ $file_content = do { local $/ = undef; <$read_fh> };
+ close $read_fh
+ or die "unable to close: $!\n";
+ }
+
+ v_print("### Retrieving template\n");
+ ($wiki_page_name = clean_filename($file_name)) =~ s/\.[^.]+$//;
+ $template = get_template($remote_url, $wiki_page_name);
+
+ v_print("### Parsing local content\n");
+ $new_content = $wiki->api({
+ action => 'parse',
+ text => $file_content,
+ title => $wiki_page_name
+ }, {
+ skip_encoding => 1
+ }) or die "No response from remote mediawiki\n";
+ $new_content = $new_content->{'parse'}->{'text'}->{'*'};
+
+ v_print("### Merging contents\n");
+ if ($preview_file_name eq EMPTY) {
+ ($preview_file_name = $file_name) =~ s/\.[^.]+$/.html/;
+ }
+ open(my $save_fh, '>:encoding(UTF-8)', $preview_file_name)
+ or die "Could not open: $!\n";
+ print {$save_fh} merge_contents($template, $new_content, $remote_url);
+ close($save_fh)
+ or die "Could not close: $!\n";
+
+ v_print("### Results\n");
+ if ($autoload) {
+ v_print("Launching browser w/ file: ${preview_file_name}");
+ system('git', 'web--browse', $preview_file_name);
+ } else {
+ print {*STDERR} "Preview file saved as: ${preview_file_name}\n";
+ }
+
+ exit;
+}
+
+# uses global scope variable: $remote_name
+sub merge_contents {
+ my $template = shift;
+ my $content = shift;
+ my $remote_url = shift;
+ my ($content_tree, $html_tree, $mw_content_text);
+ my $template_content_id = 'bodyContent';
+
+ $html_tree = HTML::TreeBuilder->new;
+ $html_tree->parse($template);
+
+ $content_tree = HTML::TreeBuilder->new;
+ $content_tree->parse($content);
+
+ $template_content_id = Git::config("remote.${remote_name}.mwIDcontent")
+ || $template_content_id;
+ v_print("Using '${template_content_id}' as the content ID\n");
+
+ $mw_content_text = $html_tree->look_down('id', $template_content_id);
+ if (!defined $mw_content_text) {
+ print {*STDERR} <<"CONFIG";
+Could not combine the new content with the template. You might want to
+configure `mediawiki.IDContent` in your config:
+ git config --add remote.${remote_name}.mwIDcontent <id>
+and re-run the command afterward.
+CONFIG
+ exit 1;
+ }
+ $mw_content_text->delete_content();
+ $mw_content_text->push_content($content_tree);
+
+ make_links_absolute($html_tree, $remote_url);
+
+ return $html_tree->as_HTML;
+}
+
+sub make_links_absolute {
+ my $html_tree = shift;
+ my $remote_url = shift;
+ for (@{ $html_tree->extract_links() }) {
+ my ($link, $element, $attr) = @{ $_ };
+ my $url = url($link)->canonical;
+ if ($url !~ /#/) {
+ $element->attr($attr, URI->new_abs($url, $remote_url));
+ }
+ }
+ return $html_tree;
+}
+
+sub is_valid_remote {
+ my $remote = shift;
+ my @remotes = git_cmd_try {
+ Git::command('remote') }
+ "%s failed w/ code %d";
+ my $found_remote = 0;
+ foreach my $candidate (@remotes) {
+ if ($candidate eq $remote) {
+ $found_remote = 1;
+ last;
+ }
+ }
+ return $found_remote;
+}
+
+sub find_mediawiki_remotes {
+ my @remotes = git_cmd_try {
+ Git::command('remote'); }
+ "%s failed w/ code %d";
+ my $remote_url;
+ my @valid_remotes = ();
+ foreach my $remote (@remotes) {
+ $remote_url = mediawiki_remote_url_maybe($remote);
+ if ($remote_url) {
+ push(@valid_remotes, $remote);
+ }
+ }
+ return @valid_remotes;
+}
+
+sub find_upstream_remote_name {
+ my $current_branch = git_cmd_try {
+ Git::command_oneline('symbolic-ref', '--short', 'HEAD') }
+ "%s failed w/ code %d";
+ return Git::config("branch.${current_branch}.remote");
+}
+
+sub mediawiki_remote_url_maybe {
+ my $remote = shift;
+
+ # Find remote url
+ my $remote_url = Git::config("remote.${remote}.url");
+ if ($remote_url =~ s/mediawiki::(.*)/$1/) {
+ return url($remote_url)->canonical;
+ }
+
+ return;
+}
+
+sub get_template {
+ my $url = shift;
+ my $page_name = shift;
+ my ($req, $res, $code, $url_after);
+
+ $req = LWP::UserAgent->new;
+ if ($verbose) {
+ $req->show_progress(1);
+ }
+
+ $res = $req->get("${url}/index.php?title=${page_name}");
+ if (!$res->is_success) {
+ $code = $res->code;
+ $url_after = $res->request()->uri(); # resolve all redirections
+ if ($code == HTTP_CODE_PAGE_NOT_FOUND) {
+ if ($verbose) {
+ print {*STDERR} <<"WARNING";
+Warning: Failed to retrieve '$page_name'. Create it on the mediawiki if you want
+all the links to work properly.
+Trying to use the mediawiki homepage as a fallback template ...
+WARNING
+ }
+
+ # LWP automatically redirects GET request
+ $res = $req->get("${url}/index.php");
+ if (!$res->is_success) {
+ $url_after = $res->request()->uri(); # resolve all redirections
+ die "Failed to get homepage @ ${url_after} w/ code ${code}\n";
+ }
+ } else {
+ die "Failed to get '${page_name}' @ ${url_after} w/ code ${code}\n";
+ }
+ }
+
+ return $res->decoded_content;
+}
+
+############################## Help Functions ##################################
+
+sub help {
+ print {*STDOUT} <<'END';
+usage: git mw <command> <args>
+
+git mw commands are:
+ help Display help information about git mw
+ preview Parse and render local file into HTML
+END
+ exit;
+}
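
The %commands table above drives git-mw's sub-command handling: the first recognized argument selects a [handler, option spec, help] triple, and Getopt::Long then parses the remaining options for that handler. A condensed sketch of the same pattern with a hypothetical 'greet' command (not part of git-mw):

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Getopt::Long;

    my $name = 'world';
    my %commands = (
        'greet' => [
            sub { print "hello, ${name}\n"; },                # handler
            { 'name|n=s' => \$name },                         # option spec
            sub { print "usage: demo greet [--name <who>]\n"; exit; },
        ],
    );

    my $key = shift(@ARGV) || '';
    my $cmd = $commands{$key}
        or die "unknown command '${key}', expected: greet\n";
    GetOptions(%{ $cmd->[1] }, 'help|h' => $cmd->[2]);
    $cmd->[0]->();

Running it as 'perl demo.pl greet --name Git' would print 'hello, Git'.
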
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 9c14c1f88d..3f8d993afa 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -13,19 +13,17 @@
use strict;
use MediaWiki::API;
+use Git;
+use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
+use warnings;
# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ":utf8";
-binmode STDOUT, ":utf8";
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
use URI::Escape;
-use IPC::Open2;
-
-use warnings;
-
-# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
-use constant SLASH_REPLACEMENT => "%2F";
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
@@ -36,45 +34,57 @@ use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+# Number of pages taken into account at once in submodule get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked mediafile to get at once in get_linked_mediafiles
+# The query is split in small batches because of the MW API limit of
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
-my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
-my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
-$import_media = ($import_media eq "true");
+$import_media = ($import_media eq 'true');
# Export media files on push
-my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
-$export_media = !($export_media eq "false");
+$export_media = !($export_media eq 'false');
-my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
-my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
-my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
-$shallow_import = ($shallow_import eq "true");
+$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
@@ -82,15 +92,18 @@ $shallow_import = ($shallow_import eq "true");
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
-unless ($fetch_strategy) {
- $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+ $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
-unless ($fetch_strategy) {
- $fetch_strategy = "by_page";
+if (!$fetch_strategy) {
+ $fetch_strategy = 'by_page';
}
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
@@ -105,48 +118,25 @@ unless ($fetch_strategy) {
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
-my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
-unless ($dumb_push) {
- $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+ $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
-$dumb_push = ($dumb_push eq "true");
+$dumb_push = ($dumb_push eq 'true');
my $wiki_name = $url;
-$wiki_name =~ s/[^\/]*:\/\///;
+$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
# Commands parser
-my $entry;
-my @cmd;
while (<STDIN>) {
chomp;
- @cmd = split(/ /);
- if (defined($cmd[0])) {
- # Line not blank
- if ($cmd[0] eq "capabilities") {
- die("Too many arguments for capabilities") unless (!defined($cmd[1]));
- mw_capabilities();
- } elsif ($cmd[0] eq "list") {
- die("Too many arguments for list") unless (!defined($cmd[2]));
- mw_list($cmd[1]);
- } elsif ($cmd[0] eq "import") {
- die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
- mw_import($cmd[1]);
- } elsif ($cmd[0] eq "option") {
- die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
- mw_option($cmd[1],$cmd[2]);
- } elsif ($cmd[0] eq "push") {
- mw_push($cmd[1]);
- } else {
- print STDERR "Unknown command. Aborting...\n";
- last;
- }
- } else {
- # blank line: we should terminate
+
+ if (!parse_command($_)) {
last;
}
@@ -156,107 +146,91 @@ while (<STDIN>) {
########################## Functions ##############################
-## credential API management (generic functions)
-
-sub credential_read {
- my %credential;
- my $reader = shift;
- my $op = shift;
- while (<$reader>) {
- my ($key, $value) = /([^=]*)=(.*)/;
- if (not defined $key) {
- die "ERROR receiving response from git credential $op:\n$_\n";
- }
- $credential{$key} = $value;
- }
- return %credential;
+## error handling
+sub exit_error_usage {
+ die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+ "parameters\n" .
+ "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+ "module directly.\n" .
+ "This module can be used in the following way:\n" .
+ "\tgit clone mediawiki://<address of a mediawiki>\n" .
+ "Then, use git commit, push and pull as with every normal git repository.\n";
}
-sub credential_write {
- my $credential = shift;
- my $writer = shift;
- # url overwrites other fields, so it must come first
- print $writer "url=$credential->{url}\n" if exists $credential->{url};
- while (my ($key, $value) = each(%$credential) ) {
- if (length $value && $key ne 'url') {
- print $writer "$key=$value\n";
- }
+sub parse_command {
+ my ($line) = @_;
+ my @cmd = split(/ /, $line);
+ if (!defined $cmd[0]) {
+ return 0;
}
-}
-
-sub credential_run {
- my $op = shift;
- my $credential = shift;
- my $pid = open2(my $reader, my $writer, "git credential $op");
- credential_write($credential, $writer);
- print $writer "\n";
- close($writer);
-
- if ($op eq "fill") {
- %$credential = credential_read($reader, $op);
+ if ($cmd[0] eq 'capabilities') {
+ die("Too many arguments for capabilities\n")
+ if (defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq 'list') {
+ die("Too many arguments for list\n") if (defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq 'import') {
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+ die("Too many arguments for option\n")
+ if (defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq 'push') {
+ mw_push($cmd[1]);
} else {
- if (<$reader>) {
- die "ERROR while running git credential $op:\n$_";
- }
- }
- close($reader);
- waitpid($pid, 0);
- my $child_exit_status = $? >> 8;
- if ($child_exit_status != 0) {
- die "'git credential $op' failed with code $child_exit_status.";
+ print {*STDERR} "Unknown command. Aborting...\n";
+ return 0;
}
+ return 1;
}
# MediaWiki API instance, created lazily.
my $mediawiki;
-sub mw_connect_maybe {
- if ($mediawiki) {
- return;
- }
- $mediawiki = MediaWiki::API->new;
- $mediawiki->{config}->{api_url} = "$url/api.php";
- if ($wiki_login) {
- my %credential = (url => $url);
- $credential{username} = $wiki_login;
- $credential{password} = $wiki_passwd;
- credential_run("fill", \%credential);
- my $request = {lgname => $credential{username},
- lgpassword => $credential{password},
- lgdomain => $wiki_domain};
- if ($mediawiki->login($request)) {
- credential_run("approve", \%credential);
- print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
- } else {
- print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
- print STDERR " (error " .
- $mediawiki->{error}->{code} . ': ' .
- $mediawiki->{error}->{details} . ")\n";
- credential_run("reject", \%credential);
- exit 1;
- }
+sub fatal_mw_error {
+ my $action = shift;
+ print STDERR "fatal: could not $action.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ if ($url =~ /^https/) {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+ print STDERR "fatal: and the SSL certificate is correct.\n";
+ } else {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
}
+ print STDERR "fatal: (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
my $pages = shift;
get_mw_page_list(\@tracked_pages, $pages);
+ return;
}
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
+ my $last_page = SLICE_SIZE;
+ if ($#some_pages < $last_page) {
+ $last_page = $#some_pages;
}
- my @slice = @some_pages[0..$last];
+ my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[51..$#some_pages];
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
+ return;
}
sub get_mw_tracked_categories {
@@ -266,7 +240,7 @@ sub get_mw_tracked_categories {
# Mediawiki requires the Category
# prefix, but let's not force the user
# to specify it.
- $category = "Category:" . $category;
+ $category = "Category:${category}";
}
my $mw_pages = $mediawiki->list( {
action => 'query',
@@ -274,11 +248,12 @@ sub get_mw_tracked_categories {
cmtitle => $category,
cmlimit => 'max' } )
|| die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
sub get_mw_all_pages {
@@ -290,14 +265,12 @@ sub get_mw_all_pages {
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("get the list of wiki pages");
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
# queries the wiki for a set of pages. Meant to be used within a loop
@@ -316,25 +289,23 @@ sub get_mw_first_pages {
titles => $titles,
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not query the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("query the list of wiki pages");
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
- print STDERR "Warning: page $page->{title} not found on wiki\n";
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Listing pages on remote wiki...\n";
+ print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
@@ -352,14 +323,14 @@ sub get_mw_pages {
get_mw_all_pages(\%pages);
}
if ($import_media) {
- print STDERR "Getting media files for selected pages...\n";
+ print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
- print STDERR (scalar keys %pages) . " pages found.\n";
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
@@ -367,9 +338,13 @@ sub get_mw_pages {
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
my $args = shift;
- my $encoding = (shift || "encoding(UTF-8)");
- open(my $git, "-|:$encoding", "git " . $args);
- my $res = do { local $/; <$git> };
+ my $encoding = (shift || 'encoding(UTF-8)');
+ open(my $git, "-|:${encoding}", "git ${args}")
+ or die "Unable to fork: $!\n";
+ my $res = do {
+ local $/ = undef;
+ <$git>
+ };
close($git);
return $res;
@@ -384,27 +359,26 @@ sub get_all_mediafiles {
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
- apnamespace => get_mw_namespace_id("File"),
+ apnamespace => get_mw_namespace_id('File'),
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of pages for media files.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
sub get_linked_mediafiles {
my $pages = shift;
- my @titles = map $_->{title}, values(%{$pages});
+ my @titles = map { $_->{title} } values(%{$pages});
- # The query is split in small batches because of the MW API limit of
- # the number of links to be returned (500 links max).
- my $batch = 10;
+ my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
@@ -420,7 +394,7 @@ sub get_linked_mediafiles {
action => 'query',
prop => 'links|images',
titles => $mw_titles,
- plnamespace => get_mw_namespace_id("File"),
+ plnamespace => get_mw_namespace_id('File'),
pllimit => 'max'
};
my $result = $mediawiki->api($query);
@@ -428,11 +402,13 @@ sub get_linked_mediafiles {
while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
my @media_titles;
if (defined($page->{links})) {
- my @link_titles = map $_->{title}, @{$page->{links}};
+ my @link_titles
+ = map { $_->{title} } @{$page->{links}};
push(@media_titles, @link_titles);
}
if (defined($page->{images})) {
- my @image_titles = map $_->{title}, @{$page->{images}};
+ my @image_titles
+ = map { $_->{title} } @{$page->{images}};
push(@media_titles, @image_titles);
}
if (@media_titles) {
@@ -442,6 +418,7 @@ sub get_linked_mediafiles {
@titles = @titles[($batch+1)..$#titles];
}
+ return;
}
sub get_mw_mediafile_for_page_revision {
@@ -455,7 +432,7 @@ sub get_mw_mediafile_for_page_revision {
my $query = {
action => 'query',
prop => 'imageinfo',
- titles => "File:" . $filename,
+ titles => "File:${filename}",
iistart => $timestamp,
iiend => $timestamp,
iiprop => 'timestamp|archivename|url',
@@ -473,53 +450,50 @@ sub get_mw_mediafile_for_page_revision {
$mediafile{timestamp} = $fileinfo->{timestamp};
# Mediawiki::API's download function doesn't support https URLs
# and can't download old versions of files.
- print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
}
sub download_mw_mediafile {
- my $url = shift;
+ my $download_url = shift;
- my $response = $mediawiki->{ua}->get($url);
- if ($response->code == 200) {
+ my $response = $mediawiki->{ua}->get($download_url);
+ if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
- print STDERR "Error downloading mediafile from :\n";
- print STDERR "URL: $url\n";
- print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ print {*STDERR} "Error downloading mediafile from :\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
sub get_last_local_revision {
# Get note regarding last mediawiki revision
- my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
my @note_info = split(/ /, $note);
my $lastrevision_number;
- if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
- print STDERR "No previous mediawiki revision found";
+ if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+ print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted : mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
- print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
# Get the last remote revision without taking into account which pages are
# tracked or not. This function makes a single request to the wiki, thus
# avoiding a loop over all tracked pages. This is useful for the fetch-by-rev
# option.
sub get_last_global_remote_rev {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -535,14 +509,14 @@ sub get_last_global_remote_rev {
# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my %pages_hash = get_mw_pages();
my @pages = values(%pages_hash);
my $max_rev_num = 0;
- print STDERR "Getting last revision id on tracked pages...\n";
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -563,7 +537,7 @@ sub get_last_remote_revision {
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
- print STDERR "Last remote revision found is $max_rev_num.\n";
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
@@ -574,7 +548,7 @@ sub mediawiki_clean {
# Mediawiki does not allow blank space at the end of a page and ends with a single \n.
# This function right trims a string and adds a \n at the end to follow this rule
$string =~ s/\s+$//;
- if ($string eq "" && $page_created) {
+ if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
@@ -585,38 +559,16 @@ sub mediawiki_clean {
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
- $string = "";
+ $string = EMPTY;
}
# This \n is important. This is due to mediawiki's way of handling the end of files.
- return $string."\n";
-}
-
-sub mediawiki_clean_filename {
- my $filename = shift;
- $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
- # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
- # Do a variant of URL-encoding, i.e. looks like URL-encoding,
- # but with _ added to prevent MediaWiki from thinking this is
- # an actual special character.
- $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
- # If we use the uri escape before
- # we should unescape here, before anything
-
- return $filename;
-}
-
-sub mediawiki_smudge_filename {
- my $filename = shift;
- $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
- $filename =~ s/ /_/g;
- # Decode forbidden characters encoded in mediawiki_clean_filename
- $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
- return $filename;
+ return "${string}\n";
}
sub literal_data {
my ($content) = @_;
- print STDOUT "data ", bytes::length($content), "\n", $content;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ return;
}
sub literal_data_raw {
@@ -624,33 +576,40 @@ sub literal_data_raw {
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
- binmode STDOUT, ":raw";
- print STDOUT "data ", bytes::length($content), "\n", $content;
- binmode STDOUT, ":utf8";
+ binmode STDOUT, ':raw';
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ binmode STDOUT, ':encoding(UTF-8)';
+ return;
}
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
- print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
- print STDOUT "import\n";
- print STDOUT "list\n";
- print STDOUT "push\n";
- print STDOUT "\n";
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ if ($dumb_push) {
+ print {*STDOUT} "no-private-update\n";
+ }
+ print {*STDOUT} "\n";
+ return;
}
sub mw_list {
# MediaWiki do not have branches, we consider one branch arbitrarily
# called master, and HEAD pointing to it.
- print STDOUT "? refs/heads/master\n";
- print STDOUT "\@refs/heads/master HEAD\n";
- print STDOUT "\n";
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_option {
- print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
- print STDOUT "unsupported\n";
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
+ return;
}
sub fetch_mw_revisions_for_page {
@@ -666,6 +625,9 @@ sub fetch_mw_revisions_for_page {
rvstartid => $fetch_from,
rvlimit => 500,
pageids => $id,
+
+ # Let MediaWiki know that we support the latest API.
+ continue => '',
};
my $revnum = 0;
@@ -681,15 +643,22 @@ sub fetch_mw_revisions_for_page {
push(@page_revs, $page_rev_ids);
$revnum++;
}
- last unless $result->{'query-continue'};
- $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+
+ if ($result->{'query-continue'}) { # For legacy APIs
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ } elsif ($result->{continue}) { # For newer APIs
+ $query->{rvstartid} = $result->{continue}->{rvcontinue};
+ $query->{continue} = $result->{continue}->{continue};
+ } else {
+ last;
+ }
}
if ($shallow_import && @page_revs) {
- print STDERR " Found 1 revision (shallow import).\n";
+ print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
- print STDERR " Found ", $revnum, " revision(s).\n";
+ print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
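
The branching added above keeps fetch_mw_revisions_for_page compatible with both continuation protocols of the MediaWiki API: the legacy 'query-continue' field and the newer 'continue' field requested via continue => ''. A condensed, illustrative sketch of that loop, with a hypothetical fetch_batch() standing in for $mediawiki->api($query) and a simplified result shape:

    # fetch_batch() is a hypothetical stand-in for $mediawiki->api($query),
    # assumed to return the current batch of revisions plus any
    # continuation token.
    sub fetch_all_revisions {
        my ($query) = @_;
        my @revs;
        while (1) {
            my $result = fetch_batch($query);
            push(@revs, @{ $result->{revisions} });
            if ($result->{'query-continue'}) {    # legacy API
                $query->{rvstartid} =
                    $result->{'query-continue'}->{revisions}->{rvstartid};
            } elsif ($result->{continue}) {       # newer API
                $query->{rvstartid} = $result->{continue}->{rvcontinue};
                $query->{continue}  = $result->{continue}->{continue};
            } else {
                last;                             # no more batches
            }
        }
        return @revs;
    }
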
@@ -701,8 +670,7 @@ sub fetch_mw_revisions {
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
-
- print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
@@ -716,7 +684,7 @@ sub fe_escape_path {
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
- return '"' . $path . '"';
+ return qq("${path}");
}
sub import_file_revision {
@@ -736,42 +704,43 @@ sub import_file_revision {
my $author = $commit{author};
my $date = $commit{date};
- print STDOUT "commit refs/mediawiki/$remotename/master\n";
- print STDOUT "mark :$n\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
if (!$full_import && $n == 1) {
- print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
- print STDOUT "M 644 inline " .
- fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'M 644 inline ' .
+ fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
- print STDOUT "M 644 inline "
+ print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
} else {
- print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# mediawiki revision number in the git note
if ($full_import && $n == 1) {
- print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
- print STDOUT "commit refs/notes/$remotename/mediawiki\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
- literal_data("Note added by git-mediawiki during import");
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
- print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
- print STDOUT "N inline :$n\n";
- literal_data("mediawiki_revision: " . $commit{mw_revision});
- print STDOUT "\n\n";
+ print {*STDOUT} "N inline :${n}\n";
+ literal_data("mediawiki_revision: $commit{mw_revision}");
+ print {*STDOUT} "\n\n";
+ return;
}
# parse a sequence of
@@ -784,23 +753,25 @@ sub get_more_refs {
my @refs;
while (1) {
my $line = <STDIN>;
- if ($line =~ m/^$cmd (.*)$/) {
+ if ($line =~ /^$cmd (.*)$/) {
push(@refs, $1);
} elsif ($line eq "\n") {
return @refs;
} else {
- die("Invalid command in a '$cmd' batch: ". $_);
+ die("Invalid command in a '$cmd' batch: $_\n");
}
}
+ return;
}
sub mw_import {
# multiple import commands can follow each other.
- my @refs = (shift, get_more_refs("import"));
+ my @refs = (shift, get_more_refs('import'));
foreach my $ref (@refs) {
mw_import_ref($ref);
}
- print STDOUT "done\n";
+ print {*STDOUT} "done\n";
+ return;
}
sub mw_import_ref {
@@ -810,40 +781,41 @@ sub mw_import_ref {
# Since HEAD is a symbolic ref to master (by convention,
# followed by the output of the command "list" that we gave),
# we don't need to do anything in this case.
- if ($ref eq "HEAD") {
+ if ($ref eq 'HEAD') {
return;
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Searching revisions...\n";
+ print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
- print STDERR ", fetching from beginning.\n";
+ print {*STDERR} ", fetching from beginning.\n";
} else {
- print STDERR ", fetching from here.\n";
+ print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
- if ($fetch_strategy eq "by_rev") {
- print STDERR "Fetching & writing export data by revs...\n";
+ if ($fetch_strategy eq 'by_rev') {
+ print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
- } elsif ($fetch_strategy eq "by_page") {
- print STDERR "Fetching & writing export data by pages...\n";
+ } elsif ($fetch_strategy eq 'by_page') {
+ print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
- print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
- print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done remote-helper side. If nothing is done, an error is
# thrown saying that HEAD is referring to unknown object 0000000000000000000
# and the clone fails.
}
+ return;
}
sub mw_import_ref_by_pages {
@@ -855,7 +827,7 @@ sub mw_import_ref_by_pages {
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
- my @revision_ids = map $_->{revid}, @revisions;
+ my @revision_ids = map { $_->{revid} } @revisions;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
@@ -882,7 +854,7 @@ sub mw_import_revids {
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
@@ -898,7 +870,7 @@ sub mw_import_revids {
my $result = $mediawiki->api($query);
if (!$result) {
- die "Failed to retrieve modified page for revision $pagerevid";
+ die "Failed to retrieve modified page for revision $pagerevid\n";
}
if (defined($result->{query}->{badrevids}->{$pagerevid})) {
@@ -907,7 +879,7 @@ sub mw_import_revids {
}
if (!defined($result->{query}->{pages})) {
- die "Invalid revision $pagerevid.";
+ die "Invalid revision ${pagerevid}.\n";
}
my @result_pages = values(%{$result->{query}->{pages}});
@@ -917,8 +889,8 @@ sub mw_import_revids {
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print STDERR "$n/", scalar(@$revision_ids),
- ": Skipping revision #$rev->{revid} of $page_title\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+ ": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
@@ -927,7 +899,7 @@ sub mw_import_revids {
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
- $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{title} = smudge_filename($page_title);
$commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
@@ -943,14 +915,14 @@ sub mw_import_revids {
my %mediafile;
if ($namespace) {
my $id = get_mw_namespace_id($namespace);
- if ($id && $id == get_mw_namespace_id("File")) {
+ if ($id && $id == get_mw_namespace_id('File')) {
%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
}
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
@@ -958,17 +930,17 @@ sub mw_import_revids {
}
sub error_non_fast_forward {
- my $advice = run_git("config --bool advice.pushNonFastForward");
+ my $advice = run_git('config --bool advice.pushNonFastForward');
chomp($advice);
- if ($advice ne "false") {
+ if ($advice ne 'false') {
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
- print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
- print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
@@ -979,34 +951,34 @@ sub mw_upload_file {
my $file_deleted = shift;
my $summary = shift;
my $newrevid;
- my $path = "File:" . $complete_file_name;
+ my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
- print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
- print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a privileged user
if ($file_deleted) {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'delete',
title => $path,
reason => $summary
};
if (!$mediawiki->edit($query)) {
- print STDERR "Failed to delete file on remote wiki\n";
- print STDERR "Check your permissions on the remote site. Error code:\n";
- print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
- my $content = run_git("cat-file blob $new_sha1", "raw");
- if ($content ne "") {
- mw_connect_maybe();
+ my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+ if ($content ne EMPTY) {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
$mediawiki->{config}->{upload_url} =
- "$url/index.php/Special:Upload";
+ "${url}/index.php/Special:Upload";
$mediawiki->edit({
action => 'upload',
filename => $complete_file_name,
@@ -1018,12 +990,12 @@ sub mw_upload_file {
}, {
skip_encoding => 1
} ) || die $mediawiki->{error}->{code} . ':'
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
- print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
- print STDERR "Empty file $complete_file_name not pushed.\n";
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
@@ -1045,24 +1017,24 @@ sub mw_push_file {
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
- $summary = '';
+ $summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_created = ($old_sha1 eq NULL_SHA1);
my $page_deleted = ($new_sha1 eq NULL_SHA1);
- $complete_file_name = mediawiki_clean_filename($complete_file_name);
+ $complete_file_name = clean_filename($complete_file_name);
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
- $extension = "";
+ $extension = EMPTY;
}
- if ($extension eq "mw") {
+ if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
- if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
- print STDERR "Ignoring media file related page: $complete_file_name\n";
- return ($oldrevid, "ok");
+ if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+ return ($oldrevid, 'ok');
}
my $file_content;
if ($page_deleted) {
@@ -1072,10 +1044,10 @@ sub mw_push_file {
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
- $file_content = run_git("cat-file blob $new_sha1");
+ $file_content = run_git("cat-file blob ${new_sha1}");
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $result = $mediawiki->edit( {
action => 'edit',
@@ -1089,49 +1061,49 @@ sub mw_push_file {
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
- print STDERR 'Warning: Error ' .
+ print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($oldrevid, "non-fast-forward");
+ return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details};
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
- print STDERR "Pushed file: $new_sha1 - $title\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
- print STDERR "Ignoring media file $title\n";
+ print {*STDERR} "Ignoring media file ${title}\n";
}
$newrevid = ($newrevid or $oldrevid);
- return ($newrevid, "ok");
+ return ($newrevid, 'ok');
}
sub mw_push {
# multiple push statements can follow each other
- my @refsspecs = (shift, get_more_refs("push"));
+ my @refsspecs = (shift, get_more_refs('push'));
my $pushed;
for my $refspec (@refsspecs) {
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
- or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
- print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
- if ($local eq "") {
- print STDERR "Cannot delete remote branch on a MediaWiki\n";
- print STDOUT "error $remote cannot delete\n";
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
- if ($remote ne "refs/heads/master") {
- print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
- print STDOUT "error $remote only master allowed\n";
+ if ($remote ne 'refs/heads/master') {
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
@@ -1140,30 +1112,32 @@ sub mw_push {
}
# Notify Git that the push is done
- print STDOUT "\n";
+ print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
- print STDERR "Just pushed some revisions to MediaWiki.\n";
- print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
- print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
- print STDERR "\n";
- print STDERR " git pull --rebase\n";
- print STDERR "\n";
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
}
+ return;
}
sub mw_push_revision {
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
- print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
- my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+ chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
- my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
@@ -1182,22 +1156,22 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
- print STDERR "Computing path from local to remote ...\n";
- my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ print {*STDERR} "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
foreach my $line (@local_ancestry) {
- if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
- foreach my $parent (split(' ', $parents)) {
+ if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(/ /, $parents)) {
$local_ancestry{$parent} = $child;
}
- } elsif (!$line =~ m/^([a-f0-9]+)/) {
- die "Unexpected output from git rev-list: $line";
+ } elsif (!$line =~ /^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: ${line}\n";
}
}
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
- printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
@@ -1206,12 +1180,12 @@ sub mw_push_revision {
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
- print STDERR "Warning: no common ancestor, pushing complete history\n";
- my $history = run_git("rev-list --first-parent --children $local");
- my @history = split('\n', $history);
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children ${local}");
+ my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
- my @commit_info_split = split(/ |\n/, $line);
+ my @commit_info_split = split(/[ \n]/, $line);
push(@commit_pairs, \@commit_info_split);
}
}
@@ -1219,12 +1193,12 @@ sub mw_push_revision {
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
- my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as mediawiki comment for the revision
- my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
+ my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -1236,7 +1210,7 @@ sub mw_push_revision {
my $info = shift(@diff_info_list);
my $file = shift(@diff_info_list);
($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
- if ($status eq "non-fast-forward") {
+ if ($status eq 'non-fast-forward') {
# we may already have sent part of the
# commit to MediaWiki, but it's too
# late to cancel it. Stop the push in
@@ -1244,22 +1218,21 @@ sub mw_push_revision {
# accurate error message.
return error_non_fast_forward($remote);
}
- if ($status ne "ok") {
- die("Unknown error from mw_push_file()");
+ if ($status ne 'ok') {
+ die("Unknown error from mw_push_file()\n");
}
}
- unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
- run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ if (!$dumb_push) {
+ run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
}
}
- print STDOUT "ok $remote\n";
+ print {*STDOUT} "ok ${remote}\n";
return 1;
}
sub get_allowed_file_extensions {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -1267,8 +1240,8 @@ sub get_allowed_file_extensions {
siprop => 'fileextensions'
};
my $result = $mediawiki->api($query);
- my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
- my %hashFile = map {$_ => 1}@file_extensions;
+ my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+ my %hashFile = map { $_ => 1 } @file_extensions;
return %hashFile;
}
@@ -1283,15 +1256,15 @@ my %cached_mw_namespace_id;
# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $name = shift;
if (!exists $namespace_id{$name}) {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
- my @temp = split(/[\n]/, run_git("config --get-all remote."
- . $remotename .".namespaceCache"));
+ my @temp = split(/\n/,
+ run_git("config --get-all remote.${remotename}.namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
@@ -1305,7 +1278,7 @@ sub get_mw_namespace_id {
}
if (!exists $namespace_id{$name}) {
- print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
@@ -1329,8 +1302,8 @@ sub get_mw_namespace_id {
my $ns = $namespace_id{$name};
my $id;
- unless (defined $ns) {
- print STDERR "No such namespace $name on MediaWiki.\n";
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}
@@ -1342,17 +1315,17 @@ sub get_mw_namespace_id {
# Store "notANameSpace" as special value for inexisting namespaces
my $store_id = ($id || 'notANameSpace');
- # Store explicitely requested namespaces on disk
+ # Store explicitly requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
- run_git("config --add remote.". $remotename
- .".namespaceCache \"". $name .":". $store_id ."\"");
+ run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
$cached_mw_namespace_id{$name} = 1;
}
return $id;
}
sub get_mw_namespace_id_for_page {
- if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ my $namespace = shift;
+ if ($namespace =~ /^([^:]*):/) {
return get_mw_namespace_id($namespace);
} else {
return;
diff --git a/contrib/mw-to-git/t/t9365-continuing-queries.sh b/contrib/mw-to-git/t/t9365-continuing-queries.sh
new file mode 100755
index 0000000000..27e267f532
--- /dev/null
+++ b/contrib/mw-to-git/t/t9365-continuing-queries.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: queries w/ more than 500 results'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'creating page w/ >500 revisions' '
+ wiki_reset &&
+ for i in `test_seq 501`
+ do
+ echo "creating revision $i" &&
+ wiki_editpage foo "revision $i<br/>" true
+ done
+'
+
+test_expect_success 'cloning page w/ >500 revisions' '
+ git clone mediawiki::'"$WIKI_URL"' mw_dir
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
index 3b2cfacf51..3372b2af34 100755
--- a/contrib/mw-to-git/t/test-gitmw-lib.sh
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -62,12 +62,8 @@ test_check_precond () {
test_done
fi
- if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
- then
- echo "No remote mediawiki for git found. Copying it in git"
- echo "cp $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
- ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
- fi
+ GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
+ PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
then
@@ -95,7 +91,7 @@ test_diff_directories () {
# Check that <dir> contains exactly <N> files
test_contains_N_files () {
if test `ls -- "$1" | wc -l` -ne "$2"; then
- echo "directory $1 sould contain $2 files"
+ echo "directory $1 should contain $2 files"
echo "it contains these files:"
ls "$1"
false
@@ -336,20 +332,21 @@ wiki_install () {
fi
# Fetch MediaWiki's archive if not already present in the TMP directory
+ MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
cd "$TMP"
- if [ ! -f "$MW_VERSION.tar.gz" ] ; then
- echo "Downloading $MW_VERSION sources ..."
- wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
+ if [ ! -f $MW_FILENAME ] ; then
+ echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
+ wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
error "Unable to download "\
- "http://download.wikimedia.org/mediawiki/1.19/"\
- "mediawiki-1.19.0.tar.gz. "\
+ "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
+ "$MW_FILENAME. "\
"Please fix your connection and launch the script again."
- echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+ echo "$MW_FILENAME downloaded in `pwd`. "\
"You can delete it later if you want."
else
- echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+ echo "Reusing existing $MW_FILENAME downloaded in `pwd`."
fi
- archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ archive_abs_path=$(pwd)/$MW_FILENAME
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
tar xzf "$archive_abs_path" --strip-components=1 ||
@@ -431,5 +428,5 @@ wiki_delete () {
# Delete the wiki's SQLite database
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
rm -f "$FILES_FOLDER/$DB_FILE"
- rm -rf "$TMP/$MW_VERSION"
+ rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
}
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
index 958b37b4a7..5ba0684162 100644
--- a/contrib/mw-to-git/t/test.config
+++ b/contrib/mw-to-git/t/test.config
@@ -12,7 +12,7 @@ SERVER_ADDR=localhost
TMP=/tmp
DB_FILE=wikidb.sqlite
-# If LIGHTTPD is not set to true, the script will use the defaut
+# If LIGHTTPD is not set to true, the script will use the default
# web server running in WIKI_DIR_INST.
WIKI_DIR_INST=/var/www
@@ -30,6 +30,8 @@ WEB_WWW=$WEB/www
# The variables below are used by the script to install a wiki.
# You should not modify these unless you are modifying the script itself.
-MW_VERSION=mediawiki-1.19.0
+# tested versions: 1.19.X -> 1.21.1
+MW_VERSION_MAJOR=1.21
+MW_VERSION_MINOR=1
FILES_FOLDER=install-wiki
DB_INSTALL_SCRIPT=db_install.php
diff --git a/contrib/patches/docbook-xsl-manpages-charmap.patch b/contrib/patches/docbook-xsl-manpages-charmap.patch
deleted file mode 100644
index f2b08b4f4a..0000000000
--- a/contrib/patches/docbook-xsl-manpages-charmap.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-From: Ismail Dönmez <ismail@pardus.org.tr>
-
-Trying to build the documentation with docbook-xsl 1.73 may result in
-the following error. This patch fixes it.
-
-$ xmlto -m callouts.xsl man git-add.xml
-runtime error: file
-file:///usr/share/sgml/docbook/xsl-stylesheets-1.73.0/manpages/other.xsl line
-129 element call-template
-The called template 'read-character-map' was not found.
-
---- docbook-xsl-1.73.0/manpages/docbook.xsl.manpages-charmap 2007-07-23 16:24:23.000000000 +0100
-+++ docbook-xsl-1.73.0/manpages/docbook.xsl 2007-07-23 16:25:16.000000000 +0100
-@@ -37,6 +37,7 @@
- <xsl:include href="lists.xsl"/>
- <xsl:include href="endnotes.xsl"/>
- <xsl:include href="table.xsl"/>
-+ <xsl:include href="../common/charmap.xsl"/>
-
- <!-- * we rename the following just to avoid using params with "man" -->
- <!-- * prefixes in the table.xsl stylesheet (because that stylesheet -->
diff --git a/contrib/remote-helpers/Makefile b/contrib/remote-helpers/Makefile
index 9a76575f78..239161de33 100644
--- a/contrib/remote-helpers/Makefile
+++ b/contrib/remote-helpers/Makefile
@@ -3,6 +3,7 @@ TESTS := $(wildcard test*.sh)
export T := $(addprefix $(CURDIR)/,$(TESTS))
export MAKE := $(MAKE) -e
export PATH := $(CURDIR):$(PATH)
+export TEST_LINT := test-lint-executable test-lint-shell-syntax
test:
$(MAKE) -C ../../t $@
diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr
index c5822e4ac9..054161ae21 100755
--- a/contrib/remote-helpers/git-remote-bzr
+++ b/contrib/remote-helpers/git-remote-bzr
@@ -13,6 +13,12 @@
# or
# % git clone bzr::lp:myrepo
#
+# If you want to specify which branches you want to track (per repo):
+# % git config remote.origin.bzr-branches 'trunk, devel, test'
+#
+# Where 'origin' is the name of the remote for which you want to specify
+# the branches.
+#
import sys
@@ -25,15 +31,21 @@ bzrlib.plugin.load_plugins()
import bzrlib.generate_ids
import bzrlib.transport
+import bzrlib.errors
+import bzrlib.ui
+import bzrlib.urlutils
+import bzrlib.branch
import sys
import os
import json
import re
import StringIO
+import atexit, shutil, hashlib, urlparse, subprocess
NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$')
+EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\t<>]+)')
RAW_AUTHOR_RE = re.compile('^(\w+) (.+)? <(.*)> (\d+) ([+-]\d+)')
def die(msg, *args):
@@ -46,6 +58,12 @@ def warn(msg, *args):
def gittz(tz):
return '%+03d%02d' % (tz / 3600, tz % 3600 / 60)
+def get_config(config):
+ cmd = ['git', 'config', '--get', config]
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ return output
+
class Marks:
def __init__(self, path):
@@ -81,7 +99,7 @@ class Marks:
return self.marks[rev]
def to_rev(self, mark):
- return self.rev_marks[mark]
+ return str(self.rev_marks[mark])
def next_mark(self):
self.last_mark += 1
@@ -93,7 +111,7 @@ class Marks:
return self.last_mark
def is_marked(self, rev):
- return self.marks.has_key(rev)
+ return rev in self.marks
def new_mark(self, rev, mark):
self.marks[rev] = mark
@@ -101,7 +119,10 @@ class Marks:
self.last_mark = mark
def get_tip(self, branch):
- return self.tips.get(branch, None)
+ try:
+ return str(self.tips[branch])
+ except KeyError:
+ return None
def set_tip(self, branch, tip):
self.tips[branch] = tip
@@ -150,17 +171,16 @@ class Parser:
if not m:
return None
_, name, email, date, tz = m.groups()
+ name = name.decode('utf-8')
committer = '%s <%s>' % (name, email)
tz = int(tz)
tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
return (committer, int(date), tz)
def rev_to_mark(rev):
- global marks
return marks.from_rev(rev)
def mark_to_rev(mark):
- global marks
return marks.to_rev(mark)
def fixup_user(user):
@@ -171,9 +191,19 @@ def fixup_user(user):
name = m.group(1)
mail = m.group(2).strip()
else:
- m = NAME_RE.match(user)
+ m = EMAIL_RE.match(user)
if m:
- name = m.group(1).strip()
+ name = m.group(1)
+ mail = m.group(2)
+ else:
+ m = NAME_RE.match(user)
+ if m:
+ name = m.group(1).strip()
+
+ if not name:
+ name = 'unknown'
+ if not mail:
+ mail = 'Unknown'
return '%s <%s>' % (name, mail)
@@ -183,21 +213,28 @@ def get_filechanges(cur, prev):
changes = cur.changes_from(prev)
+ def u(s):
+ return s.encode('utf-8')
+
for path, fid, kind in changes.added:
- modified[path] = fid
+ modified[u(path)] = fid
for path, fid, kind in changes.removed:
- removed[path] = None
+ removed[u(path)] = None
for path, fid, kind, mod, _ in changes.modified:
- modified[path] = fid
+ modified[u(path)] = fid
for oldpath, newpath, fid, kind, mod, _ in changes.renamed:
- removed[oldpath] = None
- modified[newpath] = fid
+ removed[u(oldpath)] = None
+ if kind == 'directory':
+ lst = cur.list_files(from_dir=newpath, recursive=True)
+ for path, file_class, kind, fid, entry in lst:
+ if kind != 'directory':
+ modified[u(newpath + '/' + path)] = fid
+ else:
+ modified[u(newpath)] = fid
return modified, removed
def export_files(tree, files):
- global marks, filenodes
-
final = []
for path, fid in files.iteritems():
kind = tree.kind(fid)
@@ -214,7 +251,7 @@ def export_files(tree, files):
else:
mode = '100644'
- # is the blog already exported?
+ # is the blob already exported?
if h in filenodes:
mark = filenodes[h]
final.append((mode, mark, path))
@@ -238,29 +275,42 @@ def export_files(tree, files):
return final
-def export_branch(branch, name):
- global prefix, dirname
-
+def export_branch(repo, name):
ref = '%s/heads/%s' % (prefix, name)
tip = marks.get_tip(name)
+ branch = get_remote_branch(name)
repo = branch.repository
- repo.lock_read()
+
+ branch.lock_read()
revs = branch.iter_merge_sorted_revisions(None, tip, 'exclude', 'forward')
- count = 0
+ try:
+ tip_revno = branch.revision_id_to_revno(tip)
+ last_revno, _ = branch.last_revision_info()
+ total = last_revno - tip_revno
+ except bzrlib.errors.NoSuchRevision:
+ tip_revno = 0
+ total = 0
- revs = [revid for revid, _, _, _ in revs if not marks.is_marked(revid)]
+ for revid, _, seq, _ in revs:
- for revid in revs:
+ if marks.is_marked(revid):
+ continue
rev = repo.get_revision(revid)
+ revno = seq[0]
parents = rev.parent_ids
time = rev.timestamp
tz = rev.timezone
committer = rev.committer.encode('utf-8')
committer = "%s %u %s" % (fixup_user(committer), time, gittz(tz))
- author = committer
+ authors = rev.get_apparent_authors()
+ if authors:
+ author = authors[0].encode('utf-8')
+ author = "%s %u %s" % (fixup_user(author), time, gittz(tz))
+ else:
+ author = committer
msg = rev.message.encode('utf-8')
msg += '\n'
@@ -297,18 +347,24 @@ def export_branch(branch, name):
else:
print "merge :%s" % m
+ for f in removed:
+ print "D %s" % (f,)
for f in modified_final:
print "M %s :%u %s" % f
- for f in removed:
- print "D %s" % (f)
print
- count += 1
- if (count % 100 == 0):
- print "progress revision %s (%d/%d)" % (revid, count, len(revs))
- print "#############################################################"
+ if len(seq) > 1:
+ # let's skip branch revisions from the progress report
+ continue
+
+ progress = (revno - tip_revno)
+ if (progress % 100 == 0):
+ if total:
+ print "progress revision %d '%s' (%d/%d)" % (revno, name, progress, total)
+ else:
+ print "progress revision %d '%s' (%d)" % (revno, name, progress)
- repo.unlock()
+ branch.unlock()
revid = branch.last_revision()
@@ -320,34 +376,30 @@ def export_branch(branch, name):
marks.set_tip(name, revid)
def export_tag(repo, name):
- global tags
- try:
- print "reset refs/tags/%s" % name
- print "from :%u" % rev_to_mark(tags[name])
- print
- except KeyError:
- warn("TODO: fetch tag '%s'" % name)
+ ref = '%s/tags/%s' % (prefix, name)
+ print "reset %s" % ref
+ print "from :%u" % rev_to_mark(tags[name])
+ print
def do_import(parser):
- global dirname
-
- branch = parser.repo
+ repo = parser.repo
path = os.path.join(dirname, 'marks-git')
print "feature done"
if os.path.exists(path):
print "feature import-marks=%s" % path
print "feature export-marks=%s" % path
+ print "feature force"
sys.stdout.flush()
while parser.check('import'):
ref = parser[1]
if ref.startswith('refs/heads/'):
name = ref[len('refs/heads/'):]
- export_branch(branch, name)
+ export_branch(repo, name)
if ref.startswith('refs/tags/'):
name = ref[len('refs/tags/'):]
- export_tag(branch, name)
+ export_tag(repo, name)
parser.next()
print 'done'
@@ -355,8 +407,6 @@ def do_import(parser):
sys.stdout.flush()
def parse_blob(parser):
- global blob_marks
-
parser.next()
mark = parser.get_mark()
parser.next()
@@ -366,23 +416,19 @@ def parse_blob(parser):
class CustomTree():
- def __init__(self, repo, revid, parents, files):
- global files_cache
-
- self.repo = repo
- self.revid = revid
- self.parents = parents
+ def __init__(self, branch, revid, parents, files):
self.updates = {}
+ self.branch = branch
def copy_tree(revid):
files = files_cache[revid] = {}
- tree = repo.repository.revision_tree(revid)
- repo.lock_read()
+ branch.lock_read()
+ tree = branch.repository.revision_tree(revid)
try:
for path, entry in tree.iter_entries_by_dir():
- files[path] = entry.file_id
+ files[path] = [entry.file_id, None]
finally:
- repo.unlock()
+ branch.unlock()
return files
if len(parents) == 0:
@@ -395,12 +441,18 @@ class CustomTree():
self.base_files = copy_tree(self.base_id)
self.files = files_cache[revid] = self.base_files.copy()
+ self.rev_files = {}
+
+ for path, data in self.files.iteritems():
+ fid, mark = data
+ self.rev_files[fid] = [path, mark]
for path, f in files.iteritems():
- fid = self.files.get(path, None)
+ fid, mark = self.files.get(path, [None, None])
if not fid:
fid = bzrlib.generate_ids.gen_file_id(path)
f['path'] = path
+ self.rev_files[fid] = [path, mark]
self.updates[fid] = f
def last_revision(self):
@@ -410,19 +462,19 @@ class CustomTree():
changes = []
def get_parent(dirname, basename):
- parent_fid = self.base_files.get(dirname, None)
+ parent_fid, mark = self.base_files.get(dirname, [None, None])
if parent_fid:
return parent_fid
- parent_fid = self.files.get(dirname, None)
+ parent_fid, mark = self.files.get(dirname, [None, None])
if parent_fid:
return parent_fid
if basename == '':
return None
fid = bzrlib.generate_ids.gen_file_id(path)
- d = add_entry(fid, dirname, 'directory')
+ add_entry(fid, dirname, 'directory')
return fid
- def add_entry(fid, path, kind, mode = None):
+ def add_entry(fid, path, kind, mode=None):
dirname, basename = os.path.split(path)
parent_fid = get_parent(dirname, basename)
@@ -440,11 +492,10 @@ class CustomTree():
(None, basename),
(None, kind),
(None, executable))
- self.files[path] = change[0]
+ self.files[path] = [change[0], None]
changes.append(change)
- return change
- def update_entry(fid, path, kind, mode = None):
+ def update_entry(fid, path, kind, mode=None):
dirname, basename = os.path.split(path)
parent_fid = get_parent(dirname, basename)
@@ -462,9 +513,8 @@ class CustomTree():
(None, basename),
(None, kind),
(None, executable))
- self.files[path] = change[0]
+ self.files[path] = [change[0], None]
changes.append(change)
- return change
def remove_entry(fid, path, kind):
dirname, basename = os.path.split(path)
@@ -479,7 +529,6 @@ class CustomTree():
(None, None))
del self.files[path]
changes.append(change)
- return change
for fid, f in self.updates.iteritems():
path = f['path']
@@ -493,25 +542,47 @@ class CustomTree():
else:
add_entry(fid, path, 'file', f['mode'])
+ self.files[path][1] = f['mark']
+ self.rev_files[fid][1] = f['mark']
+
return changes
+ def get_content(self, file_id):
+ path, mark = self.rev_files[file_id]
+ if mark:
+ return blob_marks[mark]
+
+ # last resort
+ tree = self.branch.repository.revision_tree(self.base_id)
+ return tree.get_file_text(file_id)
+
def get_file_with_stat(self, file_id, path=None):
- return (StringIO.StringIO(self.updates[file_id]['data']), None)
+ content = self.get_content(file_id)
+ return (StringIO.StringIO(content), None)
def get_symlink_target(self, file_id):
- return self.updates[file_id]['data']
+ return self.get_content(file_id)
-def parse_commit(parser):
- global marks, blob_marks, bmarks, parsed_refs
- global mode
+ def id2path(self, file_id):
+ path, mark = self.rev_files[file_id]
+ return path
+
+def c_style_unescape(string):
+ if string[0] == string[-1] == '"':
+ return string.decode('string-escape')[1:-1]
+ return string
+def parse_commit(parser):
parents = []
ref = parser[1]
parser.next()
- if ref != 'refs/heads/master':
- die("bzr doesn't support multiple branches; use 'master'")
+ if ref.startswith('refs/heads/'):
+ name = ref[len('refs/heads/'):]
+ branch = get_remote_branch(name)
+ else:
+ die('unknown ref')
commit_mark = parser.get_mark()
parser.next()
@@ -528,34 +599,37 @@ def parse_commit(parser):
parents.append(parser.get_mark())
parser.next()
+ # fast-export adds an extra newline
+ if data[-1] == '\n':
+ data = data[:-1]
+
files = {}
for line in parser:
if parser.check('M'):
t, m, mark_ref, path = line.split(' ', 3)
mark = int(mark_ref[1:])
- f = { 'mode' : m, 'data' : blob_marks[mark] }
+ f = { 'mode' : m, 'mark' : mark }
elif parser.check('D'):
- t, path = line.split(' ')
+ t, path = line.split(' ', 1)
f = { 'deleted' : True }
else:
die('Unknown file command: %s' % line)
+ path = c_style_unescape(path).decode('utf-8')
files[path] = f
- repo = parser.repo
-
committer, date, tz = committer
- parents = [str(mark_to_rev(p)) for p in parents]
+ parents = [mark_to_rev(p) for p in parents]
revid = bzrlib.generate_ids.gen_revision_id(committer, date)
props = {}
- props['branch-nick'] = repo.nick
+ props['branch-nick'] = branch.nick
- mtree = CustomTree(repo, revid, parents, files)
+ mtree = CustomTree(branch, revid, parents, files)
changes = mtree.iter_changes()
- repo.lock_write()
+ branch.lock_write()
try:
- builder = repo.get_commit_builder(parents, None, date, tz, committer, props, revid)
+ builder = branch.get_commit_builder(parents, None, date, tz, committer, props, revid)
try:
list(builder.record_iter_changes(mtree, mtree.last_revision(), changes))
builder.finish_inventory()
@@ -564,20 +638,15 @@ def parse_commit(parser):
builder.abort()
raise
finally:
- repo.unlock()
+ branch.unlock()
parsed_refs[ref] = revid
marks.new_mark(revid, commit_mark)
def parse_reset(parser):
- global parsed_refs
-
ref = parser[1]
parser.next()
- if ref != 'refs/heads/master':
- die("bzr doesn't support multiple branches; use 'master'")
-
# ugh
if parser.check('commit'):
parse_commit(parser)
@@ -590,8 +659,6 @@ def parse_reset(parser):
parsed_refs[ref] = mark_to_rev(from_mark)
def do_export(parser):
- global parsed_refs, dirname, peer
-
parser.next()
for line in parser.each_block('done'):
@@ -608,30 +675,43 @@ def do_export(parser):
else:
die('unhandled export command: %s' % line)
- repo = parser.repo
-
for ref, revid in parsed_refs.iteritems():
- if ref == 'refs/heads/master':
- repo.generate_revision_history(revid, marks.get_tip('master'))
- revno, revid = repo.last_revision_info()
- if peer:
- if hasattr(peer, "import_last_revision_info_and_tags"):
- peer.import_last_revision_info_and_tags(repo, revno, revid)
- else:
- peer.import_last_revision_info(repo.repository, revno, revid)
- wt = peer.bzrdir.open_workingtree()
- else:
- wt = repo.bzrdir.open_workingtree()
- wt.update()
+ if ref.startswith('refs/heads/'):
+ name = ref[len('refs/heads/'):]
+ branch = get_remote_branch(name)
+ branch.generate_revision_history(revid, marks.get_tip(name))
+
+ if name in peers:
+ peer = bzrlib.branch.Branch.open(peers[name],
+ possible_transports=transports)
+ try:
+ peer.bzrdir.push_branch(branch, revision_id=revid)
+ except bzrlib.errors.DivergedBranches:
+ print "error %s non-fast forward" % ref
+ continue
+
+ try:
+ wt = branch.bzrdir.open_workingtree()
+ wt.update()
+ except bzrlib.errors.NoWorkingTree:
+ pass
+ elif ref.startswith('refs/tags/'):
+ # TODO: implement tag push
+ print "error %s pushing tags not supported" % ref
+ continue
+ else:
+ # transport-helper/fast-export bugs
+ continue
+
print "ok %s" % ref
+
print
def do_capabilities(parser):
- global dirname
-
print "import"
print "export"
print "refspec refs/heads/*:%s/heads/*" % prefix
+ print "refspec refs/tags/*:%s/tags/*" % prefix
path = os.path.join(dirname, 'marks-git')
@@ -641,66 +721,201 @@ def do_capabilities(parser):
print
+def ref_is_valid(name):
+ return not True in [c in name for c in '~^: \\']
+
def do_list(parser):
- global tags
- print "? refs/heads/%s" % 'master'
- for tag, revid in parser.repo.tags.get_tag_dict().items():
+ master_branch = None
+
+ for name in branches:
+ if not master_branch:
+ master_branch = name
+ print "? refs/heads/%s" % name
+
+ branch = get_remote_branch(master_branch)
+ branch.lock_read()
+ for tag, revid in branch.tags.get_tag_dict().items():
+ try:
+ branch.revision_id_to_dotted_revno(revid)
+ except bzrlib.errors.NoSuchRevision:
+ continue
+ if not ref_is_valid(tag):
+ continue
print "? refs/tags/%s" % tag
tags[tag] = revid
- print "@refs/heads/%s HEAD" % 'master'
+ branch.unlock()
+
+ print "@refs/heads/%s HEAD" % master_branch
print
+def clone(path, remote_branch):
+ try:
+ bdir = bzrlib.bzrdir.BzrDir.create(path, possible_transports=transports)
+ except bzrlib.errors.AlreadyControlDirError:
+ bdir = bzrlib.bzrdir.BzrDir.open(path, possible_transports=transports)
+ repo = bdir.find_repository()
+ repo.fetch(remote_branch.repository)
+ return remote_branch.sprout(bdir, repository=repo)
+
+def get_remote_branch(name):
+ remote_branch = bzrlib.branch.Branch.open(branches[name],
+ possible_transports=transports)
+ if isinstance(remote_branch.user_transport, bzrlib.transport.local.LocalTransport):
+ return remote_branch
+
+ branch_path = os.path.join(dirname, 'clone', name)
+
+ try:
+ branch = bzrlib.branch.Branch.open(branch_path,
+ possible_transports=transports)
+ except bzrlib.errors.NotBranchError:
+ # clone
+ branch = clone(branch_path, remote_branch)
+ else:
+ # pull
+ try:
+ branch.pull(remote_branch, overwrite=True)
+ except bzrlib.errors.DivergedBranches:
+ # use remote branch for now
+ return remote_branch
+
+ return branch
+
+def find_branches(repo):
+ transport = repo.bzrdir.root_transport
+
+ for fn in transport.iter_files_recursive():
+ if not fn.endswith('.bzr/branch-format'):
+ continue
+
+ name = subdir = fn[:-len('/.bzr/branch-format')]
+ name = name if name != '' else 'master'
+ name = name.replace('/', '+')
+
+ try:
+ cur = transport.clone(subdir)
+ branch = bzrlib.branch.Branch.open_from_transport(cur)
+ except bzrlib.errors.NotBranchError:
+ continue
+ else:
+ yield name, branch.base
+
def get_repo(url, alias):
- global dirname, peer
+ normal_url = bzrlib.urlutils.normalize_url(url)
+ origin = bzrlib.bzrdir.BzrDir.open(url, possible_transports=transports)
+ is_local = isinstance(origin.transport, bzrlib.transport.local.LocalTransport)
- origin = bzrlib.bzrdir.BzrDir.open(url)
- branch = origin.open_branch()
+ shared_path = os.path.join(gitdir, 'bzr')
+ try:
+ shared_dir = bzrlib.bzrdir.BzrDir.open(shared_path,
+ possible_transports=transports)
+ except bzrlib.errors.NotBranchError:
+ shared_dir = bzrlib.bzrdir.BzrDir.create(shared_path,
+ possible_transports=transports)
+ try:
+ shared_repo = shared_dir.open_repository()
+ except bzrlib.errors.NoRepositoryPresent:
+ shared_repo = shared_dir.create_repository(shared=True)
- if not isinstance(origin.transport, bzrlib.transport.local.LocalTransport):
+ if not is_local:
clone_path = os.path.join(dirname, 'clone')
- remote_branch = branch
- if os.path.exists(clone_path):
- # pull
- d = bzrlib.bzrdir.BzrDir.open(clone_path)
- branch = d.open_branch()
- result = branch.pull(remote_branch, [], None, False)
+ if not os.path.exists(clone_path):
+ os.mkdir(clone_path)
else:
- # clone
- d = origin.sprout(clone_path, None,
- hardlink=True, create_tree_if_local=False,
- source_branch=remote_branch)
- branch = d.open_branch()
- branch.bind(remote_branch)
-
- peer = remote_branch
+ # check and remove old organization
+ try:
+ bdir = bzrlib.bzrdir.BzrDir.open(clone_path,
+ possible_transports=transports)
+ bdir.destroy_repository()
+ except bzrlib.errors.NotBranchError:
+ pass
+ except bzrlib.errors.NoRepositoryPresent:
+ pass
+
+ wanted = get_config('remote.%s.bzr-branches' % alias).rstrip().split(', ')
+ # stupid python
+ wanted = [e for e in wanted if e]
+ if not wanted:
+ wanted = get_config('remote-bzr.branches').rstrip().split(', ')
+ # stupid python
+ wanted = [e for e in wanted if e]
+
+ if not wanted:
+ try:
+ repo = origin.open_repository()
+ if not repo.user_transport.listable():
+ # this repository is not usable for us
+ raise bzrlib.errors.NoRepositoryPresent(repo.bzrdir)
+ except bzrlib.errors.NoRepositoryPresent:
+ wanted = ['master']
+
+ if wanted:
+ def list_wanted(url, wanted):
+ for name in wanted:
+ subdir = name if name != 'master' else ''
+ yield name, bzrlib.urlutils.join(url, subdir)
+
+ branch_list = list_wanted(url, wanted)
else:
- peer = None
+ branch_list = find_branches(repo)
- return branch
+ for name, url in branch_list:
+ if not is_local:
+ peers[name] = url
+ branches[name] = url
+
+ return origin
+
+def fix_path(alias, orig_url):
+ url = urlparse.urlparse(orig_url, 'file')
+ if url.scheme != 'file' or os.path.isabs(url.path):
+ return
+ abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
+ cmd = ['git', 'config', 'remote.%s.url' % alias, "bzr::%s" % abs_url]
+ subprocess.call(cmd)
def main(args):
- global marks, prefix, dirname
+ global marks, prefix, gitdir, dirname
global tags, filenodes
global blob_marks
global parsed_refs
global files_cache
+ global is_tmp
+ global branches, peers
+ global transports
alias = args[1]
url = args[2]
- prefix = 'refs/bzr/%s' % alias
tags = {}
filenodes = {}
blob_marks = {}
parsed_refs = {}
files_cache = {}
+ marks = None
+ branches = {}
+ peers = {}
+ transports = []
+
+ if alias[5:] == url:
+ is_tmp = True
+ alias = hashlib.sha1(alias).hexdigest()
+ else:
+ is_tmp = False
+ prefix = 'refs/bzr/%s' % alias
gitdir = os.environ['GIT_DIR']
dirname = os.path.join(gitdir, 'bzr', alias)
+ if not is_tmp:
+ fix_path(alias, url)
+
if not os.path.exists(dirname):
os.makedirs(dirname)
+ if hasattr(bzrlib.ui.ui_factory, 'be_quiet'):
+ bzrlib.ui.ui_factory.be_quiet(True)
+
repo = get_repo(url, alias)
marks_path = os.path.join(dirname, 'marks-int')
@@ -720,6 +935,13 @@ def main(args):
die('unhandled command: %s' % line)
sys.stdout.flush()
- marks.store()
+def bye():
+ if not marks:
+ return
+ if not is_tmp:
+ marks.store()
+ else:
+ shutil.rmtree(dirname)
+atexit.register(bye)
sys.exit(main(sys.argv))
diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg
index 45f6c80d45..c6026b9bed 100755
--- a/contrib/remote-helpers/git-remote-hg
+++ b/contrib/remote-helpers/git-remote-hg
@@ -8,8 +8,11 @@
# Just copy to your ~/bin, or anywhere in your $PATH.
# Then you can clone with:
# git clone hg::/path/to/mercurial/repo/
+#
+# For remote repositories a local clone is stored in
+# "$GIT_DIR/hg/origin/clone/.hg/".
-from mercurial import hg, ui, bookmarks, context, util, encoding
+from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util
import re
import sys
@@ -18,8 +21,21 @@ import json
import shutil
import subprocess
import urllib
+import atexit
+import urlparse, hashlib
+import time as ptime
#
+# If you want to see Mercurial revisions as Git commit notes:
+# git config core.notesRef refs/notes/hg
+#
+# If you are not in hg-git-compat mode and want to disable the tracking of
+# named branches:
+# git config --global remote-hg.track-branches false
+#
+# If you want the equivalent of hg's clone/pull --insecure option:
+# git config --global remote-hg.insecure true
+#
# If you want to switch to hg-git compatibility mode:
# git config --global remote-hg.hg-git-compat true
#
@@ -36,9 +52,12 @@ import urllib
NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$')
+EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\t<>]+)')
AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.+)?)?$')
RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')
+VERSION = 2
+
def die(msg, *args):
sys.stderr.write('ERROR: %s\n' % (msg % args))
sys.exit(1)
@@ -56,22 +75,62 @@ def hgmode(mode):
m = { '100755': 'x', '120000': 'l' }
return m.get(mode, '')
+def hghex(n):
+ return node.hex(n)
+
+def hgbin(n):
+ return node.bin(n)
+
+def hgref(ref):
+ return ref.replace('___', ' ')
+
+def gitref(ref):
+ return ref.replace(' ', '___')
+
+def check_version(*check):
+ if not hg_version:
+ return True
+ return hg_version >= check
+
def get_config(config):
cmd = ['git', 'config', '--get', config]
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
output, _ = process.communicate()
return output
+def get_config_bool(config, default=False):
+ value = get_config(config).rstrip('\n')
+ if value == "true":
+ return True
+ elif value == "false":
+ return False
+ else:
+ return default
+
class Marks:
- def __init__(self, path):
+ def __init__(self, path, repo):
self.path = path
+ self.repo = repo
+ self.clear()
+ self.load()
+
+ if self.version < VERSION:
+ if self.version == 1:
+ self.upgrade_one()
+
+ # upgraded?
+ if self.version < VERSION:
+ self.clear()
+ self.version = VERSION
+
+ def clear(self):
self.tips = {}
self.marks = {}
self.rev_marks = {}
self.last_mark = 0
-
- self.load()
+ self.version = 0
+ self.last_note = 0
def load(self):
if not os.path.exists(self.path):
@@ -82,12 +141,22 @@ class Marks:
self.tips = tmp['tips']
self.marks = tmp['marks']
self.last_mark = tmp['last-mark']
+ self.version = tmp.get('version', 1)
+ self.last_note = tmp.get('last-note', 0)
for rev, mark in self.marks.iteritems():
- self.rev_marks[mark] = int(rev)
+ self.rev_marks[mark] = rev
+
+ def upgrade_one(self):
+ def get_id(rev):
+ return hghex(self.repo.changelog.node(int(rev)))
+ self.tips = dict((name, get_id(rev)) for name, rev in self.tips.iteritems())
+ self.marks = dict((get_id(rev), mark) for rev, mark in self.marks.iteritems())
+ self.rev_marks = dict((mark, get_id(rev)) for mark, rev in self.rev_marks.iteritems())
+ self.version = 2
def dict(self):
- return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark }
+ return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark, 'version' : self.version, 'last-note' : self.last_note }
def store(self):
json.dump(self.dict(), open(self.path, 'w'))
@@ -96,26 +165,30 @@ class Marks:
return str(self.dict())
def from_rev(self, rev):
- return self.marks[str(rev)]
+ return self.marks[rev]
def to_rev(self, mark):
- return self.rev_marks[mark]
+ return str(self.rev_marks[mark])
+
+ def next_mark(self):
+ self.last_mark += 1
+ return self.last_mark
def get_mark(self, rev):
self.last_mark += 1
- self.marks[str(rev)] = self.last_mark
+ self.marks[rev] = self.last_mark
return self.last_mark
def new_mark(self, rev, mark):
- self.marks[str(rev)] = mark
+ self.marks[rev] = mark
self.rev_marks[mark] = rev
self.last_mark = mark
def is_marked(self, rev):
- return self.marks.has_key(str(rev))
+ return rev in self.marks
def get_tip(self, branch):
- return self.tips.get(branch, 0)
+ return str(self.tips[branch])
def set_tip(self, branch, tip):
self.tips[branch] = tip
@@ -160,8 +233,6 @@ class Parser:
return sys.stdin.read(size)
def get_author(self):
- global bad_mail
-
ex = None
m = RAW_AUTHOR_RE.match(self.line)
if not m:
@@ -188,19 +259,41 @@ class Parser:
tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
return (user, int(date), -tz)
-def export_file(fc):
- d = fc.data()
- print "M %s inline %s" % (gitmode(fc.flags()), fc.path())
- print "data %d" % len(d)
- print d
+def fix_file_path(path):
+ if not os.path.isabs(path):
+ return path
+ return os.path.relpath(path, '/')
+
+def export_files(files):
+ final = []
+ for f in files:
+ fid = node.hex(f.filenode())
+
+ if fid in filenodes:
+ mark = filenodes[fid]
+ else:
+ mark = marks.next_mark()
+ filenodes[fid] = mark
+ d = f.data()
+
+ print "blob"
+ print "mark :%u" % mark
+ print "data %d" % len(d)
+ print d
+
+ path = fix_file_path(f.path())
+ final.append((gitmode(f.flags()), mark, path))
+
+ return final
def get_filechanges(repo, ctx, parent):
modified = set()
added = set()
removed = set()
+ # load earliest manifest first for caching reasons
+ prev = parent.manifest().copy()
cur = ctx.manifest()
- prev = repo[parent].manifest().copy()
for fn in cur:
if fn in prev:
@@ -221,9 +314,14 @@ def fixup_user_git(user):
name = m.group(1)
mail = m.group(2).strip()
else:
- m = NAME_RE.match(user)
+ m = EMAIL_RE.match(user)
if m:
- name = m.group(1).strip()
+ name = m.group(1)
+ mail = m.group(2)
+ else:
+ m = NAME_RE.match(user)
+ if m:
+ name = m.group(1).strip()
return (name, mail)
def fixup_user_hg(user):
@@ -248,8 +346,6 @@ def fixup_user_hg(user):
return (name, mail)
def fixup_user(user):
- global mode, bad_mail
-
if mode == 'git':
name, mail = fixup_user_git(user)
else:
@@ -262,53 +358,102 @@ def fixup_user(user):
return '%s <%s>' % (name, mail)
+def updatebookmarks(repo, peer):
+ remotemarks = peer.listkeys('bookmarks')
+ localmarks = repo._bookmarks
+
+ if not remotemarks:
+ return
+
+ for k, v in remotemarks.iteritems():
+ localmarks[k] = hgbin(v)
+
+ if hasattr(localmarks, 'write'):
+ localmarks.write()
+ else:
+ bookmarks.write(repo)
+
def get_repo(url, alias):
- global dirname, peer
+ global peer
myui = ui.ui()
myui.setconfig('ui', 'interactive', 'off')
+ myui.fout = sys.stderr
+
+ if get_config_bool('remote-hg.insecure'):
+ myui.setconfig('web', 'cacerts', '')
+
+ extensions.loadall(myui)
- if hg.islocal(url):
+ if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
repo = hg.repository(myui, url)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
else:
+ shared_path = os.path.join(gitdir, 'hg')
+
+ # check and upgrade old organization
+ hg_path = os.path.join(shared_path, '.hg')
+ if os.path.exists(shared_path) and not os.path.exists(hg_path):
+ repos = os.listdir(shared_path)
+ for x in repos:
+ local_hg = os.path.join(shared_path, x, 'clone', '.hg')
+ if not os.path.exists(local_hg):
+ continue
+ if not os.path.exists(hg_path):
+ shutil.move(local_hg, hg_path)
+ shutil.rmtree(os.path.join(shared_path, x, 'clone'))
+
+ # setup shared repo (if not there)
+ try:
+ hg.peer(myui, {}, shared_path, create=True)
+ except error.RepoError:
+ pass
+
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
local_path = os.path.join(dirname, 'clone')
if not os.path.exists(local_path):
- peer, dstpeer = hg.clone(myui, {}, url, local_path, update=False, pull=True)
- repo = dstpeer.local()
- else:
- repo = hg.repository(myui, local_path)
+ hg.share(myui, shared_path, local_path, update=False)
+
+ repo = hg.repository(myui, local_path)
+ try:
peer = hg.peer(myui, {}, url)
- repo.pull(peer, heads=None, force=True)
+ except:
+ die('Repository error')
+ repo.pull(peer, heads=None, force=True)
+
+ updatebookmarks(repo, peer)
return repo
def rev_to_mark(rev):
- global marks
- return marks.from_rev(rev)
+ return marks.from_rev(rev.hex())
def mark_to_rev(mark):
- global marks
return marks.to_rev(mark)
def export_ref(repo, name, kind, head):
- global prefix, marks, mode
-
ename = '%s/%s' % (kind, name)
- tip = marks.get_tip(ename)
+ try:
+ tip = marks.get_tip(ename)
+ tip = repo[tip].rev()
+ except:
+ tip = 0
- # mercurial takes too much time checking this
- if tip and tip == head.rev():
- # nothing to do
- return
revs = xrange(tip, head.rev() + 1)
- count = 0
-
- revs = [rev for rev in revs if not marks.is_marked(rev)]
+ total = len(revs)
for rev in revs:
c = repo[rev]
- (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(c.node())
+ node = c.node()
+
+ if marks.is_marked(c.hex()):
+ continue
+
+ (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
rev_branch = extra['branch']
author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
@@ -318,7 +463,7 @@ def export_ref(repo, name, kind, head):
else:
committer = author
- parents = [p for p in repo.changelog.parentrevs(rev) if p >= 0]
+ parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]
if len(parents) == 0:
modified = c.manifest().keys()
@@ -357,8 +502,10 @@ def export_ref(repo, name, kind, head):
if len(parents) == 0 and rev:
print 'reset %s/%s' % (prefix, ename)
+ modified_final = export_files(c.filectx(f) for f in modified)
+
print "commit %s/%s" % (prefix, ename)
- print "mark :%d" % (marks.get_mark(rev))
+ print "mark :%d" % (marks.get_mark(c.hex()))
print "author %s" % (author)
print "committer %s" % (committer)
print "data %d" % (len(desc))
@@ -369,29 +516,53 @@ def export_ref(repo, name, kind, head):
if len(parents) > 1:
print "merge :%s" % (rev_to_mark(parents[1]))
- for f in modified:
- export_file(c.filectx(f))
for f in removed:
- print "D %s" % (f)
+ print "D %s" % (fix_file_path(f))
+ for f in modified_final:
+ print "M %s :%u %s" % f
print
- count += 1
- if (count % 100 == 0):
- print "progress revision %d '%s' (%d/%d)" % (rev, name, count, len(revs))
- print "#############################################################"
+ progress = (rev - tip)
+ if (progress % 100 == 0):
+ print "progress revision %d '%s' (%d/%d)" % (rev, name, progress, total)
# make sure the ref is updated
print "reset %s/%s" % (prefix, ename)
- print "from :%u" % rev_to_mark(rev)
+ print "from :%u" % rev_to_mark(head)
print
- marks.set_tip(ename, rev)
+ pending_revs = set(revs) - notes
+ if pending_revs:
+ note_mark = marks.next_mark()
+ ref = "refs/notes/hg"
+
+ print "commit %s" % ref
+ print "mark :%d" % (note_mark)
+ print "committer remote-hg <> %s" % (ptime.strftime('%s %z'))
+ desc = "Notes for %s\n" % (name)
+ print "data %d" % (len(desc))
+ print desc
+ if marks.last_note:
+ print "from :%u" % marks.last_note
+
+ for rev in pending_revs:
+ notes.add(rev)
+ c = repo[rev]
+ print "N inline :%u" % rev_to_mark(c)
+ msg = c.hex()
+ print "data %d" % (len(msg))
+ print msg
+ print
+
+ marks.last_note = note_mark
+
+ marks.set_tip(ename, head.hex())
def export_tag(repo, tag):
- export_ref(repo, tag, 'tags', repo[tag])
+ export_ref(repo, tag, 'tags', repo[hgref(tag)])
def export_bookmark(repo, bmark):
- head = bmarks[bmark]
+ head = bmarks[hgref(bmark)]
export_ref(repo, bmark, 'bookmarks', head)
def export_branch(repo, branch):
@@ -400,12 +571,9 @@ def export_branch(repo, branch):
export_ref(repo, branch, 'branches', head)
def export_head(repo):
- global g_head
export_ref(repo, g_head[0], 'bookmarks', g_head[1])
def do_capabilities(parser):
- global prefix, dirname
-
print "import"
print "export"
print "refspec refs/heads/branches/*:%s/branches/*" % prefix
@@ -417,73 +585,68 @@ def do_capabilities(parser):
if os.path.exists(path):
print "*import-marks %s" % path
print "*export-marks %s" % path
+ print "option"
print
-def get_branch_tip(repo, branch):
- global branches
+def branch_tip(branch):
+ return branches[branch][-1]
- heads = branches.get(branch, None)
+def get_branch_tip(repo, branch):
+ heads = branches.get(hgref(branch), None)
if not heads:
return None
# verify there's only one head
if (len(heads) > 1):
warn("Branch '%s' has more than one head, consider merging" % branch)
- # older versions of mercurial don't have this
- if hasattr(repo, "branchtip"):
- return repo.branchtip(branch)
+ return branch_tip(hgref(branch))
return heads[0]
def list_head(repo, cur):
- global g_head, bmarks
+ global g_head, fake_bmark
- head = bookmarks.readcurrent(repo)
- if head:
- node = repo[head]
- else:
- # fake bookmark from current branch
- head = cur
- node = repo['.']
- if not node:
- node = repo['tip']
- if not node:
- return
- if head == 'default':
- head = 'master'
- bmarks[head] = node
+ if 'default' not in branches:
+ # empty repo
+ return
+
+ node = repo[branch_tip('default')]
+ head = 'master' if not 'master' in bmarks else 'default'
+ fake_bmark = head
+ bmarks[head] = node
+ head = gitref(head)
print "@refs/heads/%s HEAD" % head
g_head = (head, node)
def do_list(parser):
- global branches, bmarks, mode, track_branches
-
repo = parser.repo
for bmark, node in bookmarks.listbookmarks(repo).iteritems():
bmarks[bmark] = repo[node]
cur = repo.dirstate.branch()
+ orig = peer if peer else repo
+
+ for branch, heads in orig.branchmap().iteritems():
+ # only open heads
+ heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
+ if heads:
+ branches[branch] = heads
list_head(repo, cur)
if track_branches:
- for branch in repo.branchmap():
- heads = repo.branchheads(branch)
- if len(heads):
- branches[branch] = heads
-
for branch in branches:
- print "? refs/heads/branches/%s" % branch
+ print "? refs/heads/branches/%s" % gitref(branch)
for bmark in bmarks:
- print "? refs/heads/%s" % bmark
+ print "? refs/heads/%s" % gitref(bmark)
for tag, node in repo.tagslist():
if tag == 'tip':
continue
- print "? refs/tags/%s" % tag
+ print "? refs/tags/%s" % gitref(tag)
print
@@ -496,6 +659,7 @@ def do_import(parser):
if os.path.exists(path):
print "feature import-marks=%s" % path
print "feature export-marks=%s" % path
+ print "feature force"
sys.stdout.flush()
tmp = encoding.encoding
@@ -524,15 +688,12 @@ def do_import(parser):
print 'done'
def parse_blob(parser):
- global blob_marks
-
parser.next()
mark = parser.get_mark()
parser.next()
data = parser.get_data()
blob_marks[mark] = data
parser.next()
- return
def get_merge_files(repo, p1, p2, files):
for e in repo[p1].files():
@@ -542,10 +703,12 @@ def get_merge_files(repo, p1, p2, files):
f = { 'ctx' : repo[p1][e] }
files[e] = f
-def parse_commit(parser):
- global marks, blob_marks, bmarks, parsed_refs
- global mode
+def c_style_unescape(string):
+ if string[0] == string[-1] == '"':
+ return string.decode('string-escape')[1:-1]
+ return string
+def parse_commit(parser):
from_mark = merge_mark = None
ref = parser[1]
@@ -568,6 +731,10 @@ def parse_commit(parser):
if parser.check('merge'):
die('octopus merges are not supported yet')
+ # fast-export adds an extra newline
+ if data[-1] == '\n':
+ data = data[:-1]
+
files = {}
for line in parser:
@@ -576,12 +743,18 @@ def parse_commit(parser):
mark = int(mark_ref[1:])
f = { 'mode' : hgmode(m), 'data' : blob_marks[mark] }
elif parser.check('D'):
- t, path = line.split(' ')
+ t, path = line.split(' ', 1)
f = { 'deleted' : True }
else:
die('Unknown file command: %s' % line)
+ path = c_style_unescape(path)
files[path] = f
+ # only export the commits if we are on an internal proxy repo
+ if dry_run and not peer:
+ parsed_refs[ref] = None
+ return
+
def getfilectx(repo, memctx, f):
of = files[f]
if 'deleted' in of:
@@ -603,14 +776,14 @@ def parse_commit(parser):
extra['committer'] = "%s %u %u" % committer
if from_mark:
- p1 = repo.changelog.node(mark_to_rev(from_mark))
+ p1 = mark_to_rev(from_mark)
else:
- p1 = '\0' * 20
+ p1 = '0' * 40
if merge_mark:
- p2 = repo.changelog.node(mark_to_rev(merge_mark))
+ p2 = mark_to_rev(merge_mark)
else:
- p2 = '\0' * 20
+ p2 = '0' * 40
#
# If files changed from any of the parents, hg wants to know, but in git if
@@ -619,11 +792,16 @@ def parse_commit(parser):
if merge_mark:
get_merge_files(repo, p1, p2, files)
+ # Check if the ref is supposed to be a named branch
+ if ref.startswith('refs/heads/branches/'):
+ branch = ref[len('refs/heads/branches/'):]
+ extra['branch'] = hgref(branch)
+
if mode == 'hg':
i = data.find('\n--HG--\n')
if i >= 0:
tmp = data[i + len('\n--HG--\n'):].strip()
- for k, v in [e.split(' : ') for e in tmp.split('\n')]:
+ for k, v in [e.split(' : ', 1) for e in tmp.split('\n')]:
if k == 'rename':
old, new = v.split(' => ', 1)
files[new]['rename'] = old
@@ -641,15 +819,12 @@ def parse_commit(parser):
tmp = encoding.encoding
encoding.encoding = 'utf-8'
- node = repo.commitctx(ctx)
+ node = hghex(repo.commitctx(ctx))
encoding.encoding = tmp
- rev = repo[node].rev()
-
parsed_refs[ref] = node
-
- marks.new_mark(rev, commit_mark)
+ marks.new_mark(node, commit_mark)
def parse_reset(parser):
ref = parser[1]
@@ -663,8 +838,11 @@ def parse_reset(parser):
from_mark = parser.get_mark()
parser.next()
- node = parser.repo.changelog.node(mark_to_rev(from_mark))
- parsed_refs[ref] = node
+ try:
+ rev = mark_to_rev(from_mark)
+ except KeyError:
+ rev = None
+ parsed_refs[ref] = rev
def parse_tag(parser):
name = parser[1]
@@ -676,10 +854,173 @@ def parse_tag(parser):
data = parser.get_data()
parser.next()
- # nothing to do
+ parsed_tags[name] = (tagger, data)
+
+def write_tag(repo, tag, node, msg, author):
+ branch = repo[node].branch()
+ tip = branch_tip(branch)
+ tip = repo[tip]
+
+ def getfilectx(repo, memctx, f):
+ try:
+ fctx = tip.filectx(f)
+ data = fctx.data()
+ except error.ManifestLookupError:
+ data = ""
+ content = data + "%s %s\n" % (node, tag)
+ return context.memfilectx(f, content, False, False, None)
+
+ p1 = tip.hex()
+ p2 = '0' * 40
+ if author:
+ user, date, tz = author
+ date_tz = (date, tz)
+ else:
+ cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ m = re.match('^.* <.*>', output)
+ if m:
+ user = m.group(0)
+ else:
+ user = repo.ui.username()
+ date_tz = None
+
+ ctx = context.memctx(repo, (p1, p2), msg,
+ ['.hgtags'], getfilectx,
+ user, date_tz, {'branch' : branch})
+
+ tmp = encoding.encoding
+ encoding.encoding = 'utf-8'
+
+ tagnode = repo.commitctx(ctx)
+
+ encoding.encoding = tmp
+
+ return (tagnode, branch)
+
+def checkheads_bmark(repo, ref, ctx):
+ bmark = ref[len('refs/heads/'):]
+ if not bmark in bmarks:
+ # new bmark
+ return True
+
+ ctx_old = bmarks[bmark]
+ ctx_new = ctx
+ if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ return False
+
+ return True
+
+def checkheads(repo, remote, p_revs):
+
+ remotemap = remote.branchmap()
+ if not remotemap:
+ # empty repo
+ return True
+
+ new = {}
+ ret = True
+
+ for node, ref in p_revs.iteritems():
+ ctx = repo[node]
+ branch = ctx.branch()
+ if not branch in remotemap:
+ # new branch
+ continue
+ if not ref.startswith('refs/heads/branches'):
+ if ref.startswith('refs/heads/'):
+ if not checkheads_bmark(repo, ref, ctx):
+ ret = False
+
+ # only check branches
+ continue
+ new.setdefault(branch, []).append(ctx.rev())
+
+ for branch, heads in new.iteritems():
+ old = [repo.changelog.rev(x) for x in remotemap[branch]]
+ for rev in heads:
+ if check_version(2, 3):
+ ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
+ else:
+ ancestors = repo.changelog.ancestors(rev)
+ found = False
+
+ for x in old:
+ if x in ancestors:
+ found = True
+ break
+
+ if found:
+ continue
+
+ node = repo.changelog.node(rev)
+ ref = p_revs[node]
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ ret = False
+
+ return ret
+
+def push_unsafe(repo, remote, parsed_refs, p_revs):
+
+ force = force_push
+
+ fci = discovery.findcommonincoming
+ commoninc = fci(repo, remote, force=force)
+ common, _, remoteheads = commoninc
+
+ if not checkheads(repo, remote, p_revs):
+ return None
+
+ cg = repo.getbundle('push', heads=list(p_revs), common=common)
+
+ unbundle = remote.capable('unbundle')
+ if unbundle:
+ if force:
+ remoteheads = ['force']
+ return remote.unbundle(cg, remoteheads, 'push')
+ else:
+ return remote.addchangegroup(cg, 'push', repo.url())
+
+def push(repo, remote, parsed_refs, p_revs):
+ if hasattr(remote, 'canpush') and not remote.canpush():
+ print "error cannot push"
+
+ if not p_revs:
+ # nothing to push
+ return
+
+ lock = None
+ unbundle = remote.capable('unbundle')
+ if not unbundle:
+ lock = remote.lock()
+ try:
+ ret = push_unsafe(repo, remote, parsed_refs, p_revs)
+ finally:
+ if lock is not None:
+ lock.release()
+
+ return ret
+
+def check_tip(ref, kind, name, heads):
+ try:
+ ename = '%s/%s' % (kind, name)
+ tip = marks.get_tip(ename)
+ except KeyError:
+ return True
+ else:
+ return tip in heads
def do_export(parser):
- global parsed_refs, bmarks, peer
+ p_bmarks = []
+ p_revs = {}
parser.next()
@@ -697,58 +1038,140 @@ def do_export(parser):
else:
die('unhandled export command: %s' % line)
+ need_fetch = False
+
for ref, node in parsed_refs.iteritems():
+ bnode = hgbin(node) if node else None
if ref.startswith('refs/heads/branches'):
- pass
+ branch = ref[len('refs/heads/branches/'):]
+ if branch in branches and bnode in branches[branch]:
+ # up to date
+ continue
+
+ if peer:
+ remotemap = peer.branchmap()
+ if remotemap and branch in remotemap:
+ heads = [hghex(e) for e in remotemap[branch]]
+ if not check_tip(ref, 'branches', branch, heads):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
+ print "ok %s" % ref
elif ref.startswith('refs/heads/'):
bmark = ref[len('refs/heads/'):]
- if bmark in bmarks:
- old = bmarks[bmark].hex()
- else:
- old = ''
- if not bookmarks.pushbookmark(parser.repo, bmark, old, node):
+ new = node
+ old = bmarks[bmark].hex() if bmark in bmarks else ''
+
+ if old == new:
continue
+
+ print "ok %s" % ref
+ if bmark != fake_bmark and \
+ not (bmark == 'master' and bmark not in parser.repo._bookmarks):
+ p_bmarks.append((ref, bmark, old, new))
+
+ if peer:
+ remote_old = peer.listkeys('bookmarks').get(bmark)
+ if remote_old:
+ if not check_tip(ref, 'bookmarks', bmark, remote_old):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
elif ref.startswith('refs/tags/'):
+ if dry_run:
+ print "ok %s" % ref
+ continue
tag = ref[len('refs/tags/'):]
- parser.repo.tag([tag], node, None, True, None, {})
+ tag = hgref(tag)
+ author, msg = parsed_tags.get(tag, (None, None))
+ if mode == 'git':
+ if not msg:
+ msg = 'Added tag %s for changeset %s' % (tag, node[:12])
+ tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
+ p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
+ else:
+ fp = parser.repo.opener('localtags', 'a')
+ fp.write('%s %s\n' % (node, tag))
+ fp.close()
+ p_revs[bnode] = ref
+ print "ok %s" % ref
else:
# transport-helper/fast-export bugs
continue
- print "ok %s" % ref
- print
+ if need_fetch:
+ print
+ return
+
+ if dry_run:
+ if peer and not force_push:
+ checkheads(parser.repo, peer, p_revs)
+ print
+ return
if peer:
- parser.repo.push(peer, force=False)
+ if not push(parser.repo, peer, parsed_refs, p_revs):
+ # do not update bookmarks
+ print
+ return
+
+ # update remote bookmarks
+ remote_bmarks = peer.listkeys('bookmarks')
+ for ref, bmark, old, new in p_bmarks:
+ if force_push:
+ old = remote_bmarks.get(bmark, '')
+ if not peer.pushkey('bookmarks', bmark, old, new):
+ print "error %s" % ref
+ else:
+ # update local bookmarks
+ for ref, bmark, old, new in p_bmarks:
+ if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
+ print "error %s" % ref
+
+ print
+
+def do_option(parser):
+ global dry_run, force_push
+ _, key, value = parser.line.split(' ')
+ if key == 'dry-run':
+ dry_run = (value == 'true')
+ print 'ok'
+ elif key == 'force':
+ force_push = (value == 'true')
+ print 'ok'
+ else:
+ print 'unsupported'
def fix_path(alias, repo, orig_url):
- repo_url = util.url(repo.url())
- url = util.url(orig_url)
- if str(url) == str(repo_url):
+ url = urlparse.urlparse(orig_url, 'file')
+ if url.scheme != 'file' or os.path.isabs(os.path.expanduser(url.path)):
return
- cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % repo_url]
+ abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
+ cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % abs_url]
subprocess.call(cmd)
def main(args):
- global prefix, dirname, branches, bmarks
+ global prefix, gitdir, dirname, branches, bmarks
global marks, blob_marks, parsed_refs
global peer, mode, bad_mail, bad_name
- global track_branches
+ global track_branches, force_push, is_tmp
+ global parsed_tags
+ global filenodes
+ global fake_bmark, hg_version
+ global dry_run
+ global notes, alias
alias = args[1]
url = args[2]
peer = None
- hg_git_compat = False
- track_branches = True
- try:
- if get_config('remote-hg.hg-git-compat') == 'true\n':
- hg_git_compat = True
- track_branches = False
- if get_config('remote-hg.track-branches') == 'false\n':
- track_branches = False
- except subprocess.CalledProcessError:
- pass
+ hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
+ track_branches = get_config_bool('remote-hg.track-branches', True)
+ force_push = False
if hg_git_compat:
mode = 'hg'
@@ -761,7 +1184,7 @@ def main(args):
if alias[4:] == url:
is_tmp = True
- alias = util.sha1(alias).hexdigest()
+ alias = hashlib.sha1(alias).hexdigest()
else:
is_tmp = False
@@ -771,6 +1194,16 @@ def main(args):
bmarks = {}
blob_marks = {}
parsed_refs = {}
+ marks = None
+ parsed_tags = {}
+ filenodes = {}
+ fake_bmark = None
+ try:
+ hg_version = tuple(int(e) for e in util.version().split('.'))
+ except:
+ hg_version = None
+ dry_run = False
+ notes = set()
repo = get_repo(url, alias)
prefix = 'refs/hg/%s' % alias
@@ -778,11 +1211,12 @@ def main(args):
if not is_tmp:
fix_path(alias, peer or repo, url)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
-
marks_path = os.path.join(dirname, 'marks-hg')
- marks = Marks(marks_path)
+ marks = Marks(marks_path, repo)
+
+ if sys.platform == 'win32':
+ import msvcrt
+ msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
parser = Parser(repo)
for line in parser:
@@ -794,13 +1228,19 @@ def main(args):
do_import(parser)
elif parser.check('export'):
do_export(parser)
+ elif parser.check('option'):
+ do_option(parser)
else:
die('unhandled command: %s' % line)
sys.stdout.flush()
+def bye():
+ if not marks:
+ return
if not is_tmp:
marks.store()
else:
shutil.rmtree(dirname)
+atexit.register(bye)
sys.exit(main(sys.argv))
diff --git a/contrib/remote-helpers/test-bzr.sh b/contrib/remote-helpers/test-bzr.sh
index 70aa8a010a..5c50251783 100755
--- a/contrib/remote-helpers/test-bzr.sh
+++ b/contrib/remote-helpers/test-bzr.sh
@@ -7,137 +7,387 @@ test_description='Test remote-bzr'
. ./test-lib.sh
-if ! test_have_prereq PYTHON; then
+if ! test_have_prereq PYTHON
+then
skip_all='skipping remote-bzr tests; python not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import bzrlib'; then
+if ! python -c 'import bzrlib'
+then
skip_all='skipping remote-bzr tests; bzr not available'
test_done
fi
-cmd='
-import bzrlib
-bzrlib.initialize()
-import bzrlib.plugin
-bzrlib.plugin.load_plugins()
-import bzrlib.plugins.fastimport
-'
-
-if ! "$PYTHON_PATH" -c "$cmd"; then
- echo "consider setting BZR_PLUGIN_PATH=$HOME/.bazaar/plugins" 1>&2
- skip_all='skipping remote-bzr tests; bzr-fastimport not available'
- test_done
-fi
-
check () {
- (cd $1 &&
- git log --format='%s' -1 &&
- git symbolic-ref HEAD) > actual &&
- (echo $2 &&
- echo "refs/heads/$3") > expected &&
+ echo $3 >expected &&
+ git --git-dir=$1/.git log --format='%s' -1 $2 >actual &&
test_cmp expected actual
}
bzr whoami "A U Thor <author@example.com>"
test_expect_success 'cloning' '
- (bzr init bzrrepo &&
- cd bzrrepo &&
- echo one > content &&
- bzr add content &&
- bzr commit -m one
- ) &&
-
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
- check gitrepo one master
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one >content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+ check gitrepo HEAD one
'
test_expect_success 'pulling' '
- (cd bzrrepo &&
- echo two > content &&
- bzr commit -m two
- ) &&
+ (
+ cd bzrrepo &&
+ echo two >content &&
+ bzr commit -m two
+ ) &&
- (cd gitrepo && git pull) &&
+ (cd gitrepo && git pull) &&
- check gitrepo two master
+ check gitrepo HEAD two
'
test_expect_success 'pushing' '
- (cd gitrepo &&
- echo three > content &&
- git commit -a -m three &&
- git push
- ) &&
-
- echo three > expected &&
- cat bzrrepo/content > actual &&
- test_cmp expected actual
+ (
+ cd gitrepo &&
+ echo three >content &&
+ git commit -a -m three &&
+ git push
+ ) &&
+
+ echo three >expected &&
+ cat bzrrepo/content >actual &&
+ test_cmp expected actual
'
test_expect_success 'roundtrip' '
- (cd gitrepo &&
- git pull &&
- git log --format="%s" -1 origin/master > actual) &&
- echo three > expected &&
- test_cmp expected actual &&
+ (
+ cd gitrepo &&
+ git pull &&
+ git log --format="%s" -1 origin/master >actual
+ ) &&
+ echo three >expected &&
+ test_cmp expected actual &&
- (cd gitrepo && git push && git pull) &&
+ (cd gitrepo && git push && git pull) &&
- (cd bzrrepo &&
- echo four > content &&
- bzr commit -m four
- ) &&
+ (
+ cd bzrrepo &&
+ echo four >content &&
+ bzr commit -m four
+ ) &&
- (cd gitrepo && git pull && git push) &&
+ (cd gitrepo && git pull && git push) &&
- check gitrepo four master &&
+ check gitrepo HEAD four &&
- (cd gitrepo &&
- echo five > content &&
- git commit -a -m five &&
- git push && git pull
- ) &&
+ (
+ cd gitrepo &&
+ echo five >content &&
+ git commit -a -m five &&
+ git push && git pull
+ ) &&
- (cd bzrrepo && bzr revert) &&
+ (cd bzrrepo && bzr revert) &&
- echo five > expected &&
- cat bzrrepo/content > actual &&
- test_cmp expected actual
+ echo five >expected &&
+ cat bzrrepo/content >actual &&
+ test_cmp expected actual
'
-cat > expected <<EOF
+cat >expected <<\EOF
100644 blob 54f9d6da5c91d556e6b54340b1327573073030af content
100755 blob 68769579c3eaadbe555379b9c3538e6628bae1eb executable
120000 blob 6b584e8ece562ebffc15d38808cd6b98fc3d97ea link
EOF
test_expect_success 'special modes' '
- (cd bzrrepo &&
- echo exec > executable
- chmod +x executable &&
- bzr add executable
- bzr commit -m exec &&
- ln -s content link
- bzr add link
- bzr commit -m link &&
- mkdir dir &&
- bzr add dir &&
- bzr commit -m dir) &&
-
- (cd gitrepo &&
- git pull
- git ls-tree HEAD > ../actual) &&
-
- test_cmp expected actual &&
-
- (cd gitrepo &&
- git cat-file -p HEAD:link > ../actual) &&
-
- echo -n content > expected &&
- test_cmp expected actual
+ (
+ cd bzrrepo &&
+ echo exec >executable
+ chmod +x executable &&
+ bzr add executable
+ bzr commit -m exec &&
+ ln -s content link
+ bzr add link
+ bzr commit -m link &&
+ mkdir dir &&
+ bzr add dir &&
+ bzr commit -m dir
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull
+ git ls-tree HEAD >../actual
+ ) &&
+
+ test_cmp expected actual &&
+
+ (
+ cd gitrepo &&
+ git cat-file -p HEAD:link >../actual
+ ) &&
+
+ printf content >expected &&
+ test_cmp expected actual
+'
+
+cat >expected <<\EOF
+100644 blob 54f9d6da5c91d556e6b54340b1327573073030af content
+100755 blob 68769579c3eaadbe555379b9c3538e6628bae1eb executable
+120000 blob 6b584e8ece562ebffc15d38808cd6b98fc3d97ea link
+040000 tree 35c0caa46693cef62247ac89a680f0c5ce32b37b movedir-new
+EOF
+
+test_expect_success 'moving directory' '
+ (
+ cd bzrrepo &&
+ mkdir movedir &&
+ echo one >movedir/one &&
+ echo two >movedir/two &&
+ bzr add movedir &&
+ bzr commit -m movedir &&
+ bzr mv movedir movedir-new &&
+ bzr commit -m movedir-new
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git ls-tree HEAD >../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'different authors' '
+ (
+ cd bzrrepo &&
+ echo john >>content &&
+ bzr commit -m john \
+ --author "Jane Rey <jrey@example.com>" \
+ --author "John Doe <jdoe@example.com>"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git show --format="%an <%ae>, %cn <%ce>" --quiet >../actual
+ ) &&
+
+ echo "Jane Rey <jrey@example.com>, A U Thor <author@example.com>" >expected &&
+ test_cmp expected actual
+'
+
+# cleanup previous stuff
+rm -rf bzrrepo gitrepo
+
+test_expect_success 'fetch utf-8 filenames' '
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo test >>"ærø" &&
+ bzr add "ærø" &&
+ echo test >>"ø~?" &&
+ bzr add "ø~?" &&
+ bzr commit -m add-utf-8 &&
+ echo test >>"ærø" &&
+ bzr commit -m test-utf-8 &&
+ bzr rm "ø~?" &&
+ bzr mv "ærø" "ø~?" &&
+ bzr commit -m bzr-mv-utf-8
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git -c core.quotepath=false ls-files >../actual
+ ) &&
+ echo "ø~?" >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push utf-8 filenames' '
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ mkdir -p tmp && cd tmp &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo one >>content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+
+ echo test >>"ærø" &&
+ git add "ærø" &&
+ git commit -m utf-8 &&
+
+ git push
+ ) &&
+
+ (cd bzrrepo && bzr ls >../actual) &&
+ printf "content\nærø\n" >expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'pushing a merge' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one >content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+
+ (
+ cd bzrrepo &&
+ echo two >content &&
+ bzr commit -m two
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo three >content &&
+ git commit -a -m three &&
+ git fetch &&
+ git merge origin/master || true &&
+ echo three >content &&
+ git commit -a --no-edit &&
+ git push
+ ) &&
+
+ echo three >expected &&
+ cat bzrrepo/content >actual &&
+ test_cmp expected actual
+'
+
+cat >expected <<\EOF
+origin/HEAD
+origin/branch
+origin/trunk
+EOF
+
+test_expect_success 'proper bzr repo' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ bzr init-repo bzrrepo &&
+
+ (
+ bzr init bzrrepo/trunk &&
+ cd bzrrepo/trunk &&
+ echo one >>content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ bzr branch bzrrepo/trunk bzrrepo/branch &&
+ cd bzrrepo/branch &&
+ echo two >>content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git for-each-ref --format "%(refname:short)" refs/remotes/origin >../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'strip' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo one >>content &&
+ bzr add content &&
+ bzr commit -m one &&
+
+ echo two >>content &&
+ bzr commit -m two
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+
+ (
+ cd bzrrepo &&
+ bzr uncommit --force &&
+
+ echo three >>content &&
+ bzr commit -m three &&
+
+ echo four >>content &&
+ bzr commit -m four &&
+ bzr log --line | sed -e "s/^[0-9][0-9]*: //" >../expected
+ ) &&
+
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%an %ad %s" --date=short origin/master >../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'export utf-8 authors' '
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C && unset GIT_COMMITTER_NAME" &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ GIT_COMMITTER_NAME="Grégoire"
+ export GIT_COMMITTER_NAME
+
+ bzr init bzrrepo &&
+
+ (
+ git init gitrepo &&
+ cd gitrepo &&
+ echo greg >>content &&
+ git add content &&
+ git commit -m one &&
+ git remote add bzr "bzr::../bzrrepo" &&
+ git push bzr
+ ) &&
+
+ (
+ cd bzrrepo &&
+ bzr log | grep "^committer: " >../actual
+ ) &&
+
+ echo "committer: Grégoire <committer@example.com>" >expected &&
+ test_cmp expected actual
'
test_done
diff --git a/contrib/remote-helpers/test-hg-bidi.sh b/contrib/remote-helpers/test-hg-bidi.sh
index 2a5d85dd72..e24c51daad 100755
--- a/contrib/remote-helpers/test-hg-bidi.sh
+++ b/contrib/remote-helpers/test-hg-bidi.sh
@@ -10,20 +10,21 @@ test_description='Test bidirectionality of remote-hg'
. ./test-lib.sh
-if ! test_have_prereq PYTHON; then
+if ! test_have_prereq PYTHON
+then
skip_all='skipping remote-hg tests; python not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'
+then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
# clone to a git repo
git_clone () {
- hg -R $1 bookmark -f -r tip master &&
- git clone -q "hg::$PWD/$1" $2
+ git clone -q "hg::$1" $2
}
# clone to an hg repo
@@ -31,7 +32,7 @@ hg_clone () {
(
hg init $2 &&
cd $1 &&
- git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
) &&
(cd $2 && hg -q update)
@@ -41,16 +42,15 @@ hg_clone () {
hg_push () {
(
cd $2
- old=$(git symbolic-ref --short HEAD)
git checkout -q -b tmp &&
- git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
- git checkout -q $old &&
- git branch -q -D tmp 2> /dev/null || true
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git checkout -q @{-1} &&
+ git branch -q -D tmp 2>/dev/null || true
)
}
hg_log () {
- hg -R $1 log --graph --debug | grep -v 'tag: *default/'
+ hg -R $1 log --graph --debug
}
setup () {
@@ -62,8 +62,11 @@ setup () {
echo "commit = -d \"0 0\""
echo "debugrawcommit = -d \"0 0\""
echo "tag = -d \"0 0\""
- ) >> "$HOME"/.hgrc &&
+ echo "[extensions]"
+ echo "graphlog ="
+ ) >>"$HOME"/.hgrc &&
git config --global remote-hg.hg-git-compat true
+ git config --global remote-hg.track-branches true
HGEDITOR=/usr/bin/true
GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
@@ -74,29 +77,28 @@ setup () {
setup
test_expect_success 'encoding' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add älphà" &&
GIT_AUTHOR_NAME="tést èncödîng" &&
export GIT_AUTHOR_NAME &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta" &&
- echo gamma > gamma &&
+ echo gamma >gamma &&
git add gamma &&
git commit -m "add gämmâ" &&
: TODO git config i18n.commitencoding latin-1 &&
- echo delta > delta &&
+ echo delta >delta &&
git add delta &&
git commit -m "add déltà"
) &&
@@ -105,27 +107,26 @@ test_expect_success 'encoding' '
git_clone hgrepo gitrepo2 &&
hg_clone gitrepo2 hgrepo2 &&
- HGENCODING=utf-8 hg_log hgrepo > expected &&
- HGENCODING=utf-8 hg_log hgrepo2 > actual &&
+ HGENCODING=utf-8 hg_log hgrepo >expected &&
+ HGENCODING=utf-8 hg_log hgrepo2 >actual &&
test_cmp expected actual
'
test_expect_success 'file removal' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta"
mkdir foo &&
- echo blah > foo/bar &&
+ echo blah >foo/bar &&
git add foo &&
git commit -m "add foo" &&
git rm alpha &&
@@ -138,26 +139,25 @@ test_expect_success 'file removal' '
git_clone hgrepo gitrepo2 &&
hg_clone gitrepo2 hgrepo2 &&
- hg_log hgrepo > expected &&
- hg_log hgrepo2 > actual &&
+ hg_log hgrepo >expected &&
+ hg_log hgrepo2 >actual &&
test_cmp expected actual
'
test_expect_success 'git tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
git config receive.denyCurrentBranch ignore &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
git tag alpha &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta" &&
git tag -a -m "added tag beta" beta
@@ -167,21 +167,20 @@ test_expect_success 'git tags' '
git_clone hgrepo gitrepo2 &&
hg_clone gitrepo2 hgrepo2 &&
- hg_log hgrepo > expected &&
- hg_log hgrepo2 > actual &&
+ hg_log hgrepo >expected &&
+ hg_log hgrepo2 >actual &&
test_cmp expected actual
'
test_expect_success 'hg branch' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -q -m "add alpha" &&
git checkout -q -b not-master
@@ -191,7 +190,7 @@ test_expect_success 'hg branch' '
hg_clone gitrepo hgrepo &&
cd hgrepo &&
- hg -q co master &&
+ hg -q co default &&
hg mv alpha beta &&
hg -q commit -m "rename alpha to beta" &&
hg branch gamma | grep -v "permanent and global" &&
@@ -201,24 +200,23 @@ test_expect_success 'hg branch' '
hg_push hgrepo gitrepo &&
hg_clone gitrepo hgrepo2 &&
- : TODO, avoid "master" bookmark &&
- (cd hgrepo2 && hg checkout gamma) &&
+ : Back to the common revision &&
+ (cd hgrepo && hg checkout default) &&
- hg_log hgrepo > expected &&
- hg_log hgrepo2 > actual &&
+ hg_log hgrepo >expected &&
+ hg_log hgrepo2 >actual &&
test_cmp expected actual
'
test_expect_success 'hg tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
git checkout -q -b not-master
@@ -228,15 +226,15 @@ test_expect_success 'hg tags' '
hg_clone gitrepo hgrepo &&
cd hgrepo &&
- hg co master &&
+ hg co default &&
hg tag alpha
) &&
hg_push hgrepo gitrepo &&
hg_clone gitrepo hgrepo2 &&
- hg_log hgrepo > expected &&
- hg_log hgrepo2 > actual &&
+ hg_log hgrepo >expected &&
+ hg_log hgrepo2 >actual &&
test_cmp expected actual
'
diff --git a/contrib/remote-helpers/test-hg-hg-git.sh b/contrib/remote-helpers/test-hg-hg-git.sh
index 9aaf043669..6dcd95d10f 100755
--- a/contrib/remote-helpers/test-hg-hg-git.sh
+++ b/contrib/remote-helpers/test-hg-hg-git.sh
@@ -10,33 +10,37 @@ test_description='Test remote-hg output compared to hg-git'
. ./test-lib.sh
-if ! test_have_prereq PYTHON; then
+if ! test_have_prereq PYTHON
+then
skip_all='skipping remote-hg tests; python not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'
+then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import hggit'; then
+if ! python -c 'import hggit'
+then
skip_all='skipping remote-hg tests; hg-git not available'
test_done
fi
# clone to a git repo with git
git_clone_git () {
- hg -R $1 bookmark -f -r tip master &&
- git clone -q "hg::$PWD/$1" $2
+ git clone -q "hg::$1" $2 &&
+ (cd $2 && git checkout master && git branch -D default)
}
# clone to an hg repo with git
hg_clone_git () {
(
hg init $2 &&
+ hg -R $2 bookmark -i master &&
cd $1 &&
- git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
) &&
(cd $2 && hg -q update)
@@ -47,7 +51,7 @@ git_clone_hg () {
(
git init -q $2 &&
cd $1 &&
- hg bookmark -f -r tip master &&
+ hg bookmark -i -f -r tip master &&
hg -q push -r master ../$2 || true
)
}
@@ -61,11 +65,11 @@ hg_clone_hg () {
hg_push_git () {
(
cd $2
- old=$(git symbolic-ref --short HEAD)
git checkout -q -b tmp &&
- git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
- git checkout -q $old &&
- git branch -q -D tmp 2> /dev/null || true
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git branch -D default &&
+ git checkout -q @{-1} &&
+ git branch -q -D tmp 2>/dev/null || true
)
}
@@ -78,7 +82,8 @@ hg_push_hg () {
}
hg_log () {
- hg -R $1 log --graph --debug | grep -v 'tag: *default/'
+ hg -R $1 log --graph --debug >log &&
+ grep -v 'tag: *default/' log
}
git_log () {
@@ -97,27 +102,29 @@ setup () {
echo "[extensions]"
echo "hgext.bookmarks ="
echo "hggit ="
- ) >> "$HOME"/.hgrc &&
+ echo "graphlog ="
+ ) >>"$HOME"/.hgrc &&
git config --global receive.denycurrentbranch warn
git config --global remote-hg.hg-git-compat true
+ git config --global remote-hg.track-branches false
- HGEDITOR=/usr/bin/true
+ HGEDITOR=true
+ HGMERGE=true
GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
- export HGEDITOR GIT_AUTHOR_DATE GIT_COMMITTER_DATE
+ export HGEDITOR HGMERGE GIT_AUTHOR_DATE GIT_COMMITTER_DATE
}
setup
test_expect_success 'executable bit' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
chmod 0644 alpha &&
git add alpha &&
git commit -m "add alpha" &&
@@ -129,32 +136,31 @@ test_expect_success 'executable bit' '
git commit -m "clear executable bit"
) &&
- for x in hg git; do
+ for x in hg git
+ do
(
hg_clone_$x gitrepo hgrepo-$x &&
cd hgrepo-$x &&
hg_log . &&
hg manifest -r 1 -v &&
hg manifest -v
- ) > output-$x &&
+ ) >"output-$x" &&
git_clone_$x hgrepo-$x gitrepo2-$x &&
- git_log gitrepo2-$x > log-$x
+ git_log gitrepo2-$x >"log-$x"
done &&
- cp -r log-* output-* /tmp/foo/ &&
test_cmp output-hg output-git &&
test_cmp log-hg log-git
'
test_expect_success 'symlink' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
ln -s alpha beta &&
@@ -162,16 +168,17 @@ test_expect_success 'symlink' '
git commit -m "add beta"
) &&
- for x in hg git; do
+ for x in hg git
+ do
(
hg_clone_$x gitrepo hgrepo-$x &&
cd hgrepo-$x &&
hg_log . &&
hg manifest -v
- ) > output-$x &&
+ ) >"output-$x" &&
git_clone_$x hgrepo-$x gitrepo2-$x &&
- git_log gitrepo2-$x > log-$x
+ git_log gitrepo2-$x >"log-$x"
done &&
test_cmp output-hg output-git &&
@@ -179,34 +186,34 @@ test_expect_success 'symlink' '
'
test_expect_success 'merge conflict 1' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
cd hgrepo1 &&
- echo A > afile &&
+ echo A >afile &&
hg add afile &&
hg ci -m "origin" &&
- echo B > afile &&
+ echo B >afile &&
hg ci -m "A->B" &&
hg up -r0 &&
- echo C > afile &&
+ echo C >afile &&
hg ci -m "A->C" &&
- hg merge -r1 || true &&
- echo C > afile &&
+ hg merge -r1 &&
+ echo C >afile &&
hg resolve -m afile &&
hg ci -m "merge to C"
) &&
- for x in hg git; do
+ for x in hg git
+ do
git_clone_$x hgrepo1 gitrepo-$x &&
hg_clone_$x gitrepo-$x hgrepo2-$x &&
- hg_log hgrepo2-$x > hg-log-$x &&
- git_log gitrepo-$x > git-log-$x
+ hg_log hgrepo2-$x >"hg-log-$x" &&
+ git_log gitrepo-$x >"git-log-$x"
done &&
test_cmp hg-log-hg hg-log-git &&
@@ -214,34 +221,34 @@ test_expect_success 'merge conflict 1' '
'
test_expect_success 'merge conflict 2' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
cd hgrepo1 &&
- echo A > afile &&
+ echo A >afile &&
hg add afile &&
hg ci -m "origin" &&
- echo B > afile &&
+ echo B >afile &&
hg ci -m "A->B" &&
hg up -r0 &&
- echo C > afile &&
+ echo C >afile &&
hg ci -m "A->C" &&
hg merge -r1 || true &&
- echo B > afile &&
+ echo B >afile &&
hg resolve -m afile &&
hg ci -m "merge to B"
) &&
- for x in hg git; do
+ for x in hg git
+ do
git_clone_$x hgrepo1 gitrepo-$x &&
hg_clone_$x gitrepo-$x hgrepo2-$x &&
- hg_log hgrepo2-$x > hg-log-$x &&
- git_log gitrepo-$x > git-log-$x
+ hg_log hgrepo2-$x >"hg-log-$x" &&
+ git_log gitrepo-$x >"git-log-$x"
done &&
test_cmp hg-log-hg hg-log-git &&
@@ -249,35 +256,35 @@ test_expect_success 'merge conflict 2' '
'
test_expect_success 'converged merge' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
cd hgrepo1 &&
- echo A > afile &&
+ echo A >afile &&
hg add afile &&
hg ci -m "origin" &&
- echo B > afile &&
+ echo B >afile &&
hg ci -m "A->B" &&
- echo C > afile &&
+ echo C >afile &&
hg ci -m "B->C" &&
hg up -r0 &&
- echo C > afile &&
+ echo C >afile &&
hg ci -m "A->C" &&
hg merge -r2 || true &&
hg ci -m "merge"
) &&
- for x in hg git; do
+ for x in hg git
+ do
git_clone_$x hgrepo1 gitrepo-$x &&
hg_clone_$x gitrepo-$x hgrepo2-$x &&
- hg_log hgrepo2-$x > hg-log-$x &&
- git_log gitrepo-$x > git-log-$x
+ hg_log hgrepo2-$x >"hg-log-$x" &&
+ git_log gitrepo-$x >"git-log-$x"
done &&
test_cmp hg-log-hg hg-log-git &&
@@ -285,39 +292,39 @@ test_expect_success 'converged merge' '
'
test_expect_success 'encoding' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add älphà" &&
GIT_AUTHOR_NAME="tést èncödîng" &&
export GIT_AUTHOR_NAME &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta" &&
- echo gamma > gamma &&
+ echo gamma >gamma &&
git add gamma &&
git commit -m "add gämmâ" &&
: TODO git config i18n.commitencoding latin-1 &&
- echo delta > delta &&
+ echo delta >delta &&
git add delta &&
git commit -m "add déltà"
) &&
- for x in hg git; do
+ for x in hg git
+ do
hg_clone_$x gitrepo hgrepo-$x &&
git_clone_$x hgrepo-$x gitrepo2-$x &&
- HGENCODING=utf-8 hg_log hgrepo-$x > hg-log-$x &&
- git_log gitrepo2-$x > git-log-$x
+ HGENCODING=utf-8 hg_log hgrepo-$x >"hg-log-$x" &&
+ git_log gitrepo2-$x >"git-log-$x"
done &&
test_cmp hg-log-hg hg-log-git &&
@@ -325,20 +332,19 @@ test_expect_success 'encoding' '
'
test_expect_success 'file removal' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta"
mkdir foo &&
- echo blah > foo/bar &&
+ echo blah >foo/bar &&
git add foo &&
git commit -m "add foo" &&
git rm alpha &&
@@ -347,17 +353,18 @@ test_expect_success 'file removal' '
git commit -m "remove foo/bar"
) &&
- for x in hg git; do
+ for x in hg git
+ do
(
hg_clone_$x gitrepo hgrepo-$x &&
cd hgrepo-$x &&
hg_log . &&
hg manifest -r 3 &&
hg manifest
- ) > output-$x &&
+ ) >"output-$x" &&
git_clone_$x hgrepo-$x gitrepo2-$x &&
- git_log gitrepo2-$x > log-$x
+ git_log gitrepo2-$x >"log-$x"
done &&
test_cmp output-hg output-git &&
@@ -365,42 +372,42 @@ test_expect_success 'file removal' '
'
test_expect_success 'git tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
cd gitrepo &&
git config receive.denyCurrentBranch ignore &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
git tag alpha &&
- echo beta > beta &&
+ echo beta >beta &&
git add beta &&
git commit -m "add beta" &&
git tag -a -m "added tag beta" beta
) &&
- for x in hg git; do
+ for x in hg git
+ do
hg_clone_$x gitrepo hgrepo-$x &&
- hg_log hgrepo-$x > log-$x
+ hg_log hgrepo-$x >"log-$x"
done &&
test_cmp log-hg log-git
'
test_expect_success 'hg author' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
- for x in hg git; do
+ for x in hg git
+ do
(
git init -q gitrepo-$x &&
cd gitrepo-$x &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
git checkout -q -b not-master
@@ -411,38 +418,38 @@ test_expect_success 'hg author' '
cd hgrepo-$x &&
hg co master &&
- echo beta > beta &&
+ echo beta >beta &&
hg add beta &&
hg commit -u "test" -m "add beta" &&
- echo gamma >> beta &&
+ echo gamma >>beta &&
hg commit -u "test <test@example.com> (comment)" -m "modify beta" &&
- echo gamma > gamma &&
+ echo gamma >gamma &&
hg add gamma &&
hg commit -u "<test@example.com>" -m "add gamma" &&
- echo delta > delta &&
+ echo delta >delta &&
hg add delta &&
hg commit -u "name<test@example.com>" -m "add delta" &&
- echo epsilon > epsilon &&
+ echo epsilon >epsilon &&
hg add epsilon &&
hg commit -u "name <test@example.com" -m "add epsilon" &&
- echo zeta > zeta &&
+ echo zeta >zeta &&
hg add zeta &&
hg commit -u " test " -m "add zeta" &&
- echo eta > eta &&
+ echo eta >eta &&
hg add eta &&
hg commit -u "test < test@example.com >" -m "add eta" &&
- echo theta > theta &&
+ echo theta >theta &&
hg add theta &&
hg commit -u "test >test@example.com>" -m "add theta" &&
- echo iota > iota &&
+ echo iota >iota &&
hg add iota &&
hg commit -u "test <test <at> example <dot> com>" -m "add iota"
) &&
@@ -450,26 +457,24 @@ test_expect_success 'hg author' '
hg_push_$x hgrepo-$x gitrepo-$x &&
hg_clone_$x gitrepo-$x hgrepo2-$x &&
- hg_log hgrepo2-$x > hg-log-$x &&
- git_log gitrepo-$x > git-log-$x
+ hg_log hgrepo2-$x >"hg-log-$x" &&
+ git_log gitrepo-$x >"git-log-$x"
done &&
- test_cmp git-log-hg git-log-git &&
-
test_cmp hg-log-hg hg-log-git &&
test_cmp git-log-hg git-log-git
'
test_expect_success 'hg branch' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
- for x in hg git; do
+ for x in hg git
+ do
(
git init -q gitrepo-$x &&
cd gitrepo-$x &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -q -m "add alpha" &&
git checkout -q -b not-master
@@ -489,8 +494,8 @@ test_expect_success 'hg branch' '
hg_push_$x hgrepo-$x gitrepo-$x &&
hg_clone_$x gitrepo-$x hgrepo2-$x &&
- hg_log hgrepo2-$x > hg-log-$x &&
- git_log gitrepo-$x > git-log-$x
+ hg_log hgrepo2-$x >"hg-log-$x" &&
+ git_log gitrepo-$x >"git-log-$x"
done &&
test_cmp hg-log-hg hg-log-git &&
@@ -498,15 +503,15 @@ test_expect_success 'hg branch' '
'
test_expect_success 'hg tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
- for x in hg git; do
+ for x in hg git
+ do
(
git init -q gitrepo-$x &&
cd gitrepo-$x &&
- echo alpha > alpha &&
+ echo alpha >alpha &&
git add alpha &&
git commit -m "add alpha" &&
git checkout -q -b not-master
@@ -527,7 +532,7 @@ test_expect_success 'hg tags' '
git --git-dir=gitrepo-$x/.git tag -l &&
hg_log hgrepo2-$x &&
cat hgrepo2-$x/.hgtags
- ) > output-$x
+ ) >"output-$x"
done &&
test_cmp output-hg output-git
diff --git a/contrib/remote-helpers/test-hg.sh b/contrib/remote-helpers/test-hg.sh
index 7bb81f2f8e..72f745d63f 100755
--- a/contrib/remote-helpers/test-hg.sh
+++ b/contrib/remote-helpers/test-hg.sh
@@ -10,112 +10,685 @@ test_description='Test remote-hg'
. ./test-lib.sh
-if ! test_have_prereq PYTHON; then
+if ! test_have_prereq PYTHON
+then
skip_all='skipping remote-hg tests; python not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'
+then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
check () {
- (cd $1 &&
- git log --format='%s' -1 &&
- git symbolic-ref HEAD) > actual &&
- (echo $2 &&
- echo "refs/heads/$3") > expected &&
+ echo $3 >expected &&
+ git --git-dir=$1/.git log --format='%s' -1 $2 >actual &&
test_cmp expected actual
}
+check_branch () {
+ if test -n "$3"
+ then
+ echo $3 >expected &&
+ hg -R $1 log -r $2 --template '{desc}\n' >actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 branches >out &&
+ ! grep $2 out
+ fi
+}
+
+check_bookmark () {
+ if test -n "$3"
+ then
+ echo $3 >expected &&
+ hg -R $1 log -r "bookmark('$2')" --template '{desc}\n' >actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 bookmarks >out &&
+ ! grep $2 out
+ fi
+}
+
+check_push () {
+ local expected_ret=$1 ret=0 ref_ret=0 IFS=':'
+
+ shift
+ git push origin "$@" 2>error
+ ret=$?
+ cat error
+
+ while read branch kind
+ do
+ case "$kind" in
+ 'new')
+ grep "^ \* \[new branch\] *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ 'non-fast-forward')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (non-fast-forward)$" error || ref_ret=1
+ ;;
+ 'fetch-first')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (fetch first)$" error || ref_ret=1
+ ;;
+ 'forced-update')
+ grep "^ + [a-f0-9]*\.\.\.[a-f0-9]* *${branch} -> ${branch} (forced update)$" error || ref_ret=1
+ ;;
+ '')
+ grep "^ [a-f0-9]*\.\.[a-f0-9]* *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ esac
+ test $ref_ret -ne 0 && echo "match for '$branch' failed" && break
+ done
+
+ if test $expected_ret -ne $ret -o $ref_ret -ne 0
+ then
+ return 1
+ fi
+
+ return 0
+}
+
setup () {
(
echo "[ui]"
echo "username = H G Wells <wells@example.com>"
- ) >> "$HOME"/.hgrc
+ echo "[extensions]"
+ echo "mq ="
+ ) >>"$HOME"/.hgrc &&
+
+ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230" &&
+ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" &&
+ export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
}
setup
test_expect_success 'cloning' '
- test_when_finished "rm -rf gitrepo*" &&
+ test_when_finished "rm -rf gitrepo*" &&
- (
- hg init hgrepo &&
- cd hgrepo &&
- echo zero > content &&
- hg add content &&
- hg commit -m zero
- ) &&
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo zero master
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
'
test_expect_success 'cloning with branches' '
- test_when_finished "rm -rf gitrepo*" &&
+ test_when_finished "rm -rf gitrepo*" &&
- (
- cd hgrepo &&
- hg branch next &&
- echo next > content &&
- hg commit -m next
- ) &&
+ (
+ cd hgrepo &&
+ hg branch next &&
+ echo next >content &&
+ hg commit -m next
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo next next &&
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/branches/next next
+'
+
+test_expect_success 'cloning with bookmarks' '
+ test_when_finished "rm -rf gitrepo*" &&
- (cd hgrepo && hg checkout default) &&
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ hg bookmark feature-a &&
+ echo feature-a >content &&
+ hg commit -m feature-a
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo2 &&
- check gitrepo2 zero master
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/feature-a feature-a
'
-test_expect_success 'cloning with bookmarks' '
- test_when_finished "rm -rf gitrepo*" &&
+test_expect_success 'update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
- (
- cd hgrepo &&
- hg bookmark feature-a &&
- echo feature-a > content &&
- hg commit -m feature-a
- ) &&
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo feature-a feature-a
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel >content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
'
-test_expect_success 'cloning with detached head' '
- test_when_finished "rm -rf gitrepo*" &&
+test_expect_success 'new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
- (
- cd hgrepo &&
- hg update -r 0
- ) &&
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b >content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo zero master
+ check_bookmark hgrepo feature-b feature-b
'
-test_expect_success 'update bookmark' '
- test_when_finished "rm -rf gitrepo*" &&
-
- (
- cd hgrepo &&
- hg bookmark devel
- ) &&
-
- (
- git clone "hg::$PWD/hgrepo" gitrepo &&
- cd gitrepo &&
- git checkout devel &&
- echo devel > content &&
- git commit -a -m devel &&
- git push
- ) &&
-
- hg -R hgrepo bookmarks | egrep "devel[ ]+3:"
+# cleanup previous stuff
+rm -rf hgrepo
+
+author_test () {
+ echo $1 >>content &&
+ hg commit -u "$2" -m "add $1" &&
+ echo "$3" >>../expected
+}
+
+test_expect_success 'authors' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ touch content &&
+ hg add content &&
+
+ >../expected &&
+ author_test alpha "" "H G Wells <wells@example.com>" &&
+ author_test beta "test" "test <unknown>" &&
+ author_test beta "test <test@example.com> (comment)" "test <test@example.com>" &&
+ author_test gamma "<test@example.com>" "Unknown <test@example.com>" &&
+ author_test delta "name<test@example.com>" "name <test@example.com>" &&
+ author_test epsilon "name <test@example.com" "name <test@example.com>" &&
+ author_test zeta " test " "test <unknown>" &&
+ author_test eta "test < test@example.com >" "test <test@example.com>" &&
+ author_test theta "test >test@example.com>" "test <test@example.com>" &&
+ author_test iota "test < test <at> example <dot> com>" "test <unknown>" &&
+ author_test kappa "test@example.com" "Unknown <test@example.com>"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ git --git-dir=gitrepo/.git log --reverse --format="%an <%ae>" >actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'strip' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ echo one >>content &&
+ hg add content &&
+ hg commit -m one &&
+
+ echo two >>content &&
+ hg commit -m two
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg strip 1 &&
+
+ echo three >>content &&
+ hg commit -m three &&
+
+ echo four >>content &&
+ hg commit -m four
+ ) &&
+
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%s" origin/master >../actual
+ ) &&
+
+ hg -R hgrepo log --template "{desc}\n" >expected &&
+ test_cmp actual expected
+'
+
+test_expect_success 'remote push with master bookmark' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark master &&
+ echo one >content &&
+ hg commit -m one
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ echo two >content &&
+ git commit -a -m two &&
+ git push
+ ) &&
+
+ check_branch hgrepo default two
+'
+
+cat >expected <<\EOF
+changeset: 0:6e2126489d3d
+tag: tip
+user: A U Thor <author@example.com>
+date: Mon Jan 01 00:00:00 2007 +0230
+summary: one
+
+EOF
+
+test_expect_success 'remote push from master branch' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ hg init hgrepo &&
+
+ (
+ git init gitrepo &&
+ cd gitrepo &&
+ git remote add origin "hg::../hgrepo" &&
+ echo one >content &&
+ git add content &&
+ git commit -a -m one &&
+ git push origin master
+ ) &&
+
+ hg -R hgrepo log >actual &&
+ cat actual &&
+ test_cmp expected actual &&
+
+ check_branch hgrepo default one
+'
+
+GIT_REMOTE_HG_TEST_REMOTE=1
+export GIT_REMOTE_HG_TEST_REMOTE
+
+test_expect_success 'remote cloning' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
+'
+
+test_expect_success 'remote update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel >content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
+'
+
+test_expect_success 'remote new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b >content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
+
+ check_bookmark hgrepo feature-b feature-b
+'
+
+test_expect_success 'remote push diverged' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ echo bump >content &&
+ hg commit -m bump
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo diverge >content &&
+ git commit -a -m diverged &&
+ check_push 1 <<-\EOF
+ master:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default bump
+'
+
+test_expect_success 'remote update bookmark diverge' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg checkout tip^ &&
+ hg bookmark diverge
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ echo "bump bookmark" >content &&
+ hg commit -m "bump bookmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git checkout --quiet diverge &&
+ echo diverge >content &&
+ git commit -a -m diverge &&
+ check_push 1 <<-\EOF
+ diverge:fetch-first
+ EOF
+ ) &&
+
+ check_bookmark hgrepo diverge "bump bookmark"
+'
+
+test_expect_success 'remote new bookmark multiple branch head' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-c HEAD^ &&
+ echo feature-c >content &&
+ git commit -a -m feature-c &&
+ git push --quiet origin feature-c
+ ) &&
+
+ check_bookmark hgrepo feature-c feature-c
+'
+
+# cleanup previous stuff
+rm -rf hgrepo
+
+setup_big_push () {
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark1 &&
+ echo one >content &&
+ hg commit -m one &&
+ hg bookmark bad_bmark2 &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" >content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" >content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd gitrepo &&
+ echo two >content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three >content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark1 &&
+ git reset --hard HEAD^ &&
+ echo four >content &&
+ git commit -q -a -m four &&
+
+ git checkout -q bad_bmark2 &&
+ git reset --hard HEAD^ &&
+ echo five >content &&
+ git commit -q -a -m five &&
+
+ git checkout -q -b new_bmark master &&
+ echo six >content &&
+ git commit -q -a -m six &&
+
+ git checkout -q branches/good_branch &&
+ echo seven >content &&
+ git commit -q -a -m seven &&
+ echo eight >content &&
+ git commit -q -a -m eight &&
+
+ git checkout -q branches/bad_branch &&
+ git reset --hard HEAD^ &&
+ echo nine >content &&
+ git commit -q -a -m nine &&
+
+ git checkout -q -b branches/new_branch master &&
+ echo ten >content &&
+ git commit -q -a -m ten
+ )
+}
+
+test_expect_success 'remote big push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push
+
+ (
+ cd gitrepo &&
+
+ check_push 1 --all <<-\EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote big push fetch first' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" >content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" >content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg bookmark -f bad_bmark &&
+ echo update_bmark >content &&
+ hg commit -m "update bmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo two >content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three >content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark &&
+ echo four >content &&
+ git commit -q -a -m four &&
+
+ git checkout -q branches/bad_branch &&
+ echo five >content &&
+ git commit -q -a -m five &&
+
+ check_push 1 --all <<-\EOF &&
+ master
+ good_bmark
+ bad_bmark:fetch-first
+ branches/bad_branch:fetch-first
+ EOF
+
+ git fetch &&
+
+ check_push 1 --all <<-\EOF
+ master
+ good_bmark
+ bad_bmark:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ )
+'
+
+test_expect_failure 'remote big push force' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push
+
+ (
+ cd gitrepo &&
+
+ check_push 0 --force --all <<-\EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:forced-update
+ bad_bmark2:forced-update
+ branches/bad_branch:forced-update
+ EOF
+ ) &&
+
+ check_branch hgrepo default six &&
+ check_branch hgrepo good_branch eight &&
+ check_branch hgrepo bad_branch nine &&
+ check_branch hgrepo new_branch ten &&
+ check_bookmark hgrepo good_bmark three &&
+ check_bookmark hgrepo bad_bmark1 four &&
+ check_bookmark hgrepo bad_bmark2 five &&
+ check_bookmark hgrepo new_bmark six
+'
+
+test_expect_failure 'remote big push dry-run' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push
+
+ (
+ cd gitrepo &&
+
+ check_push 1 --dry-run --all <<-\EOF &&
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+
+ check_push 0 --dry-run master good_bmark new_bmark branches/good_branch branches/new_branch <<-\EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote double failed push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero >content &&
+ hg add content &&
+ hg commit -m zero &&
+ echo one >content &&
+ hg commit -m one
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git reset --hard HEAD^ &&
+ echo two >content &&
+ git commit -a -m two &&
+ test_expect_code 1 git push &&
+ test_expect_code 1 git push
+ )
'
test_done
diff --git a/contrib/subtree/Makefile b/contrib/subtree/Makefile
index b50750565f..4030a16898 100644
--- a/contrib/subtree/Makefile
+++ b/contrib/subtree/Makefile
@@ -21,15 +21,17 @@ GIT_SUBTREE := git-subtree
GIT_SUBTREE_DOC := git-subtree.1
GIT_SUBTREE_XML := git-subtree.xml
GIT_SUBTREE_TXT := git-subtree.txt
+GIT_SUBTREE_HTML := git-subtree.html
all: $(GIT_SUBTREE)
$(GIT_SUBTREE): $(GIT_SUBTREE_SH)
cp $< $@ && chmod +x $@
-doc: $(GIT_SUBTREE_DOC)
+doc: $(GIT_SUBTREE_DOC) $(GIT_SUBTREE_HTML)
install: $(GIT_SUBTREE)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(libexecdir)
$(INSTALL) -m 755 $(GIT_SUBTREE) $(DESTDIR)$(libexecdir)
install-doc: install-man
@@ -45,6 +47,10 @@ $(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
asciidoc -b docbook -d manpage -f $(ASCIIDOC_CONF) \
-agit_version=$(gitver) $^
+$(GIT_SUBTREE_HTML): $(GIT_SUBTREE_TXT)
+ asciidoc -b xhtml11 -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(gitver) $^
+
test:
$(MAKE) -C t/ test
diff --git a/contrib/subtree/git-subtree.sh b/contrib/subtree/git-subtree.sh
index 8a23f58ba0..7d7af03274 100755
--- a/contrib/subtree/git-subtree.sh
+++ b/contrib/subtree/git-subtree.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
#
# git-subtree.sh: split/join git repositories in subdirectories of this one
#
@@ -311,7 +311,7 @@ copy_commit()
GIT_COMMITTER_NAME \
GIT_COMMITTER_EMAIL \
GIT_COMMITTER_DATE
- (echo -n "$annotate"; cat ) |
+ (printf "%s" "$annotate"; cat ) |
git commit-tree "$2" $3 # reads the rest of stdin
) || die "Can't copy commit $1"
}
@@ -715,7 +715,8 @@ cmd_push()
repository=$1
refspec=$2
echo "git push using: " $repository $refspec
- git push $repository $(git subtree split --prefix=$prefix):refs/heads/$refspec
+ localrev=$(git subtree split --prefix="$prefix") || die
+ git push $repository $localrev:refs/heads/$refspec
else
die "'$dir' must already exist. Try 'git subtree add'."
fi
diff --git a/contrib/subtree/git-subtree.txt b/contrib/subtree/git-subtree.txt
index 7ba853eeda..e0957eee55 100644
--- a/contrib/subtree/git-subtree.txt
+++ b/contrib/subtree/git-subtree.txt
@@ -270,7 +270,7 @@ git-extensions repository in ~/git-extensions/:
name
You can omit the --squash flag, but doing so will increase the number
-of commits that are incldued in your local repository.
+of commits that are included in your local repository.
We now have a ~/git-extensions/git-subtree directory containing code
from the master branch of git://github.com/apenwarr/git-subtree.git
diff --git a/contrib/subtree/t/t7900-subtree.sh b/contrib/subtree/t/t7900-subtree.sh
index b0f8536fca..66ce4b07c2 100755
--- a/contrib/subtree/t/t7900-subtree.sh
+++ b/contrib/subtree/t/t7900-subtree.sh
@@ -182,9 +182,9 @@ test_expect_success 'merge new subproj history into subdir' '
test_expect_success 'Check that prefix argument is required for split' '
echo "You must provide the --prefix option." > expected &&
test_must_fail git subtree split > actual 2>&1 &&
- test_debug "echo -n expected: " &&
+ test_debug "printf '"'"'expected: '"'"'" &&
test_debug "cat expected" &&
- test_debug "echo -n actual: " &&
+ test_debug "printf '"'"'actual: '"'"'" &&
test_debug "cat actual" &&
test_cmp expected actual &&
rm -f expected actual
@@ -193,9 +193,9 @@ test_expect_success 'Check that prefix argument is required for split' '
test_expect_success 'Check that the <prefix> exists for a split' '
echo "'"'"'non-existent-directory'"'"'" does not exist\; use "'"'"'git subtree add'"'"'" > expected &&
test_must_fail git subtree split --prefix=non-existent-directory > actual 2>&1 &&
- test_debug "echo -n expected: " &&
+ test_debug "printf '"'"'expected: '"'"'" &&
test_debug "cat expected" &&
- test_debug "echo -n actual: " &&
+ test_debug "printf '"'"'actual: '"'"'" &&
test_debug "cat actual" &&
test_cmp expected actual
# rm -f expected actual