 contrib/mw-to-git/git-remote-mediawiki     | 823 ++++++++++++++++++++++++++++
 contrib/mw-to-git/git-remote-mediawiki.txt |   7 +
 2 files changed, 830 insertions(+), 0 deletions(-)
diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki
new file mode 100755
index 0000000000..0b32d18eaa
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki
@@ -0,0 +1,823 @@
+#! /usr/bin/perl
+
+# Copyright (C) 2011
+# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
+# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
+# Claire Fousse <claire.fousse@ensimag.imag.fr>
+# David Amouyal <david.amouyal@ensimag.imag.fr>
+# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
+# License: GPL v2 or later
+
+# Gateway between Git and MediaWiki.
+# https://github.com/Bibzball/Git-Mediawiki/wiki
+#
+# Known limitations:
+#
+# - Only wiki pages are managed, no support for [[File:...]]
+# attachments.
+#
+# - Poor performance even in the best case: it takes forever to check
+# whether we're up-to-date (on fetch or push) or to fetch a few
+# revisions from a large wiki, because we use exclusively a
+# page-based synchronization. We could switch to a wiki-wide
+# synchronization when the synchronization involves few revisions
+# but the wiki is large.
+#
+# - Git renames could be turned into MediaWiki renames (see TODO
+# below)
+#
+# - Login/password support requires the user to write the password in
+# cleartext in a file (see TODO below).
+#
+# - No way to import "one page, and all pages included in it"
+#
+# - Multiple remote MediaWikis have not been very well tested.
+
+use strict;
+use MediaWiki::API;
+use DateTime::Format::ISO8601;
+use encoding 'utf8';
+
+# use encoding 'utf8' doesn't change STDERR,
+# but we're going to output UTF-8 filenames to STDERR
+binmode STDERR, ":utf8";
+
+use URI::Escape;
+use warnings;
+
+# MediaWiki filenames can contain forward slashes. This constant
+# defines the string they are replaced with in Git filenames.
+use constant SLASH_REPLACEMENT => "%2F";
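+# (For illustration: the wiki page "Foo/Bar" is stored as the Git file
+# "Foo%2FBar.mw", and mapped back on push.)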
+
+# It's not always possible to delete pages (it may require special
+# privileges). Deleted pages are replaced with this content.
+use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+
+# It's not possible to create empty pages. New empty files in Git are
+# sent with this content instead.
+use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+
+# used to reflect file creation or deletion in diff.
+use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+
+my $remotename = $ARGV[0];
+my $url = $ARGV[1];
+
+# Accept both space-separated values and multiple config keys. Spaces
+# in page titles must be written as _ anyway, since space and newline
+# are used as separators here.
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+chomp(@tracked_pages);
+
+# Just like @tracked_pages, but for MediaWiki categories.
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+chomp(@tracked_categories);
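+#
+# For example, with an illustrative configuration like:
+#   [remote "origin"]
+#       pages = Main_Page Sandbox
+#       categories = Foo
+# @tracked_pages would be ("Main_Page", "Sandbox") and
+# @tracked_categories would be ("Foo"); the "Category:" prefix is
+# added later when missing.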
+
+my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+# TODO: ideally, this should be able to read from the keyboard, but
+# we're inside a remote helper, so our stdin is connected to git, not
+# to a terminal.
+my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
+my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+chomp($wiki_login);
+chomp($wiki_passwd);
+chomp($wiki_domain);
+
+# Import only the last revisions (both for clone and fetch)
+my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+chomp($shallow_import);
+$shallow_import = ($shallow_import eq "true");
+
+# Dumb push: don't update notes and mediawiki ref to reflect the last push.
+#
+# Configurable with mediawiki.dumbPush, or per-remote with
+# remote.<remotename>.dumbPush.
+#
+# This means the user will have to re-import the just-pushed
+# revisions. On the other hand, this means that the Git revisions
+# corresponding to MediaWiki revisions are all imported from the wiki,
+# regardless of whether they were initially created in Git or from the
+# web interface, hence all users will get the same history (i.e. if
+# the push from Git to MediaWiki loses some information, everybody
+# will get the history with information lost). If the import is
+# deterministic, this means everybody gets the same sha1 for each
+# MediaWiki revision.
+my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
+unless ($dumb_push) {
+ $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+}
+chomp($dumb_push);
+$dumb_push = ($dumb_push eq "true");
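+#
+# One would enable it with, e.g. (illustrative remote name):
+#   $ git config remote.origin.dumbPush true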
+
+my $wiki_name = $url;
+$wiki_name =~ s/[^\/]*:\/\///;
+
+# Commands parser
+my $entry;
+my @cmd;
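+# A session with Git typically looks like this (illustrative sketch of
+# the remote-helper protocol; a blank line ends the session):
+#   capabilities
+#   list
+#   import refs/heads/master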
+while (<STDIN>) {
+ chomp;
+ @cmd = split(/ /);
+ if (defined($cmd[0])) {
+ # Line not blank
+ if ($cmd[0] eq "capabilities") {
+ die("Too many arguments for capabilities") unless (!defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq "list") {
+ die("Too many arguments for list") unless (!defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq "import") {
+ die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq "option") {
+ die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq "push") {
+ mw_push($cmd[1]);
+ } else {
+ print STDERR "Unknown command. Aborting...\n";
+ last;
+ }
+ } else {
+ # blank line: we should terminate
+ last;
+ }
+
+ BEGIN { $| = 1 } # autoflush STDOUT, to make sure the previous
+ # command is fully processed.
+}
+
+########################## Functions ##############################
+
+# MediaWiki API instance, created lazily.
+my $mediawiki;
+
+sub mw_connect_maybe {
+ if ($mediawiki) {
+ return;
+ }
+ $mediawiki = MediaWiki::API->new;
+ $mediawiki->{config}->{api_url} = "$url/api.php";
+ if ($wiki_login) {
+ if (!$mediawiki->login({
+ lgname => $wiki_login,
+ lgpassword => $wiki_passwd,
+ lgdomain => $wiki_domain,
+ })) {
+ print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n";
+ print STDERR "(error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
+ } else {
+ print STDERR "Logged in with user \"$wiki_login\".\n";
+ }
+ }
+}
+
+sub get_mw_first_pages {
+ my $some_pages = shift;
+ my @some_pages = @{$some_pages};
+
+ my $pages = shift;
+
+ # pattern 'page1|page2|...' required by the API
+ my $titles = join('|', @some_pages);
+
+ my $mw_pages = $mediawiki->api({
+ action => 'query',
+ titles => $titles,
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not query the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
+ if ($id < 0) {
+ print STDERR "Warning: page $page->{title} not found on wiki\n";
+ } else {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+}
+
+sub get_mw_pages {
+ mw_connect_maybe();
+
+ my %pages; # hash on page titles to avoid duplicates
+ my $user_defined;
+ if (@tracked_pages) {
+ $user_defined = 1;
+ # The user provided a list of page titles, but we
+ # still need to query the API to get the page IDs.
+
+ my @some_pages = @tracked_pages;
+ while (@some_pages) {
+ # The MediaWiki API accepts at most 50 titles per query.
+ my $last = 49;
+ if ($#some_pages < $last) {
+ $last = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last];
+ get_mw_first_pages(\@slice, \%pages);
+ @some_pages = @some_pages[50..$#some_pages];
+ }
+ }
+ if (@tracked_categories) {
+ $user_defined = 1;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:" . $category;
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
+ foreach my $page (@{$mw_pages}) {
+ $pages{$page->{title}} = $page;
+ }
+ }
+ }
+ if (!$user_defined) {
+ # No user-provided list, get the list of pages from
+ # the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages{$page->{title}} = $page;
+ }
+ }
+ return values(%pages);
+}
+
+sub run_git {
+ open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
+ my $res = do { local $/; <$git> };
+ close($git);
+
+ return $res;
+}
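+# (Illustrative use: my $head = run_git("rev-parse HEAD"); the result
+# keeps its trailing newline, hence the chomp() at most call sites.)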
+
+
+sub get_last_local_revision {
+ # Get note regarding last mediawiki revision
+ my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my @note_info = split(/ /, $note);
+
+ my $lastrevision_number;
+ if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
+ print STDERR "No previous mediawiki revision found";
+ $lastrevision_number = 0;
+ } else {
+ # Notes are formatted as: mediawiki_revision: #number
+ $lastrevision_number = $note_info[1];
+ chomp($lastrevision_number);
+ print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ }
+ return $lastrevision_number;
+}
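+# (A note's content looks like "mediawiki_revision: 42", as written by
+# import_file_revision() below; 42 being an illustrative revision id.)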
+
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
+sub get_last_remote_revision {
+ mw_connect_maybe();
+
+ my @pages = get_mw_pages();
+
+ my $max_rev_num = 0;
+
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids|timestamp',
+ pageids => $id,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
+
+ $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
+
+ $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
+ }
+
+ print STDERR "Last remote revision found is $max_rev_num.\n";
+ return $max_rev_num;
+}
+
+# Clean content before sending it to MediaWiki
+sub mediawiki_clean {
+ my $string = shift;
+ my $page_created = shift;
+ # MediaWiki does not allow blank space at the end of a page and
+ # requires pages to end with a single \n. This function right-trims
+ # the string and appends a \n to follow this rule.
+ $string =~ s/\s+$//;
+ if ($string eq "" && $page_created) {
+ # Creating empty pages is forbidden.
+ $string = EMPTY_CONTENT;
+ }
+ return $string."\n";
+}
+
+# Filter applied to MediaWiki data before adding it to Git
+sub mediawiki_smudge {
+ my $string = shift;
+ if ($string eq EMPTY_CONTENT) {
+ $string = "";
+ }
+ # This \n is important. This is due to mediawiki's way of handling the end of files.
+ return $string."\n";
+}
+
+sub mediawiki_clean_filename {
+ my $filename = shift;
+ $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. something that looks like
+ # URL-encoding but has a _ added to prevent MediaWiki from
+ # interpreting it as an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+ # If URI-escaping were applied earlier, it would have to be
+ # unescaped here, before anything else.
+
+ return $filename;
+}
+
+sub mediawiki_smudge_filename {
+ my $filename = shift;
+ $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in mediawiki_clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
+ return $filename;
+}
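+# (Illustration of the round-trip: a Git file named "Note[1].mw" maps
+# to the wiki page "Note_%_5b1_%_5d", "[" being 0x5b and "]" 0x5d, and
+# maps back on fetch.)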
+
+sub literal_data {
+ my ($content) = @_;
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+}
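+# (e.g. literal_data("foo\n") emits, following the fast-import "data"
+# syntax:
+#   data 4
+#   foo
+# where 4 is the byte length of the content.)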
+
+sub mw_capabilities {
+ # Revisions are imported to the private namespace
+ # refs/mediawiki/$remotename/ by the helper and fetched into
+ # refs/remotes/$remotename later by fetch.
+ print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
+ print STDOUT "import\n";
+ print STDOUT "list\n";
+ print STDOUT "push\n";
+ print STDOUT "\n";
+}
+
+sub mw_list {
+ # MediaWiki does not have branches; we consider one branch arbitrarily
+ # called master, with HEAD pointing to it.
+ print STDOUT "? refs/heads/master\n";
+ print STDOUT "\@refs/heads/master HEAD\n";
+ print STDOUT "\n";
+}
+
+sub mw_option {
+ print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
+ print STDOUT "unsupported\n";
+}
+
+sub fetch_mw_revisions_for_page {
+ my $page = shift;
+ my $id = shift;
+ my $fetch_from = shift;
+ my @page_revs = ();
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids',
+ rvdir => 'newer',
+ rvstartid => $fetch_from,
+ rvlimit => 500,
+ pageids => $id,
+ };
+
+ my $revnum = 0;
+ # Get 500 revisions at a time due to the mediawiki api limit
+ while (1) {
+ my $result = $mediawiki->api($query);
+
+ # Parse each of those 500 revisions
+ foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
+ my $page_rev_ids;
+ $page_rev_ids->{pageid} = $page->{pageid};
+ $page_rev_ids->{revid} = $revision->{revid};
+ push(@page_revs, $page_rev_ids);
+ $revnum++;
+ }
+ last unless $result->{'query-continue'};
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ }
+ if ($shallow_import && @page_revs) {
+ print STDERR " Found 1 revision (shallow import).\n";
+ @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
+ return $page_revs[0];
+ }
+ print STDERR " Found ", $revnum, " revision(s).\n";
+ return @page_revs;
+}
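+# (When a page has more than 500 revisions, the API reply carries a
+# continuation marker, roughly of the form
+#   {"query-continue":{"revisions":{"rvstartid":N}}}
+# which the loop above feeds back as the next rvstartid; N is an
+# illustrative revision id.)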
+
+sub fetch_mw_revisions {
+ my $pages = shift; my @pages = @{$pages};
+ my $fetch_from = shift;
+
+ my @revisions = ();
+ my $n = 1;
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ $n++;
+ my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
+ @revisions = (@page_revs, @revisions);
+ }
+
+ return ($n, @revisions);
+}
+
+sub import_file_revision {
+ my $commit = shift;
+ my %commit = %{$commit};
+ my $full_import = shift;
+ my $n = shift;
+
+ my $title = $commit{title};
+ my $comment = $commit{comment};
+ my $content = $commit{content};
+ my $author = $commit{author};
+ my $date = $commit{date};
+
+ print STDOUT "commit refs/mediawiki/$remotename/master\n";
+ print STDOUT "mark :$n\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data($comment);
+
+ # If it's not a clone, we need to know where to start from
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ }
+ if ($content ne DELETED_CONTENT) {
+ print STDOUT "M 644 inline $title.mw\n";
+ literal_data($content);
+ print STDOUT "\n\n";
+ } else {
+ print STDOUT "D $title.mw\n";
+ }
+
+ # mediawiki revision number in the git note
+ if ($full_import && $n == 1) {
+ print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ }
+ print STDOUT "commit refs/notes/$remotename/mediawiki\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data("Note added by git-mediawiki during import");
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ }
+ print STDOUT "N inline :$n\n";
+ literal_data("mediawiki_revision: " . $commit{mw_revision});
+ print STDOUT "\n\n";
+}
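+# (Illustrative sketch of the stream emitted above for one revision,
+# with made-up page name, author, remote name and date:
+#   commit refs/mediawiki/origin/master
+#   mark :1
+#   committer Alice <Alice@wiki.example.com> 1304160000 +0000
+#   data 12
+#   Initial page
+#   M 644 inline Main_Page.mw
+#   data 21
+#   Hello from the wiki!
+# followed by a matching commit on refs/notes/origin/mediawiki.)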
+
+# parse a sequence of
+# <cmd> <arg1>
+# <cmd> <arg2>
+# \n
+# (as in a batch of import commands or a sequence of push statements)
+sub get_more_refs {
+ my $cmd = shift;
+ my @refs;
+ while (1) {
+ my $line = <STDIN>;
+ if ($line =~ m/^$cmd (.*)$/) {
+ push(@refs, $1);
+ } elsif ($line eq "\n") {
+ return @refs;
+ } else {
+ die("Invalid command in a '$cmd' batch: ". $_);
+ }
+ }
+}
+
+sub mw_import {
+ # multiple import commands can follow each other.
+ my @refs = (shift, get_more_refs("import"));
+ foreach my $ref (@refs) {
+ mw_import_ref($ref);
+ }
+ print STDOUT "done\n";
+}
+
+sub mw_import_ref {
+ my $ref = shift;
+ # The remote helper will call "import HEAD" and
+ # "import refs/heads/master".
+ # Since HEAD is a symbolic ref to master (by convention, and as
+ # reported by the output of our "list" command), we don't need
+ # to do anything in this case.
+ if ($ref eq "HEAD") {
+ return;
+ }
+
+ mw_connect_maybe();
+
+ my @pages = get_mw_pages();
+
+ print STDERR "Searching revisions...\n";
+ my $last_local = get_last_local_revision();
+ my $fetch_from = $last_local + 1;
+ if ($fetch_from == 1) {
+ print STDERR ", fetching from beginning.\n";
+ } else {
+ print STDERR ", fetching from here.\n";
+ }
+ my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
+
+ # Creation of the fast-import stream
+ print STDERR "Fetching & writing export data...\n";
+
+ $n = 0;
+ my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
+
+ foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
+ # Fetch the content of the page for this revision
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'content|timestamp|comment|user|ids',
+ revids => $pagerevid->{revid},
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
+
+ $n++;
+
+ my %commit;
+ $commit{author} = $rev->{user} || 'Anonymous';
+ $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
+ $commit{title} = mediawiki_smudge_filename(
+ $result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
+ );
+ $commit{mw_revision} = $pagerevid->{revid};
+ $commit{content} = mediawiki_smudge($rev->{'*'});
+
+ if (!defined($rev->{timestamp})) {
+ $last_timestamp++;
+ } else {
+ $last_timestamp = $rev->{timestamp};
+ }
+ $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
+
+ print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
+
+ import_file_revision(\%commit, ($fetch_from == 1), $n);
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ # Something has to be done on the remote-helper side. If nothing
+ # is done, an error is thrown saying that HEAD is referring to the
+ # unknown object 0000000000000000000 and the clone fails.
+ }
+}
+
+sub error_non_fast_forward {
+ my $advice = run_git("config --bool advice.pushNonFastForward");
+ chomp($advice);
+ if ($advice ne "false") {
+ # Native git-push would show this after the summary.
+ # We can't ask it to display it cleanly, so we print it
+ # ourselves beforehand.
+ print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ }
+ print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ return 0;
+}
+
+sub mw_push_file {
+ my $diff_info = shift;
+ # $diff_info contains a string in this format:
+ # :100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
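+ # e.g. ":100644 100644 1234abc... 5678def... M" (illustrative,
+ # truncated sha1s), hence indices 2 and 3 below for the old and new
+ # blob sha1.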
+ my @diff_info_split = split(/[ \t]/, $diff_info);
+
+ # Filename, including .mw extension
+ my $complete_file_name = shift;
+ # Commit message
+ my $summary = shift;
+ # MediaWiki revision number. Keep the previous one by default,
+ # in case there's no edit to perform.
+ my $newrevid = shift;
+
+ my $new_sha1 = $diff_info_split[3];
+ my $old_sha1 = $diff_info_split[2];
+ my $page_created = ($old_sha1 eq NULL_SHA1);
+ my $page_deleted = ($new_sha1 eq NULL_SHA1);
+ $complete_file_name = mediawiki_clean_filename($complete_file_name);
+
+ if (substr($complete_file_name,-3) eq ".mw") {
+ my $title = substr($complete_file_name,0,-3);
+
+ my $file_content;
+ if ($page_deleted) {
+ # Deleting a page usually requires
+ # special privileges. A common
+ # convention is to replace the page
+ # with this content instead:
+ $file_content = DELETED_CONTENT;
+ } else {
+ $file_content = run_git("cat-file blob $new_sha1");
+ }
+
+ mw_connect_maybe();
+
+ my $result = $mediawiki->edit( {
+ action => 'edit',
+ summary => $summary,
+ title => $title,
+ basetimestamp => $basetimestamps{$newrevid},
+ text => mediawiki_clean($file_content, $page_created),
+ }, {
+ skip_encoding => 1 # Helps with titles containing accented characters
+ });
+ if (!$result) {
+ if ($mediawiki->{error}->{code} == 3) {
+ # An edit conflict, considered as non-fast-forward
+ print STDERR 'Warning: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
+ ".\n";
+ return ($newrevid, "non-fast-forward");
+ } else {
+ # Other errors. Shouldn't happen => just die()
+ die 'Fatal: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details};
+ }
+ }
+ $newrevid = $result->{edit}->{newrevid};
+ print STDERR "Pushed file: $new_sha1 - $title\n";
+ } else {
+ print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n"
+ }
+ return ($newrevid, "ok");
+}
+
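+# (Git sends one or more lines of the form "push <src>:<dst>", e.g.
+# "push refs/heads/master:refs/heads/master", terminated by a blank
+# line; a leading "+" on <src> requests a forced push.)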
+sub mw_push {
+ # multiple push statements can follow each other
+ my @refsspecs = (shift, get_more_refs("push"));
+ my $pushed;
+ for my $refspec (@refsspecs) {
+ my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ if ($force) {
+ print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ }
+ if ($local eq "") {
+ print STDERR "Cannot delete remote branch on a MediaWiki\n";
+ print STDOUT "error $remote cannot delete\n";
+ next;
+ }
+ if ($remote ne "refs/heads/master") {
+ print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print STDOUT "error $remote only master allowed\n";
+ next;
+ }
+ if (mw_push_revision($local, $remote)) {
+ $pushed = 1;
+ }
+ }
+
+ # Notify Git that the push is done
+ print STDOUT "\n";
+
+ if ($pushed && $dumb_push) {
+ print STDERR "Just pushed some revisions to MediaWiki.\n";
+ print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
+ print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
+ print STDERR "\n";
+ print STDERR " git pull --rebase\n";
+ print STDERR "\n";
+ }
+}
+
+sub mw_push_revision {
+ my $local = shift;
+ my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
+ my $last_local_revid = get_last_local_revision();
+ print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ my $last_remote_revid = get_last_remote_revision();
+ my $mw_revision = $last_remote_revid;
+
+ # Get sha1 of commit pointed by local HEAD
+ my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ # Get sha1 of commit pointed by remotes/$remotename/master
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ chomp($remoteorigin_sha1);
+
+ if ($last_local_revid > 0 &&
+ $last_local_revid < $last_remote_revid) {
+ return error_non_fast_forward($remote);
+ }
+
+ if ($HEAD_sha1 eq $remoteorigin_sha1) {
+ # nothing to push
+ return 0;
+ }
+
+ # Get every commit between HEAD and refs/remotes/origin/master,
+ # including both endpoints
+ my @commit_pairs = ();
+ if ($last_local_revid > 0) {
+ my $parsed_sha1 = $remoteorigin_sha1;
+ # Find a path from last MediaWiki commit to pushed commit
+ while ($parsed_sha1 ne $HEAD_sha1) {
+ my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
+ if (!@commit_info) {
+ return error_non_fast_forward($remote);
+ }
+ my @commit_info_split = split(/ |\n/, $commit_info[0]);
+ # $commit_info_split[1] is the sha1 of the commit to export
+ # $commit_info_split[0] is the sha1 of its direct child
+ push(@commit_pairs, \@commit_info_split);
+ $parsed_sha1 = $commit_info_split[1];
+ }
+ } else {
+ # No remote mediawiki revision. Export the whole
+ # history (linearized with --first-parent)
+ print STDERR "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children $local");
+ my @history = split('\n', $history);
+ @history = @history[1..$#history];
+ foreach my $line (reverse @history) {
+ my @commit_info_split = split(/ |\n/, $line);
+ push(@commit_pairs, \@commit_info_split);
+ }
+ }
+
+ foreach my $commit_info_split (@commit_pairs) {
+ my $sha1_child = @{$commit_info_split}[0];
+ my $sha1_commit = @{$commit_info_split}[1];
+ my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ # TODO: we could detect rename, and encode them with a #redirect on the wiki.
+ # TODO: for now, it's just a delete+add
+ my @diff_info_list = split(/\0/, $diff_infos);
+ # Keep the first line of the commit message as the MediaWiki comment for the revision
+ my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+ chomp($commit_msg);
+ # Push every blob
+ while (@diff_info_list) {
+ my $status;
+ # git diff-tree -z gives an output like
+ # <metadata>\0<filename1>\0
+ # <metadata>\0<filename2>\0
+ # and we've split on \0.
+ my $info = shift(@diff_info_list);
+ my $file = shift(@diff_info_list);
+ ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+ if ($status eq "non-fast-forward") {
+ # we may already have sent part of the
+ # commit to MediaWiki, but it's too
+ # late to cancel it. Stop the push in
+ # the middle, but still give an
+ # accurate error message.
+ return error_non_fast_forward($remote);
+ }
+ if ($status ne "ok") {
+ die("Unknown error from mw_push_file()");
+ }
+ }
+ unless ($dumb_push) {
+ run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+ run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ }
+ }
+
+ print STDOUT "ok $remote\n";
+ return 1;
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt
new file mode 100644
index 0000000000..4d211f5b81
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.txt
@@ -0,0 +1,7 @@
+Git-Mediawiki is a project that aims to create a gateway
+between Git and MediaWiki, allowing Git users to push pages to
+and pull pages from MediaWiki just as they would with a classic
+Git repository, thanks to remote helpers.
+
+For more information, visit the wiki at
+https://github.com/Bibzball/Git-Mediawiki/wiki