Diffstat (limited to 'contrib/mw-to-git/git-remote-mediawiki.perl')
 contrib/mw-to-git/git-remote-mediawiki.perl | 681 ++++++++++++++--------------
 1 file changed, 327 insertions(+), 354 deletions(-)
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 9c14c1f88d..476e0a2bc0 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -13,19 +13,17 @@
use strict;
use MediaWiki::API;
+use Git;
+use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
+use warnings;
# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ":utf8";
-binmode STDOUT, ":utf8";
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
use URI::Escape;
-use IPC::Open2;
-
-use warnings;
-
-# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
-use constant SLASH_REPLACEMENT => "%2F";
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
@@ -36,45 +34,57 @@ use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+# Number of pages taken into account at once in subroutine get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked mediafiles to get at once in get_linked_mediafiles.
+# The query is split into small batches because of the MW API limit on
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
-my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
-my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
-$import_media = ($import_media eq "true");
+$import_media = ($import_media eq 'true');
# Export media files on push
-my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
-$export_media = !($export_media eq "false");
+$export_media = !($export_media eq 'false');
-my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
-my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
-my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
-$shallow_import = ($shallow_import eq "true");
+$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
@@ -82,15 +92,18 @@ $shallow_import = ($shallow_import eq "true");
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
-unless ($fetch_strategy) {
- $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+ $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
-unless ($fetch_strategy) {
- $fetch_strategy = "by_page";
+if (!$fetch_strategy) {
+ $fetch_strategy = 'by_page';
}
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
@@ -105,48 +118,25 @@ unless ($fetch_strategy) {
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
-my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
-unless ($dumb_push) {
- $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+ $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
-$dumb_push = ($dumb_push eq "true");
+$dumb_push = ($dumb_push eq 'true');
my $wiki_name = $url;
-$wiki_name =~ s/[^\/]*:\/\///;
+$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
# Commands parser
-my $entry;
-my @cmd;
while (<STDIN>) {
chomp;
- @cmd = split(/ /);
- if (defined($cmd[0])) {
- # Line not blank
- if ($cmd[0] eq "capabilities") {
- die("Too many arguments for capabilities") unless (!defined($cmd[1]));
- mw_capabilities();
- } elsif ($cmd[0] eq "list") {
- die("Too many arguments for list") unless (!defined($cmd[2]));
- mw_list($cmd[1]);
- } elsif ($cmd[0] eq "import") {
- die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
- mw_import($cmd[1]);
- } elsif ($cmd[0] eq "option") {
- die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
- mw_option($cmd[1],$cmd[2]);
- } elsif ($cmd[0] eq "push") {
- mw_push($cmd[1]);
- } else {
- print STDERR "Unknown command. Aborting...\n";
- last;
- }
- } else {
- # blank line: we should terminate
+
+ if (!parse_command($_)) {
last;
}
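
For orientation: this loop speaks the git remote-helper protocol, in which Git writes one command per line to the helper's stdin and a blank line ends the session. An illustrative transcript of one session (not captured from a real run):

    # capabilities
    # list
    # import refs/heads/master
    # <blank line>
    #
    # parse_command() (defined below) returns 0 on a blank or unknown
    # line, which makes the loop above stop via "last".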
@@ -156,107 +146,91 @@ while (<STDIN>) {
########################## Functions ##############################
-## credential API management (generic functions)
-
-sub credential_read {
- my %credential;
- my $reader = shift;
- my $op = shift;
- while (<$reader>) {
- my ($key, $value) = /([^=]*)=(.*)/;
- if (not defined $key) {
- die "ERROR receiving response from git credential $op:\n$_\n";
- }
- $credential{$key} = $value;
- }
- return %credential;
+## error handling
+sub exit_error_usage {
+ die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+ "parameters\n" .
+ "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+ "module directly.\n" .
+ "This module can be used the following way:\n" .
+ "\tgit clone mediawiki://<address of a mediawiki>\n" .
+ "Then, use git commit, push and pull as with every normal git repository.\n";
}
-sub credential_write {
- my $credential = shift;
- my $writer = shift;
- # url overwrites other fields, so it must come first
- print $writer "url=$credential->{url}\n" if exists $credential->{url};
- while (my ($key, $value) = each(%$credential) ) {
- if (length $value && $key ne 'url') {
- print $writer "$key=$value\n";
- }
+sub parse_command {
+ my ($line) = @_;
+ my @cmd = split(/ /, $line);
+ if (!defined $cmd[0]) {
+ return 0;
}
-}
-
-sub credential_run {
- my $op = shift;
- my $credential = shift;
- my $pid = open2(my $reader, my $writer, "git credential $op");
- credential_write($credential, $writer);
- print $writer "\n";
- close($writer);
-
- if ($op eq "fill") {
- %$credential = credential_read($reader, $op);
+ if ($cmd[0] eq 'capabilities') {
+ die("Too many arguments for capabilities\n")
+ if (defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq 'list') {
+ die("Too many arguments for list\n") if (defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq 'import') {
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+ die("Too many arguments for option\n")
+ if (defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq 'push') {
+ mw_push($cmd[1]);
} else {
- if (<$reader>) {
- die "ERROR while running git credential $op:\n$_";
- }
- }
- close($reader);
- waitpid($pid, 0);
- my $child_exit_status = $? >> 8;
- if ($child_exit_status != 0) {
- die "'git credential $op' failed with code $child_exit_status.";
+ print {*STDERR} "Unknown command. Aborting...\n";
+ return 0;
}
+ return 1;
}
# MediaWiki API instance, created lazily.
my $mediawiki;
-sub mw_connect_maybe {
- if ($mediawiki) {
- return;
- }
- $mediawiki = MediaWiki::API->new;
- $mediawiki->{config}->{api_url} = "$url/api.php";
- if ($wiki_login) {
- my %credential = (url => $url);
- $credential{username} = $wiki_login;
- $credential{password} = $wiki_passwd;
- credential_run("fill", \%credential);
- my $request = {lgname => $credential{username},
- lgpassword => $credential{password},
- lgdomain => $wiki_domain};
- if ($mediawiki->login($request)) {
- credential_run("approve", \%credential);
- print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
- } else {
- print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
- print STDERR " (error " .
- $mediawiki->{error}->{code} . ': ' .
- $mediawiki->{error}->{details} . ")\n";
- credential_run("reject", \%credential);
- exit 1;
- }
+sub fatal_mw_error {
+ my $action = shift;
+ print STDERR "fatal: could not $action.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ if ($url =~ /^https/) {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+ print STDERR "fatal: and the SSL certificate is correct.\n";
+ } else {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
}
+ print STDERR "fatal: (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
my $pages = shift;
get_mw_page_list(\@tracked_pages, $pages);
+ return;
}
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
+ my $last_page = SLICE_SIZE;
+ if ($#some_pages < $last_page) {
+ $last_page = $#some_pages;
}
- my @slice = @some_pages[0..$last];
+ my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[51..$#some_pages];
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
+ return;
}
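
Note on the slicing above: the range [0 .. SLICE_SIZE] is inclusive, so each batch actually holds SLICE_SIZE + 1 titles, matching the (SLICE_SIZE + 1) offset used to drop them. A standalone sketch of the pattern with canned data:

    use constant SLICE_SIZE => 50;
    my @titles = map { "Page_$_" } (1 .. 120);   # canned page titles
    while (@titles) {
        my $last = $#titles < SLICE_SIZE ? $#titles : SLICE_SIZE;
        my @slice = @titles[0 .. $last];          # up to SLICE_SIZE + 1 titles
        # ... query the wiki for @slice here ...
        @titles = @titles[(SLICE_SIZE + 1) .. $#titles];
    }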
sub get_mw_tracked_categories {
@@ -266,7 +240,7 @@ sub get_mw_tracked_categories {
# Mediawiki requires the Category
# prefix, but let's not force the user
# to specify it.
- $category = "Category:" . $category;
+ $category = "Category:${category}";
}
my $mw_pages = $mediawiki->list( {
action => 'query',
@@ -274,11 +248,12 @@ sub get_mw_tracked_categories {
cmtitle => $category,
cmlimit => 'max' } )
|| die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
sub get_mw_all_pages {
@@ -290,14 +265,12 @@ sub get_mw_all_pages {
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("get the list of wiki pages");
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
# queries the wiki for a set of pages. Meant to be used within a loop
@@ -316,25 +289,23 @@ sub get_mw_first_pages {
titles => $titles,
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not query the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("query the list of wiki pages");
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
- print STDERR "Warning: page $page->{title} not found on wiki\n";
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Listing pages on remote wiki...\n";
+ print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
@@ -352,14 +323,14 @@ sub get_mw_pages {
get_mw_all_pages(\%pages);
}
if ($import_media) {
- print STDERR "Getting media files for selected pages...\n";
+ print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
- print STDERR (scalar keys %pages) . " pages found.\n";
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
@@ -367,9 +338,13 @@ sub get_mw_pages {
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
my $args = shift;
- my $encoding = (shift || "encoding(UTF-8)");
- open(my $git, "-|:$encoding", "git " . $args);
- my $res = do { local $/; <$git> };
+ my $encoding = (shift || 'encoding(UTF-8)');
+ open(my $git, "-|:${encoding}", "git ${args}")
+ or die "Unable to fork: $!\n";
+ my $res = do {
+ local $/ = undef;
+ <$git>
+ };
close($git);
return $res;
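
A usage sketch for run_git(), matching the comment above it: the optional second argument names the PerlIO layer, so passing 'raw' skips UTF-8 decoding (the blob name is illustrative):

    my $branch = run_git('rev-parse --abbrev-ref HEAD');       # decoded as UTF-8
    chomp($branch);
    my $bytes = run_git('cat-file blob HEAD:README', 'raw');   # undecoded bytes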
@@ -384,27 +359,26 @@ sub get_all_mediafiles {
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
- apnamespace => get_mw_namespace_id("File"),
+ apnamespace => get_mw_namespace_id('File'),
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of pages for media files.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
sub get_linked_mediafiles {
my $pages = shift;
- my @titles = map $_->{title}, values(%{$pages});
+ my @titles = map { $_->{title} } values(%{$pages});
- # The query is split in small batches because of the MW API limit of
- # the number of links to be returned (500 links max).
- my $batch = 10;
+ my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
@@ -420,7 +394,7 @@ sub get_linked_mediafiles {
action => 'query',
prop => 'links|images',
titles => $mw_titles,
- plnamespace => get_mw_namespace_id("File"),
+ plnamespace => get_mw_namespace_id('File'),
pllimit => 'max'
};
my $result = $mediawiki->api($query);
@@ -428,11 +402,13 @@ sub get_linked_mediafiles {
while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
my @media_titles;
if (defined($page->{links})) {
- my @link_titles = map $_->{title}, @{$page->{links}};
+ my @link_titles
+ = map { $_->{title} } @{$page->{links}};
push(@media_titles, @link_titles);
}
if (defined($page->{images})) {
- my @image_titles = map $_->{title}, @{$page->{images}};
+ my @image_titles
+ = map { $_->{title} } @{$page->{images}};
push(@media_titles, @image_titles);
}
if (@media_titles) {
@@ -442,6 +418,7 @@ sub get_linked_mediafiles {
@titles = @titles[($batch+1)..$#titles];
}
+ return;
}
sub get_mw_mediafile_for_page_revision {
@@ -455,7 +432,7 @@ sub get_mw_mediafile_for_page_revision {
my $query = {
action => 'query',
prop => 'imageinfo',
- titles => "File:" . $filename,
+ titles => "File:${filename}",
iistart => $timestamp,
iiend => $timestamp,
iiprop => 'timestamp|archivename|url',
@@ -473,53 +450,50 @@ sub get_mw_mediafile_for_page_revision {
$mediafile{timestamp} = $fileinfo->{timestamp};
# Mediawiki::API's download function doesn't support https URLs
# and can't download old versions of files.
- print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
}
sub download_mw_mediafile {
- my $url = shift;
+ my $download_url = shift;
- my $response = $mediawiki->{ua}->get($url);
- if ($response->code == 200) {
+ my $response = $mediawiki->{ua}->get($download_url);
+ if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
- print STDERR "Error downloading mediafile from :\n";
- print STDERR "URL: $url\n";
- print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ print {*STDERR} "Error downloading mediafile from :\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
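
The check above compares against HTTP_CODE_OK imported from Git::Mediawiki. A standalone sketch of the same pattern with a bare LWP::UserAgent (MediaWiki::API exposes one as $mediawiki->{ua}; the URL is illustrative):

    use strict;
    use warnings;
    use LWP::UserAgent;
    use constant HTTP_CODE_OK => 200;

    my $ua = LWP::UserAgent->new;
    my $response = $ua->get('https://wiki.example.org/images/Foo.png');
    if ($response->code == HTTP_CODE_OK) {
        my $bytes = $response->decoded_content;   # the downloaded file
    } else {
        die 'Server response: ' . $response->code . q{ } . $response->message . "\n";
    }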
sub get_last_local_revision {
# Get note regarding last mediawiki revision
- my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
my @note_info = split(/ /, $note);
my $lastrevision_number;
- if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
- print STDERR "No previous mediawiki revision found";
+ if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+ print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted: mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
- print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -535,14 +509,14 @@ sub get_last_global_remote_rev {
# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my %pages_hash = get_mw_pages();
my @pages = values(%pages_hash);
my $max_rev_num = 0;
- print STDERR "Getting last revision id on tracked pages...\n";
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -563,7 +537,7 @@ sub get_last_remote_revision {
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
- print STDERR "Last remote revision found is $max_rev_num.\n";
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
@@ -574,7 +548,7 @@ sub mediawiki_clean {
# Mediawiki does not allow blank space at the end of a page; pages end with a single \n.
# This function right-trims a string and appends a \n to follow this rule.
$string =~ s/\s+$//;
- if ($string eq "" && $page_created) {
+ if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
@@ -585,38 +559,16 @@ sub mediawiki_clean {
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
- $string = "";
+ $string = EMPTY;
}
# This \n is important. This is due to mediawiki's way of handling ends of files.
- return $string."\n";
-}
-
-sub mediawiki_clean_filename {
- my $filename = shift;
- $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
- # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
- # Do a variant of URL-encoding, i.e. looks like URL-encoding,
- # but with _ added to prevent MediaWiki from thinking this is
- # an actual special character.
- $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
- # If we use the uri escape before
- # we should unescape here, before anything
-
- return $filename;
-}
-
-sub mediawiki_smudge_filename {
- my $filename = shift;
- $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
- $filename =~ s/ /_/g;
- # Decode forbidden characters encoded in mediawiki_clean_filename
- $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
- return $filename;
+ return "${string}\n";
}
sub literal_data {
my ($content) = @_;
- print STDOUT "data ", bytes::length($content), "\n", $content;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ return;
}
sub literal_data_raw {
@@ -624,33 +576,40 @@ sub literal_data_raw {
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
- binmode STDOUT, ":raw";
- print STDOUT "data ", bytes::length($content), "\n", $content;
- binmode STDOUT, ":utf8";
+ binmode STDOUT, ':raw';
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ binmode STDOUT, ':encoding(UTF-8)';
+ return;
}
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
- print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
- print STDOUT "import\n";
- print STDOUT "list\n";
- print STDOUT "push\n";
- print STDOUT "\n";
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ if ($dumb_push) {
+ print {*STDOUT} "no-private-update\n";
+ }
+ print {*STDOUT} "\n";
+ return;
}
sub mw_list {
# MediaWiki does not have branches; we consider one branch arbitrarily
# called master, with HEAD pointing to it.
- print STDOUT "? refs/heads/master\n";
- print STDOUT "\@refs/heads/master HEAD\n";
- print STDOUT "\n";
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_option {
- print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
- print STDOUT "unsupported\n";
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
+ return;
}
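
Looking back at literal_data_raw() above: fast-import's "data <count>" header counts bytes, not characters, which is why the payload is downgraded and STDOUT is switched to :raw around the write. A standalone sketch of that framing (the sample string is illustrative):

    use bytes ();                        # makes bytes::length() available
    my $content = "caf\x{e9}\n";         # payload with one non-ASCII character
    utf8::downgrade($content);           # force a byte-oriented string
    binmode STDOUT, ':raw';
    print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
    binmode STDOUT, ':encoding(UTF-8)';  # restore the default layer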
sub fetch_mw_revisions_for_page {
@@ -666,6 +625,9 @@ sub fetch_mw_revisions_for_page {
rvstartid => $fetch_from,
rvlimit => 500,
pageids => $id,
+
+ # Let MediaWiki know that we support the latest API.
+ continue => '',
};
my $revnum = 0;
@@ -681,15 +643,22 @@ sub fetch_mw_revisions_for_page {
push(@page_revs, $page_rev_ids);
$revnum++;
}
- last unless $result->{'query-continue'};
- $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+
+ if ($result->{'query-continue'}) { # For legacy APIs
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ } elsif ($result->{continue}) { # For newer APIs
+ $query->{rvstartid} = $result->{continue}->{rvcontinue};
+ $query->{continue} = $result->{continue}->{continue};
+ } else {
+ last;
+ }
}
if ($shallow_import && @page_revs) {
- print STDERR " Found 1 revision (shallow import).\n";
+ print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
- print STDERR " Found ", $revnum, " revision(s).\n";
+ print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
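
The loop above now handles both MediaWiki continuation styles: the legacy query-continue block and the modern continue block advertised by the empty continue parameter. A condensed sketch of the same pagination pattern (assumes $mediawiki is a connected MediaWiki::API instance; the page id is illustrative):

    my $query = {
        action   => 'query',
        prop     => 'revisions',
        rvprop   => 'ids',
        rvlimit  => 500,
        pageids  => 42,      # illustrative page id
        continue => '',      # advertise support for the newer API
    };
    while (1) {
        my $result = $mediawiki->api($query);
        # ... consume $result->{query}->{pages} here ...
        if ($result->{'query-continue'}) {       # legacy continuation
            $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
        } elsif ($result->{continue}) {          # modern continuation
            $query->{rvstartid} = $result->{continue}->{rvcontinue};
            $query->{continue}  = $result->{continue}->{continue};
        } else {
            last;                                # no more batches
        }
    }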
@@ -701,8 +670,7 @@ sub fetch_mw_revisions {
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
-
- print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
@@ -716,7 +684,7 @@ sub fe_escape_path {
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
- return '"' . $path . '"';
+ return qq("${path}");
}
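
fe_escape_path() produces the C-style quoted paths that fast-import expects; some illustrative input/output pairs:

    # fe_escape_path('Main_Page.mw')   returns  "Main_Page.mw"
    # fe_escape_path('A "quote".mw')   returns  "A \"quote\".mw"
    # fe_escape_path("Two\nlines.mw")  returns  "Two\nlines.mw"  (backslash-n, literally)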
sub import_file_revision {
@@ -736,42 +704,43 @@ sub import_file_revision {
my $author = $commit{author};
my $date = $commit{date};
- print STDOUT "commit refs/mediawiki/$remotename/master\n";
- print STDOUT "mark :$n\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
if (!$full_import && $n == 1) {
- print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
- print STDOUT "M 644 inline " .
- fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'M 644 inline ' .
+ fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
- print STDOUT "M 644 inline "
+ print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
} else {
- print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# mediawiki revision number in the git note
if ($full_import && $n == 1) {
- print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
- print STDOUT "commit refs/notes/$remotename/mediawiki\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
- literal_data("Note added by git-mediawiki during import");
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
- print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
- print STDOUT "N inline :$n\n";
- literal_data("mediawiki_revision: " . $commit{mw_revision});
- print STDOUT "\n\n";
+ print {*STDOUT} "N inline :${n}\n";
+ literal_data("mediawiki_revision: $commit{mw_revision}");
+ print {*STDOUT} "\n\n";
+ return;
}
# parse a sequence of
@@ -784,23 +753,25 @@ sub get_more_refs {
my @refs;
while (1) {
my $line = <STDIN>;
- if ($line =~ m/^$cmd (.*)$/) {
+ if ($line =~ /^$cmd (.*)$/) {
push(@refs, $1);
} elsif ($line eq "\n") {
return @refs;
} else {
- die("Invalid command in a '$cmd' batch: ". $_);
+ die("Invalid command in a '$cmd' batch: $_\n");
}
}
+ return;
}
sub mw_import {
# multiple import commands can follow each other.
- my @refs = (shift, get_more_refs("import"));
+ my @refs = (shift, get_more_refs('import'));
foreach my $ref (@refs) {
mw_import_ref($ref);
}
- print STDOUT "done\n";
+ print {*STDOUT} "done\n";
+ return;
}
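
mw_import() relies on get_more_refs() to drain the rest of the batch: Git may send several import commands back to back, terminated by a blank line. An illustrative stdin transcript for one batch:

    # import refs/heads/master
    # import refs/heads/master      <- duplicates are possible
    # <blank line>
    #
    # The first ref arrives as mw_import's argument; get_more_refs('import')
    # collects the rest until the blank line.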
sub mw_import_ref {
@@ -810,40 +781,41 @@ sub mw_import_ref {
# Since HEAD is a symbolic ref to master (by convention,
# followed by the output of the command "list" that we gave),
# we don't need to do anything in this case.
- if ($ref eq "HEAD") {
+ if ($ref eq 'HEAD') {
return;
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Searching revisions...\n";
+ print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
- print STDERR ", fetching from beginning.\n";
+ print {*STDERR} ", fetching from beginning.\n";
} else {
- print STDERR ", fetching from here.\n";
+ print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
- if ($fetch_strategy eq "by_rev") {
- print STDERR "Fetching & writing export data by revs...\n";
+ if ($fetch_strategy eq 'by_rev') {
+ print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
- } elsif ($fetch_strategy eq "by_page") {
- print STDERR "Fetching & writing export data by pages...\n";
+ } elsif ($fetch_strategy eq 'by_page') {
+ print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
- print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
- print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done remote-helper side. If nothing is done, an error is
# thrown saying that HEAD is referring to unknown object 0000000000000000000
# and the clone fails.
}
+ return;
}
sub mw_import_ref_by_pages {
@@ -855,7 +827,7 @@ sub mw_import_ref_by_pages {
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
- my @revision_ids = map $_->{revid}, @revisions;
+ my @revision_ids = map { $_->{revid} } @revisions;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
@@ -882,7 +854,7 @@ sub mw_import_revids {
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
@@ -898,7 +870,7 @@ sub mw_import_revids {
my $result = $mediawiki->api($query);
if (!$result) {
- die "Failed to retrieve modified page for revision $pagerevid";
+ die "Failed to retrieve modified page for revision $pagerevid\n";
}
if (defined($result->{query}->{badrevids}->{$pagerevid})) {
@@ -907,7 +879,7 @@ sub mw_import_revids {
}
if (!defined($result->{query}->{pages})) {
- die "Invalid revision $pagerevid.";
+ die "Invalid revision ${pagerevid}.\n";
}
my @result_pages = values(%{$result->{query}->{pages}});
@@ -917,8 +889,8 @@ sub mw_import_revids {
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print STDERR "$n/", scalar(@$revision_ids),
- ": Skipping revision #$rev->{revid} of $page_title\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+ ": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
@@ -927,7 +899,7 @@ sub mw_import_revids {
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
- $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{title} = smudge_filename($page_title);
$commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
@@ -943,14 +915,14 @@ sub mw_import_revids {
my %mediafile;
if ($namespace) {
my $id = get_mw_namespace_id($namespace);
- if ($id && $id == get_mw_namespace_id("File")) {
+ if ($id && $id == get_mw_namespace_id('File')) {
%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
}
}
# If this is a revision of the media page for a new version
# of a file, do one common commit for both the file and the media page.
# Otherwise, commit only that page.
- print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
@@ -958,17 +930,17 @@ sub mw_import_revids {
}
sub error_non_fast_forward {
- my $advice = run_git("config --bool advice.pushNonFastForward");
+ my $advice = run_git('config --bool advice.pushNonFastForward');
chomp($advice);
- if ($advice ne "false") {
+ if ($advice ne 'false') {
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
- print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
- print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
@@ -979,34 +951,34 @@ sub mw_upload_file {
my $file_deleted = shift;
my $summary = shift;
my $newrevid;
- my $path = "File:" . $complete_file_name;
+ my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
- print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
- print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a privileged user
if ($file_deleted) {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'delete',
title => $path,
reason => $summary
};
if (!$mediawiki->edit($query)) {
- print STDERR "Failed to delete file on remote wiki\n";
- print STDERR "Check your permissions on the remote site. Error code:\n";
- print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
- my $content = run_git("cat-file blob $new_sha1", "raw");
- if ($content ne "") {
- mw_connect_maybe();
+ my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+ if ($content ne EMPTY) {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
$mediawiki->{config}->{upload_url} =
- "$url/index.php/Special:Upload";
+ "${url}/index.php/Special:Upload";
$mediawiki->edit({
action => 'upload',
filename => $complete_file_name,
@@ -1018,12 +990,12 @@ sub mw_upload_file {
}, {
skip_encoding => 1
} ) || die $mediawiki->{error}->{code} . ':'
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
- print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
- print STDERR "Empty file $complete_file_name not pushed.\n";
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
@@ -1045,24 +1017,24 @@ sub mw_push_file {
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
- $summary = '';
+ $summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_created = ($old_sha1 eq NULL_SHA1);
my $page_deleted = ($new_sha1 eq NULL_SHA1);
- $complete_file_name = mediawiki_clean_filename($complete_file_name);
+ $complete_file_name = clean_filename($complete_file_name);
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
- $extension = "";
+ $extension = EMPTY;
}
- if ($extension eq "mw") {
+ if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
- if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
- print STDERR "Ignoring media file related page: $complete_file_name\n";
- return ($oldrevid, "ok");
+ if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+ return ($oldrevid, 'ok');
}
my $file_content;
if ($page_deleted) {
@@ -1072,10 +1044,10 @@ sub mw_push_file {
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
- $file_content = run_git("cat-file blob $new_sha1");
+ $file_content = run_git("cat-file blob ${new_sha1}");
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $result = $mediawiki->edit( {
action => 'edit',
@@ -1089,49 +1061,49 @@ sub mw_push_file {
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
- print STDERR 'Warning: Error ' .
+ print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($oldrevid, "non-fast-forward");
+ return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details};
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
- print STDERR "Pushed file: $new_sha1 - $title\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
- print STDERR "Ignoring media file $title\n";
+ print {*STDERR} "Ignoring media file ${title}\n";
}
$newrevid = ($newrevid or $oldrevid);
- return ($newrevid, "ok");
+ return ($newrevid, 'ok');
}
sub mw_push {
# multiple push statements can follow each other
- my @refsspecs = (shift, get_more_refs("push"));
+ my @refsspecs = (shift, get_more_refs('push'));
my $pushed;
for my $refspec (@refsspecs) {
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
- or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
- print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
- if ($local eq "") {
- print STDERR "Cannot delete remote branch on a MediaWiki\n";
- print STDOUT "error $remote cannot delete\n";
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
- if ($remote ne "refs/heads/master") {
- print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
- print STDOUT "error $remote only master allowed\n";
+ if ($remote ne 'refs/heads/master') {
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
@@ -1140,30 +1112,32 @@ sub mw_push {
}
# Notify Git that the push is done
- print STDOUT "\n";
+ print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
- print STDERR "Just pushed some revisions to MediaWiki.\n";
- print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
- print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
- print STDERR "\n";
- print STDERR " git pull --rebase\n";
- print STDERR "\n";
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
}
+ return;
}
sub mw_push_revision {
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
- print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
- my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+ chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
- my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
@@ -1182,22 +1156,22 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
- print STDERR "Computing path from local to remote ...\n";
- my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ print {*STDERR} "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
foreach my $line (@local_ancestry) {
- if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
- foreach my $parent (split(' ', $parents)) {
+ if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(/ /, $parents)) {
$local_ancestry{$parent} = $child;
}
- } elsif (!$line =~ m/^([a-f0-9]+)/) {
- die "Unexpected output from git rev-list: $line";
+ } elsif (!$line =~ /^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: ${line}\n";
}
}
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
- printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
@@ -1206,12 +1180,12 @@ sub mw_push_revision {
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
- print STDERR "Warning: no common ancestor, pushing complete history\n";
- my $history = run_git("rev-list --first-parent --children $local");
- my @history = split('\n', $history);
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children ${local}");
+ my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
- my @commit_info_split = split(/ |\n/, $line);
+ my @commit_info_split = split(/[ \n]/, $line);
push(@commit_pairs, \@commit_info_split);
}
}
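
The ancestry code above inverts the parent lists printed by git rev-list --boundary --parents into a parent-to-child map, then walks it from the remote tip towards HEAD. A standalone sketch with a canned rev-list line (sha1s are illustrative):

    my %local_ancestry;
    my $canned = "-aaa111 bbb222 ccc333\nbbb222 ddd444\n";   # fake rev-list output
    foreach my $line (split /\n/, $canned) {
        if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
            $local_ancestry{$_} = $child foreach split / /, $parents;
        }
    }
    # %local_ancestry now maps each parent sha1 to its child, so looking
    # up $local_ancestry{$sha1} repeatedly steps the walk towards HEAD.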
@@ -1219,12 +1193,12 @@ sub mw_push_revision {
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
- my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as mediawiki comment for the revision
- my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
+ my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -1236,7 +1210,7 @@ sub mw_push_revision {
my $info = shift(@diff_info_list);
my $file = shift(@diff_info_list);
($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
- if ($status eq "non-fast-forward") {
+ if ($status eq 'non-fast-forward') {
# we may already have sent part of the
# commit to MediaWiki, but it's too
# late to cancel it. Stop the push in
@@ -1244,22 +1218,21 @@ sub mw_push_revision {
# accurate error message.
return error_non_fast_forward($remote);
}
- if ($status ne "ok") {
- die("Unknown error from mw_push_file()");
+ if ($status ne 'ok') {
+ die("Unknown error from mw_push_file()\n");
}
}
- unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
- run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ if (!$dumb_push) {
+ run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
}
}
- print STDOUT "ok $remote\n";
+ print {*STDOUT} "ok ${remote}\n";
return 1;
}
sub get_allowed_file_extensions {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -1267,8 +1240,8 @@ sub get_allowed_file_extensions {
siprop => 'fileextensions'
};
my $result = $mediawiki->api($query);
- my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
- my %hashFile = map {$_ => 1}@file_extensions;
+ my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+ my %hashFile = map { $_ => 1 } @file_extensions;
return %hashFile;
}
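
The two map calls above are the usual list-to-set idiom: the first extracts the ext field from each API record, the second builds a hash whose keys give O(1) membership tests. A tiny sketch with canned data:

    my @records = ({ ext => 'png' }, { ext => 'jpg' }, { ext => 'svg' });
    my @file_extensions = map { $_->{ext} } @records;
    my %hashFile = map { $_ => 1 } @file_extensions;
    print "png allowed\n" if $hashFile{png};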
@@ -1283,15 +1256,15 @@ my %cached_mw_namespace_id;
# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $name = shift;
if (!exists $namespace_id{$name}) {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
- my @temp = split(/[\n]/, run_git("config --get-all remote."
- . $remotename .".namespaceCache"));
+ my @temp = split(/\n/,
+ run_git("config --get-all remote.${remotename}.namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
@@ -1305,7 +1278,7 @@ sub get_mw_namespace_id {
}
if (!exists $namespace_id{$name}) {
- print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
@@ -1329,8 +1302,8 @@ sub get_mw_namespace_id {
my $ns = $namespace_id{$name};
my $id;
- unless (defined $ns) {
- print STDERR "No such namespace $name on MediaWiki.\n";
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}
@@ -1344,15 +1317,15 @@ sub get_mw_namespace_id {
# Store explicitly requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
- run_git("config --add remote.". $remotename
- .".namespaceCache \"". $name .":". $store_id ."\"");
+ run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
$cached_mw_namespace_id{$name} = 1;
}
return $id;
}
sub get_mw_namespace_id_for_page {
- if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ my $namespace = shift;
+ if ($namespace =~ /^([^:]*):/) {
return get_mw_namespace_id($namespace);
} else {
return;