Diffstat (limited to 'contrib/mw-to-git')
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl          |  7
-rwxr-xr-x  contrib/mw-to-git/t/install-wiki.sh                  | 10
-rwxr-xr-x  contrib/mw-to-git/t/t9360-mw-to-git-clone.sh         | 14
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh          |  4
-rwxr-xr-x  contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 19
-rwxr-xr-x  contrib/mw-to-git/t/t9365-continuing-queries.sh      |  2
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh                |  8
7 files changed, 49 insertions(+), 15 deletions(-)
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 3f8d993afa..8dd74a9a40 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -461,7 +461,12 @@ sub download_mw_mediafile {
my $response = $mediawiki->{ua}->get($download_url);
if ($response->code == HTTP_CODE_OK) {
- return $response->decoded_content;
+ # It is tempting to return
+ # $response->decoded_content({charset => "none"}), but
+ # when doing so, utf8::downgrade($content) fails with
+ # "Wide character in subroutine entry".
+ $response->decode();
+ return $response->content();
} else {
print {*STDERR} "Error downloading mediafile from :\n";
print {*STDERR} "URL: ${download_url}\n";
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
index 70a53f67fd..c215213c4b 100755
--- a/contrib/mw-to-git/t/install-wiki.sh
+++ b/contrib/mw-to-git/t/install-wiki.sh
@@ -20,6 +20,8 @@ usage () {
echo " install | -i : Install a wiki on your computer."
echo " delete | -d : Delete the wiki and all its pages and "
echo " content."
+ echo " start | -s : Start the previously configured lighttpd daemon"
+ echo " stop : Stop lighttpd daemon."
}
@@ -33,6 +35,14 @@ case "$1" in
wiki_delete
exit 0
;;
+ "start" | "-s")
+ start_lighttpd
+ exit
+ ;;
+ "stop")
+ stop_lighttpd
+ exit
+ ;;
"--help" | "-h")
usage
exit 0
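
For reference, a usage sketch of the new start/stop subcommands, assuming the script is run from contrib/mw-to-git/t with lighttpd enabled in test.config (illustrative only, not part of the patch):

./install-wiki.sh install   # or -i: one-time setup of the test wiki
./install-wiki.sh start     # or -s: restart the previously configured lighttpd daemon
# ... run the t936*.sh tests against the running wiki ...
./install-wiki.sh stop      # kill the lighttpd daemon recorded in "$WEB_TMP/pid"
./install-wiki.sh delete    # or -d: delete the wiki and all its pages
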
diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
index 811a90c9ae..22f069db48 100755
--- a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
+++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
@@ -191,10 +191,10 @@ test_expect_success 'Git clone works with the shallow option' '
test_path_is_file mw_dir_11/Main_Page.mw &&
(
cd mw_dir_11 &&
- test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
- test `git log --oneline Foo.mw | wc -l` -eq 1 &&
- test `git log --oneline Bar.mw | wc -l` -eq 1 &&
- test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Foo.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
) &&
wiki_check_content mw_dir_11/Nyan.mw Nyan &&
wiki_check_content mw_dir_11/Foo.mw Foo &&
@@ -218,9 +218,9 @@ test_expect_success 'Git clone works with the shallow option with a delete page'
test_path_is_file mw_dir_12/Main_Page.mw &&
(
cd mw_dir_12 &&
- test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
- test `git log --oneline Bar.mw | wc -l` -eq 1 &&
- test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
) &&
wiki_check_content mw_dir_12/Nyan.mw Nyan &&
wiki_check_content mw_dir_12/Bar.mw Bar &&
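
The `...` to $(...) conversions here and in the scripts below follow Git's CodingGuidelines, which prefer $( ... ) for command substitution because it nests properly. A small illustrative comparison (not part of the patch):

# $(...) nests without extra escaping:
test $(git log --oneline -- Foo.mw | wc -l) -eq 1
top=$(basename "$(git rev-parse --show-toplevel)")
# the backtick equivalent of the second line needs escaped inner backticks:
# top=`basename \`git rev-parse --show-toplevel\``
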
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
index 37021e200a..6b0dbdac4d 100755
--- a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
+++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -70,8 +70,8 @@ test_expect_success 'The shallow option works with accents' '
test_path_is_file mw_dir_4/Main_Page.mw &&
(
cd mw_dir_4 &&
- test `git log --oneline Néoà.mw | wc -l` -eq 1 &&
- test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ test $(git log --oneline Néoà.mw | wc -l) -eq 1 &&
+ test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
) &&
wiki_check_content mw_dir_4/Néoà.mw Néoà &&
wiki_check_content mw_dir_4/Main_Page.mw Main_Page
diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
index 5a0373935f..3ff3a09567 100755
--- a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
+++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
@@ -58,6 +58,25 @@ test_expect_success 'git clone works on previously created wiki with media files
test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
'
+test_expect_success 'git push can upload media (File:) files containing valid UTF-8' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ "$PERL_PATH" -e "print STDOUT \"UTF-8 content: éèàéê€.\";" >Bar.txt &&
+ git add Bar.txt &&
+ git commit -m "add a text file with UTF-8 content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files containing valid UTF-8' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Bar.txt mw_dir/Bar.txt
+'
+
test_expect_success 'git push & pull work with locally renamed media files' '
wiki_reset &&
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
diff --git a/contrib/mw-to-git/t/t9365-continuing-queries.sh b/contrib/mw-to-git/t/t9365-continuing-queries.sh
index 27e267f532..016454749f 100755
--- a/contrib/mw-to-git/t/t9365-continuing-queries.sh
+++ b/contrib/mw-to-git/t/t9365-continuing-queries.sh
@@ -9,7 +9,7 @@ test_check_precond
test_expect_success 'creating page w/ >500 revisions' '
wiki_reset &&
- for i in `test_seq 501`
+ for i in $(test_seq 501)
do
echo "creating revision $i" &&
wiki_editpage foo "revision $i<br/>" true
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
index 3372b2af34..6546294f15 100755
--- a/contrib/mw-to-git/t/test-gitmw-lib.sh
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -90,7 +90,7 @@ test_diff_directories () {
#
# Check that <dir> contains exactly <N> files
test_contains_N_files () {
- if test `ls -- "$1" | wc -l` -ne "$2"; then
+ if test $(ls -- "$1" | wc -l) -ne "$2"; then
echo "directory $1 should contain $2 files"
echo "it contains these files:"
ls "$1"
@@ -289,7 +289,6 @@ start_lighttpd () {
# Kill daemon lighttpd and removes files and folders associated.
stop_lighttpd () {
test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
- rm -rf "$WEB"
}
# Create the SQLite database of the MediaWiki. If the database file already
@@ -341,10 +340,10 @@ wiki_install () {
"http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
"$MW_FILENAME. "\
"Please fix your connection and launch the script again."
- echo "$MW_FILENAME downloaded in `pwd`. "\
+ echo "$MW_FILENAME downloaded in $(pwd). "\
"You can delete it later if you want."
else
- echo "Reusing existing $MW_FILENAME downloaded in `pwd`."
+ echo "Reusing existing $MW_FILENAME downloaded in $(pwd)."
fi
archive_abs_path=$(pwd)/$MW_FILENAME
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
@@ -415,6 +414,7 @@ wiki_reset () {
wiki_delete () {
if test $LIGHTTPD = "true"; then
stop_lighttpd
+ rm -fr "$WEB"
else
# Delete the wiki's directory.
rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||