#! /usr/bin/perl
# Copyright (C) 2011
# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later
# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
use strict;
use MediaWiki::API;
use Git;
use DateTime::Format::ISO8601;
use warnings;
# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';
use URI::Escape;
# MediaWiki filenames can contain forward slashes. This constant defines the pattern used to replace them.
use constant SLASH_REPLACEMENT => '%2F';
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# Used to reflect file creation or deletion in a diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
use constant EMPTY => q{};
# Number of pages handled at once by the subroutine get_mw_page_list.
use constant SLICE_SIZE => 50;
# Number of linked media files to get at once in get_linked_mediafiles.
# The query is split into small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;
use constant HTTP_CODE_OK => 200;
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated lists and multiple config keys.
# Spaces in page titles must be written as underscores anyway,
# since the list is split on whitespace.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');
# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');
my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when the wiki has many pages and fetches happen often
# enough that each one concerns only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
$fetch_strategy = 'by_page';
}
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');
my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and the '@' sign, to avoid authors like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
# Commands parser
while (<STDIN>) {
chomp;
if (!parse_command($_)) {
last;
}
BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
# command is fully processed.
}
########################## Functions ##############################
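# Parse one command line read from Git on stdin and dispatch it.
# Returns 0 to stop the command loop, 1 to keep reading.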
sub parse_command {
my ($line) = @_;
my @cmd = split(/ /, $line);
if (!defined $cmd[0]) {
return 0;
}
if ($cmd[0] eq 'capabilities') {
die("Too many arguments for capabilities\n")
if (defined($cmd[1]));
mw_capabilities();
} elsif ($cmd[0] eq 'list') {
die("Too many arguments for list\n") if (defined($cmd[2]));
mw_list($cmd[1]);
} elsif ($cmd[0] eq 'import') {
die("Invalid argument for import\n")
if ($cmd[1] eq EMPTY);
die("Too many arguments for import\n")
if (defined($cmd[2]));
mw_import($cmd[1]);
} elsif ($cmd[0] eq 'option') {
die("Invalid arguments for option\n")
if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
die("Too many arguments for option\n")
if (defined($cmd[3]));
mw_option($cmd[1],$cmd[2]);
} elsif ($cmd[0] eq 'push') {
mw_push($cmd[1]);
} else {
print {*STDERR} "Unknown command. Aborting...\n";
return 0;
}
return 1;
}
# MediaWiki API instance, created lazily.
my $mediawiki;
sub mw_connect_maybe {
if ($mediawiki) {
return;
}
$mediawiki = MediaWiki::API->new;
$mediawiki->{config}->{api_url} = "${url}/api.php";
if ($wiki_login) {
my %credential = (
'url' => $url,
'username' => $wiki_login,
'password' => $wiki_passwd
);
Git::credential(\%credential);
my $request = {lgname => $credential{username},
lgpassword => $credential{password},
lgdomain => $wiki_domain};
if ($mediawiki->login($request)) {
Git::credential(\%credential, 'approve');
print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
} else {
print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
print {*STDERR} ' (error ' .
$mediawiki->{error}->{code} . ': ' .
$mediawiki->{error}->{details} . ")\n";
Git::credential(\%credential, 'reject');
exit 1;
}
}
return;
}
sub fatal_mw_error {
my $action = shift;
print STDERR "fatal: could not $action.\n";
print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
if ($url =~ /^https/) {
print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
print STDERR "fatal: and the SSL certificate is correct.\n";
} else {
print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
}
print STDERR "fatal: (error " .
$mediawiki->{error}->{code} . ': ' .
$mediawiki->{error}->{details} . ")\n";
exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
my $pages = shift;
get_mw_page_list(\@tracked_pages, $pages);
return;
}
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
my @some_pages = @{$page_list};
while (@some_pages) {
my $last_page = SLICE_SIZE;
if ($#some_pages < $last_page) {
$last_page = $#some_pages;
}
my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
@some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
return;
}
sub get_mw_tracked_categories {
my $pages = shift;
foreach my $category (@tracked_categories) {
if (index($category, ':') < 0) {
# Mediawiki requires the Category
# prefix, but let's not force the user
# to specify it.
$category = "Category:${category}";
}
my $mw_pages = $mediawiki->list( {
action => 'query',
list => 'categorymembers',
cmtitle => $category,
cmlimit => 'max' } )
|| die $mediawiki->{error}->{code} . ': '
. $mediawiki->{error}->{details} . "\n";
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
}
return;
}
sub get_mw_all_pages {
my $pages = shift;
# No user-provided list, get the list of pages from the API.
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
aplimit => 'max'
});
if (!defined($mw_pages)) {
fatal_mw_error("get the list of wiki pages");
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
return;
}
# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of a page list.
sub get_mw_first_pages {
my $some_pages = shift;
my @some_pages = @{$some_pages};
my $pages = shift;
# pattern 'page1|page2|...' required by the API
my $titles = join('|', @some_pages);
my $mw_pages = $mediawiki->api({
action => 'query',
titles => $titles,
});
if (!defined($mw_pages)) {
fatal_mw_error("query the list of wiki pages");
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
}
return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
mw_connect_maybe();
print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
if (@tracked_pages) {
$user_defined = 1;
# The user provided a list of page titles, but we
# still need to query the API to get the page IDs.
get_mw_tracked_pages(\%pages);
}
if (@tracked_categories) {
$user_defined = 1;
get_mw_tracked_categories(\%pages);
}
if (!$user_defined) {
get_mw_all_pages(\%pages);
}
if ($import_media) {
print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
# usage: $out = run_git("command args");
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
my $args = shift;
my $encoding = (shift || 'encoding(UTF-8)');
open(my $git, "-|:${encoding}", "git ${args}")
or die "Unable to fork: $!\n";
my $res = do {
local $/ = undef;
<$git>
};
close($git);
return $res;
}
sub get_all_mediafiles {
my $pages = shift;
# Attach the list of all media file pages from the API.
# They live in a different namespace, and only one
# namespace can be queried at a time.
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
apnamespace => get_mw_namespace_id('File'),
aplimit => 'max'
});
if (!defined($mw_pages)) {
print {*STDERR} "fatal: could not get the list of pages for media files.\n";
print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
return;
}
sub get_linked_mediafiles {
my $pages = shift;
my @titles = map { $_->{title} } values(%{$pages});
my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
}
my @slice = @titles[0..$batch];
# pattern 'page1|page2|...' required by the API
my $mw_titles = join('|', @slice);
# Media files can be included in or linked from
# a page; get all of them.
my $query = {
action => 'query',
prop => 'links|images',
titles => $mw_titles,
plnamespace => get_mw_namespace_id('File'),
pllimit => 'max'
};
my $result = $mediawiki->api($query);
while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
my @media_titles;
if (defined($page->{links})) {
my @link_titles
= map { $_->{title} } @{$page->{links}};
push(@media_titles, @link_titles);
}
if (defined($page->{images})) {
my @image_titles
= map { $_->{title} } @{$page->{images}};
push(@media_titles, @image_titles);
}
if (@media_titles) {
get_mw_page_list(\@media_titles, $pages);
}
}
@titles = @titles[($batch+1)..$#titles];
}
return;
}
sub get_mw_mediafile_for_page_revision {
# Name of the file on the wiki, without the "File:" prefix.
my $filename = shift;
my $timestamp = shift;
my %mediafile;
# Check whether a media file with the given timestamp exists on
# MediaWiki. If so, download the file.
my $query = {
action => 'query',
prop => 'imageinfo',
titles => "File:${filename}",
iistart => $timestamp,
iiend => $timestamp,
iiprop => 'timestamp|archivename|url',
iilimit => 1
};
my $result = $mediawiki->api($query);
my ($fileid, $file) = each( %{$result->{query}->{pages}} );
# If not defined, there is no revision of the file for the
# given timestamp.
if (defined($file->{imageinfo})) {
$mediafile{title} = $filename;
my $fileinfo = pop(@{$file->{imageinfo}});
$mediafile{timestamp} = $fileinfo->{timestamp};
# Mediawiki::API's download function doesn't support https URLs
# and can't download old versions of files.
print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
}
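# Download a media file from the given URL, reusing the user agent of the
# MediaWiki::API object.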
sub download_mw_mediafile {
my $download_url = shift;
my $response = $mediawiki->{ua}->get($download_url);
if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
print {*STDERR} "Error downloading mediafile from :\n";
print {*STDERR} "URL: ${download_url}\n";
print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
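# Return the MediaWiki revision number recorded in the Git note attached to
# the last imported commit, or 0 if no note is found.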
sub get_last_local_revision {
# Get note regarding last mediawiki revision
my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
my @note_info = split(/ /, $note);
my $lastrevision_number;
if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted as: mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
mw_connect_maybe();
my $query = {
action => 'query',
list => 'recentchanges',
prop => 'revisions',
rclimit => '1',
rcdir => 'older',
};
my $result = $mediawiki->api($query);
return $result->{query}->{recentchanges}[0]->{revid};
}
# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
mw_connect_maybe();
my %pages_hash = get_mw_pages();
my @pages = values(%pages_hash);
my $max_rev_num = 0;
print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
my $query = {
action => 'query',
prop => 'revisions',
rvprop => 'ids|timestamp',
pageids => $id,
};
my $result = $mediawiki->api($query);
my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
# Clean content before sending it to MediaWiki
sub mediawiki_clean {
my $string = shift;
my $page_created = shift;
# MediaWiki does not allow trailing whitespace at the end of a page; pages end with a single \n.
# This function right-trims the string and appends a \n to follow this rule.
$string =~ s/\s+$//;
if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
return $string."\n";
}
# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
$string = EMPTY;
}
# This \n is important. This is due to MediaWiki's way of handling end of file.
return "${string}\n";
}
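# Turn a Git-side filename back into the corresponding MediaWiki page title
# (used when pushing).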
sub mediawiki_clean_filename {
my $filename = shift;
$filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
# Do a variant of URL-encoding, i.e. something that looks like URL-encoding,
# but with _ added to prevent MediaWiki from thinking this is
# an actual special character.
$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
# If we used URI escaping earlier, we should
# unescape here, before anything else.
return $filename;
}
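# Turn a MediaWiki page title into the corresponding Git-side filename
# (used when importing).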
sub mediawiki_smudge_filename {
my $filename = shift;
$filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
$filename =~ s/ /_/g;
# Decode forbidden characters encoded in mediawiki_clean_filename
$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
return $filename;
}
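# Output a fast-import 'data' command for the given content (size counted in bytes).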
sub literal_data {
my ($content) = @_;
print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
return;
}
sub literal_data_raw {
# Output possibly binary content.
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
binmode {*STDOUT}, ':raw';
print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
binmode {*STDOUT}, ':encoding(UTF-8)';
return;
}
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
print {*STDOUT} "import\n";
print {*STDOUT} "list\n";
print {*STDOUT} "push\n";
print {*STDOUT} "\n";
return;
}
sub mw_list {
# MediaWiki does not have branches; we arbitrarily consider one branch
# called master, with HEAD pointing to it.
print {*STDOUT} "? refs/heads/master\n";
print {*STDOUT} "\@refs/heads/master HEAD\n";
print {*STDOUT} "\n";
return;
}
sub mw_option {
print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
print {*STDOUT} "unsupported\n";
return;
}
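# Collect the revisions of one page starting at $fetch_from, following
# 'query-continue' to work around the 500-revision API limit. With shallow
# import, only the most recent revision is kept.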
sub fetch_mw_revisions_for_page {
my $page = shift;
my $id = shift;
my $fetch_from = shift;
my @page_revs = ();
my $query = {
action => 'query',
prop => 'revisions',
rvprop => 'ids',
rvdir => 'newer',
rvstartid => $fetch_from,
rvlimit => 500,
pageids => $id,
};
my $revnum = 0;
# Get 500 revisions at a time due to the mediawiki api limit
while (1) {
my $result = $mediawiki->api($query);
# Parse each of those 500 revisions
foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
my $page_rev_ids;
$page_rev_ids->{pageid} = $page->{pageid};
$page_rev_ids->{revid} = $revision->{revid};
push(@page_revs, $page_rev_ids);
$revnum++;
}
last if (!$result->{'query-continue'});
$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
}
if ($shallow_import && @page_revs) {
print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
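# Collect new revisions for all given pages; returns the page counter and
# the combined list of revisions.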
sub fetch_mw_revisions {
my $pages = shift; my @pages = @{$pages};
my $fetch_from = shift;
my @revisions = ();
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
}
return ($n, @revisions);
}
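# Quote a path for use in a fast-import stream, escaping backslashes,
# double quotes and newlines.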
sub fe_escape_path {
my $path = shift;
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
return qq("${path}");
}
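# Write one wiki revision to the fast-import stream as a commit on
# refs/mediawiki/$remotename/master, together with a note on
# refs/notes/$remotename/mediawiki recording the MediaWiki revision number.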
sub import_file_revision {
my $commit = shift;
my %commit = %{$commit};
my $full_import = shift;
my $n = shift;
my $mediafile = shift;
my %mediafile;
if ($mediafile) {
%mediafile = %{$mediafile};
}
my $title = $commit{title};
my $comment = $commit{comment};
my $content = $commit{content};
my $author = $commit{author};
my $date = $commit{date};
print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
print {*STDOUT} "mark :${n}\n";
print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
if (!$full_import && $n == 1) {
print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
print {*STDOUT} 'M 644 inline ' .
fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
print {*STDOUT} "\n\n";
} else {
print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# Store the MediaWiki revision number in a Git note.
if ($full_import && $n == 1) {
print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
print {*STDOUT} "N inline :${n}\n";
literal_data("mediawiki_revision: $commit{mw_revision}");
print {*STDOUT} "\n\n";
return;
}
# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of 'import' statements, or a sequence of 'push' statements)
sub get_more_refs {
my $cmd = shift;
my @refs;
while (1) {
my $line = <STDIN>;
if ($line =~ /^$cmd (.*)$/) {
push(@refs, $1);
} elsif ($line eq "\n") {
return @refs;
} else {
die("Invalid command in a '$cmd' batch: $_\n");
}
}
return;
}
sub mw_import {
# multiple import commands can follow each other.
my @refs = (shift, get_more_refs('import'));
foreach my $ref (@refs) {
mw_import_ref($ref);
}
print {*STDOUT} "done\n";
return;
}
sub mw_import_ref {
my $ref = shift;
# The remote helper will call "import HEAD" and
# "import refs/heads/master".
# Since HEAD is a symbolic ref to master (by convention, and
# according to the output of the "list" command we gave above),
# we don't need to do anything in this case.
if ($ref eq 'HEAD') {
return;
}
mw_connect_maybe();
print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
print {*STDERR} ", fetching from beginning.\n";
} else {
print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
if ($fetch_strategy eq 'by_rev') {
print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
} elsif ($fetch_strategy eq 'by_page') {
print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done on the remote-helper side. If nothing is done,
# an error is thrown saying that HEAD refers to the unknown object
# 0000000000000000000 and the clone fails.
}
return;
}
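# 'by_page' strategy: list the tracked pages, fetch their new revisions,
# and import them in revision-id order.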
sub mw_import_ref_by_pages {
my $fetch_from = shift;
my %pages_hash = get_mw_pages();
my @pages = values(%pages_hash);
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
my @revision_ids = map { $_->{revid} } @revisions;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
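# 'by_rev' strategy: consider every wiki revision id between $fetch_from and
# the wiki's latest revision; revisions of untracked pages are skipped in
# mw_import_revids.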
sub mw_import_ref_by_revs {
my $fetch_from = shift;
my %pages_hash = get_mw_pages();
my $last_remote = get_last_global_remote_rev();
my @revision_ids = $fetch_from..$last_remote;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
my $fetch_from = shift;
my $revision_ids = shift;
my $pages = shift;
my $n = 0;
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->{timestamp} is undefined
foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
# fetch the content of the pages
my $query = {
action => 'query',
prop => 'revisions',
rvprop => 'content|timestamp|comment|user|ids',
revids => $pagerevid,
};
my $result = $mediawiki->api($query);
if (!$result) {
die "Failed to retrieve modified page for revision $pagerevid\n";
}
if (defined($result->{query}->{badrevids}->{$pagerevid})) {
# The revision id does not exist on the remote wiki.
next;
}
if (!defined($result->{query}->{pages})) {
die "Invalid revision ${pagerevid}.\n";
}
my @result_pages = values(%{$result->{query}->{pages}});
my $result_page = $result_pages[0];
my $rev = $result_pages[0]->{revisions}->[0];
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
print {*STDERR} "${n}/", scalar(@{$revision_ids}),
": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
$n_actual++;
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
$commit{title} = mediawiki_smudge_filename($page_title);
$commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
if (!defined($rev->{timestamp})) {
$last_timestamp++;
} else {
$last_timestamp = $rev->{timestamp};
}
$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
# Differentiate between regular pages and media files.
my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
my %mediafile;
if ($namespace) {
my $id = get_mw_namespace_id($namespace);
if ($id && $id == get_mw_namespace_id('File')) {
%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
}
}
# If this revision is the media page for a new version of a file,
# do one common commit for both the file and the media page.
# Otherwise, commit only that page.
print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
return $n_actual;
}
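# Report a non-fast-forward error for the given ref to Git, printing the
# usual advice unless advice.pushNonFastForward is false.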
sub error_non_fast_forward {
my $advice = run_git('config --bool advice.pushNonFastForward');
chomp($advice);
if ($advice ne 'false') {
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
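# Upload a media file to the wiki (or delete it if $file_deleted).
# Returns the new revision id of the file page, or undef if no new
# revision was created.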
sub mw_upload_file {
my $complete_file_name = shift;
my $new_sha1 = shift;
my $extension = shift;
my $file_deleted = shift;
my $summary = shift;
my $newrevid;
my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a privileged user
if ($file_deleted) {
mw_connect_maybe();
my $query = {
action => 'delete',
title => $path,
reason => $summary
};
if (!$mediawiki->edit($query)) {
print {*STDERR} "Failed to delete file on remote wiki\n";
print {*STDERR} "Check your permissions on the remote site. Error code:\n";
print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
my $content = run_git("cat-file blob ${new_sha1}", 'raw');
if ($content ne EMPTY) {
mw_connect_maybe();
$mediawiki->{config}->{upload_url} =
"${url}/index.php/Special:Upload";
$mediawiki->edit({
action => 'upload',
filename => $complete_file_name,
comment => $summary,
file => [undef,
$complete_file_name,
Content => $content],
ignorewarnings => 1,
}, {
skip_encoding => 1
} ) || die $mediawiki->{error}->{code} . ':'
. $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
}
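# Push one blob to the wiki, either as a page edit (for .mw files) or as a
# media file upload. Returns the resulting MediaWiki revision id and a
# status string ('ok' or 'non-fast-forward').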
sub mw_push_file {
my $diff_info = shift;
# $diff_info contains a string in this format:
# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
my @diff_info_split = split(/[ \t]/, $diff_info);
# Filename, including .mw extension
my $complete_file_name = shift;
# Commit message
my $summary = shift;
# MediaWiki revision number. Keep the previous one by default,
# in case there's no edit to perform.
my $oldrevid = shift;
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
$summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_created = ($old_sha1 eq NULL_SHA1);
my $page_deleted = ($new_sha1 eq NULL_SHA1);
$complete_file_name = mediawiki_clean_filename($complete_file_name);
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
$extension = EMPTY;
}
if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
return ($oldrevid, 'ok');
}
my $file_content;
if ($page_deleted) {
# Deleting a page usually requires
# special privileges. A common
# convention is to replace the page
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
$file_content = run_git("cat-file blob ${new_sha1}");
}
mw_connect_maybe();
my $result = $mediawiki->edit( {
action => 'edit',
summary => $summary,
title => $title,
basetimestamp => $basetimestamps{$oldrevid},
text => mediawiki_clean($file_content, $page_created),
}, {
skip_encoding => 1 # Helps with names containing accented characters
});
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
print {*STDERR} "Ignoring media file ${title}\n";
}
$newrevid = ($newrevid or $oldrevid);
return ($newrevid, 'ok');
}
sub mw_push {
# multiple push statements can follow each other
my @refsspecs = (shift, get_more_refs('push'));
my $pushed;
for my $refspec (@refsspecs) {
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
if ($local eq EMPTY) {
print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
if ($remote ne 'refs/heads/master') {
print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
$pushed = 1;
}
}
# Notify Git that the push is done
print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
print {*STDERR} "\n";
print {*STDERR} " git pull --rebase\n";
print {*STDERR} "\n";
}
return;
}
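# Push the commits between the remote-tracking branch and $local to the wiki,
# one blob at a time. Returns 1 if something was pushed, 0 otherwise.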
sub mw_push_revision {
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
$last_local_revid < $last_remote_revid) {
return error_non_fast_forward($remote);
}
if ($HEAD_sha1 eq $remoteorigin_sha1) {
# nothing to push
return 0;
}
# Get every commit in between HEAD and refs/remotes/origin/master,
# including HEAD and refs/remotes/origin/master
my @commit_pairs = ();
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
print {*STDERR} "Computing path from local to remote ...\n";
my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
foreach my $line (@local_ancestry) {
if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
foreach my $parent (split(/ /, $parents)) {
$local_ancestry{$parent} = $child;
}
} elsif ($line !~ /^([a-f0-9]+)/) {
die "Unexpected output from git rev-list: ${line}\n";
}
}
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
$parsed_sha1 = $child;
}
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
my $history = run_git("rev-list --first-parent --children ${local}");
my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
my @commit_info_split = split(/[ \n]/, $line);
push(@commit_pairs, \@commit_info_split);
}
}
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
# TODO: we could detect renames, and encode them with a #redirect on the wiki.
# TODO: for now, a rename is just a delete+add.
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as the MediaWiki comment for the revision
my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
my $status;
# git diff-tree -z gives an output like
# <metadata>\0<filename1>\0
# <metadata>\0<filename2>\0
# and we've split on \0.
my $info = shift(@diff_info_list);
my $file = shift(@diff_info_list);
($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
if ($status eq 'non-fast-forward') {
# we may already have sent part of the
# commit to MediaWiki, but it's too
# late to cancel it. Stop the push in
# the middle, but still give an
# accurate error message.
return error_non_fast_forward($remote);
}
if ($status ne 'ok') {
die("Unknown error from mw_push_file()\n");
}
}
if (!$dumb_push) {
run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
}
}
print {*STDOUT} "ok ${remote}\n";
return 1;
}
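# Ask the wiki for the list of file extensions allowed for upload; returns
# a hash mapping each allowed extension to 1.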
sub get_allowed_file_extensions {
mw_connect_maybe();
my $query = {
action => 'query',
meta => 'siteinfo',
siprop => 'fileextensions'
};
my $result = $mediawiki->api($query);
my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
my %hashFile = map { $_ => 1 } @file_extensions;
return %hashFile;
}
# In memory cache for MediaWiki namespace ids.
my %namespace_id;
# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;
# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
mw_connect_maybe();
my $name = shift;
if (!exists $namespace_id{$name}) {
# Look in the configuration file to see whether the record for that
# namespace is already cached. Namespaces are stored in the form
# "Name_of_namespace:Id_namespace", e.g. "File:6".
my @temp = split(/\n/,
run_git("config --get-all remote.${remotename}.namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
if ($id eq 'notANameSpace') {
$namespace_id{$n} = {is_namespace => 0};
} else {
$namespace_id{$n} = {is_namespace => 1, id => $id};
}
$cached_mw_namespace_id{$n} = 1;
}
}
if (!exists $namespace_id{$name}) {
print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
action => 'query',
meta => 'siteinfo',
siprop => 'namespaces'
};
my $result = $mediawiki->api($query);
while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
if (defined($ns->{id}) && defined($ns->{canonical})) {
$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
if ($ns->{'*'}) {
# alias (e.g. French Fichier: as an alias for the canonical File:)
$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
}
}
}
}
my $ns = $namespace_id{$name};
my $id;
if (!defined $ns) {
print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}
if ($ns->{is_namespace}) {
$id = $ns->{id};
}
# Store "notANameSpace" as special value for inexisting namespaces
my $store_id = ($id || 'notANameSpace');
# Store explicitly requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
$cached_mw_namespace_id{$name} = 1;
}
return $id;
}
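# Return the namespace id for a page title of the form "Namespace:Title",
# or undef if the title has no namespace prefix.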
sub get_mw_namespace_id_for_page {
my $namespace = shift;
if ($namespace =~ /^([^:]*):/) {
return get_mw_namespace_id($1);
} else {
return;
}
}