git-remote-mediawiki: put non-trivial numeric values in constants.
#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use Git;
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';

use URI::Escape;

# MediaWiki filenames can contain forward slashes. This constant
# decides by which pattern they should be replaced in Git filenames.
use constant SLASH_REPLACEMENT => '%2F';

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

use constant EMPTY => q{};

# Number of pages taken into account at once in the function
# get_mw_page_list
use constant SLICE_SIZE => 50;

# Number of linked media files to get at once in get_linked_mediafiles
# The query is split in small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;

use constant HTTP_CODE_OK => 200;

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in the config file.
# Page titles containing spaces should be written with _ anyway, since
# the list is split on whitespace.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
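
# Illustration (assuming a remote named "origin"): tracking two pages
# could be configured with
#   git config --add remote.origin.pages 'Main_Page Another_Page'
# or with one config entry per page.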

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);

# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');

my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
	$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
	$fetch_strategy = 'by_page';
}
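
# Illustration: fetching by revision can be selected globally with
#   git config mediawiki.fetchStrategy by_rev
# or for a single remote via remote.<remotename>.fetchStrategy.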

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
	$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');
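
# Illustration: dumb push could be enabled for one remote with
#   git config --bool remote.<remotename>.dumbPush true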

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;

# Commands parser
while (<STDIN>) {
	chomp;

	if (!parse_command($_)) {
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed.
}
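
# Illustrative exchange on stdin (Git speaks first, one command per
# line; see the Git remote-helper protocol documentation):
#   capabilities
#   list
#   import refs/heads/master
# Batches of 'import' or 'push' commands end with a blank line.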

########################## Functions ##############################

sub parse_command {
	my ($line) = @_;
	my @cmd = split(/ /, $line);
	if (!defined $cmd[0]) {
		return 0;
	}
	if ($cmd[0] eq 'capabilities') {
		die("Too many arguments for capabilities\n")
		    if (defined($cmd[1]));
		mw_capabilities();
	} elsif ($cmd[0] eq 'list') {
		die("Too many arguments for list\n") if (defined($cmd[2]));
		mw_list($cmd[1]);
	} elsif ($cmd[0] eq 'import') {
		die("Invalid arguments for import\n")
		    if ($cmd[1] eq EMPTY || defined($cmd[2]));
		mw_import($cmd[1]);
	} elsif ($cmd[0] eq 'option') {
		die("Too many arguments for option\n")
		    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY || defined($cmd[3]));
		mw_option($cmd[1],$cmd[2]);
	} elsif ($cmd[0] eq 'push') {
		mw_push($cmd[1]);
	} else {
		print {*STDERR} "Unknown command. Aborting...\n";
		return 0;
	}
	return 1;
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "${url}/api.php";
	if ($wiki_login) {
		my %credential = (
			'url' => $url,
			'username' => $wiki_login,
			'password' => $wiki_passwd
		);
		Git::credential(\%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			Git::credential(\%credential, 'approve');
			print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
		} else {
			print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
			print {*STDERR} '  (error ' .
			    $mediawiki->{error}->{code} . ': ' .
			    $mediawiki->{error}->{details} . ")\n";
			Git::credential(\%credential, 'reject');
			exit 1;
		}
	}
	return;
}

sub fatal_mw_error {
	my $action = shift;
	print STDERR "fatal: could not $action.\n";
	print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
	if ($url =~ /^https/) {
		print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
		print STDERR "fatal: and the SSL certificate is correct.\n";
	} else {
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
	}
	print STDERR "fatal: (error " .
	    $mediawiki->{error}->{code} . ': ' .
	    $mediawiki->{error}->{details} . ")\n";
	exit 1;
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
	return;
}

sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @$page_list;
	while (@some_pages) {
		my $last_page = SLICE_SIZE;
		if ($#some_pages < $last_page) {
			$last_page = $#some_pages;
		}
		my @slice = @some_pages[0..$last_page];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
	}
	return;
}

sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# Mediawiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:${category}";
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details} . "\n";
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}

sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("get the list of wiki pages");
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}

# queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		fatal_mw_error("query the list of wiki pages");
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
	return;
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	mw_connect_maybe();

	print {*STDERR} "Listing pages on remote wiki...\n";

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print {*STDERR} "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	print {*STDERR} (scalar keys %pages) . " pages found.\n";
	return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
	my $args = shift;
	my $encoding = (shift || 'encoding(UTF-8)');
	open(my $git, "-|:${encoding}", "git ${args}")
	    or die "Unable to fork: $!\n";
	my $res = do {
		local $/ = undef;
		<$git>
	};
	close($git);

	return $res;
}

sub get_all_mediafiles {
	my $pages = shift;
	# Attach list of all pages for media files from the API;
	# they are in a different namespace, and only one namespace
	# can be queried at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id('File'),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print {*STDERR} "fatal: could not get the list of pages for media files.\n";
		print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
		print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
	return;
}

sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map { $_->{title} } values(%{$pages});

	my $batch = BATCH_SIZE;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files could be included or linked from
		# a page; get all of them.
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id('File'),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles
				    = map { $_->{title} } @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles
				    = map { $_->{title} } @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
	return;
}

sub get_mw_mediafile_for_page_revision {
	# Name of the file on Wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Search if a media file with the given timestamp exists on
	# MediaWiki. If so, download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:${filename}",
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# Mediawiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}

sub download_mw_mediafile {
	my $download_url = shift;

	my $response = $mediawiki->{ua}->get($download_url);
	if ($response->code == HTTP_CODE_OK) {
		return $response->decoded_content;
	} else {
		print {*STDERR} "Error downloading mediafile from:\n";
		print {*STDERR} "URL: ${download_url}\n";
		print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
		exit 1;
	}
}

sub get_last_local_revision {
	# Get note regarding last mediawiki revision
	my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
		print {*STDERR} 'No previous mediawiki revision found';
		$lastrevision_number = 0;
	} else {
		# Notes are formatted: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
	}
	return $lastrevision_number;
}

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		list => 'recentchanges',
		prop => 'revisions',
		rclimit => '1',
		rcdir => 'older',
	};
	my $result = $mediawiki->api($query);
	return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
	mw_connect_maybe();

	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my $max_rev_num = 0;

	print {*STDERR} "Getting last revision id on tracked pages...\n";

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print {*STDERR} "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page, and
	# ends a page with a single \n. This function right-trims the
	# string and adds a \n at the end to follow this rule.
	$string =~ s/\s+$//;
	if ($string eq EMPTY && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}
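
# Illustration: mediawiki_clean("foo  \n\n", 0) yields "foo\n", and
# mediawiki_clean(EMPTY, 1) yields EMPTY_CONTENT with an extra \n
# appended.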

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = EMPTY;
	}
	# This \n is important. This is due to MediaWiki's way of
	# handling the end of files.
	return "${string}\n";
}

sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. one that looks like
	# URL-encoding, but with _ added to prevent MediaWiki from
	# thinking this is an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we had used URI escaping before, we would have to
	# unescape here, before anything else.

	return $filename;
}

sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
	return $filename;
}
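
# Illustration: the wiki title "Foo/Bar" becomes "Foo%2FBar" on the Git
# side (the importer then adds the .mw extension), and
# mediawiki_clean_filename() performs the reverse mapping on push.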

sub literal_data {
	my ($content) = @_;
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	return;
}

sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode {*STDOUT}, ':raw';
	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
	binmode {*STDOUT}, ':encoding(UTF-8)';
	return;
}

sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
	print {*STDOUT} "import\n";
	print {*STDOUT} "list\n";
	print {*STDOUT} "push\n";
	print {*STDOUT} "\n";
	return;
}

sub mw_list {
	# MediaWiki does not have branches; we consider one branch
	# arbitrarily called master, with HEAD pointing to it.
	print {*STDOUT} "? refs/heads/master\n";
	print {*STDOUT} "\@refs/heads/master HEAD\n";
	print {*STDOUT} "\n";
	return;
}

sub mw_option {
	print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
	print {*STDOUT} "unsupported\n";
	return;
}

sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the mediawiki api limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last if (!$result->{'query-continue'});
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print {*STDERR} "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print {*STDERR} "  Found ${revnum} revision(s).\n";
	return @page_revs;
}

sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};
		print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}

sub fe_escape_path {
	my $path = shift;
	$path =~ s/\\/\\\\/g;
	$path =~ s/"/\\"/g;
	$path =~ s/\n/\\n/g;
	return qq("${path}");
}
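
# Illustration: fe_escape_path(q(a"b)) returns "a\"b" (with the
# surrounding double quotes), the C-style quoting git fast-import
# expects for paths containing special characters.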

sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
	print {*STDOUT} "mark :${n}\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print {*STDOUT} 'M 644 inline ' .
		    fe_escape_path("${title}.mw") . "\n";
		literal_data($content);
		if (%mediafile) {
			print {*STDOUT} 'M 644 inline '
			    . fe_escape_path($mediafile{title}) . "\n";
			literal_data_raw($mediafile{content});
		}
		print {*STDOUT} "\n\n";
	} else {
		print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
	}
	print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
	literal_data('Note added by git-mediawiki during import');
	if (!$full_import && $n == 1) {
		print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
	}
	print {*STDOUT} "N inline :${n}\n";
	literal_data("mediawiki_revision: $commit{mw_revision}");
	print {*STDOUT} "\n\n";
	return;
}
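
# Illustration: for one page revision, the fast-import stream produced
# above looks roughly like this (names and values are made up):
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer Alice <Alice@wiki.example.org> 1234567890 +0000
#   data 12
#   edit comment
#   M 644 inline "Main_Page.mw"
#   data 7
#   content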

# parse a sequence of
#     <cmd> <arg1>
#     <cmd> <arg2>
#     \n
# (like batch sequence of import and sequence of push statements)
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ /^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: $line\n");
		}
	}
	return;
}
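
# For example, with $cmd = 'import', reading the lines
#   import refs/heads/master
#   import refs/heads/other
#   <blank line>
# makes this function return ('refs/heads/master', 'refs/heads/other').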

sub mw_import {
	# multiple import commands can follow each other.
	my @refs = (shift, get_more_refs('import'));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print {*STDOUT} "done\n";
	return;
}

sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq 'HEAD') {
		return;
	}

	mw_connect_maybe();

	print {*STDERR} "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print {*STDERR} ", fetching from beginning.\n";
	} else {
		print {*STDERR} ", fetching from here.\n";
	}

	my $n = 0;
	if ($fetch_strategy eq 'by_rev') {
		print {*STDERR} "Fetching & writing export data by revs...\n";
		$n = mw_import_ref_by_revs($fetch_from);
	} elsif ($fetch_strategy eq 'by_page') {
		print {*STDERR} "Fetching & writing export data by pages...\n";
		$n = mw_import_ref_by_pages($fetch_from);
	} else {
		print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
		print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
		exit 1;
	}

	if ($fetch_from == 1 && $n == 0) {
		print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is done, an error is
		# thrown saying that HEAD is referring to unknown object 0000000000000000000
		# and the clone fails.
	}

	return;
}

sub mw_import_ref_by_pages {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();
	my @pages = values(%pages_hash);

	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
	my @revision_ids = map { $_->{revid} } @revisions;

	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

	my $fetch_from = shift;
	my %pages_hash = get_mw_pages();

	my $last_remote = get_last_global_remote_rev();
	my @revision_ids = $fetch_from..$last_remote;
	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
	my $fetch_from = shift;
	my $revision_ids = shift;
	my $pages = shift;

	my $n = 0;
	my $n_actual = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

	foreach my $pagerevid (@$revision_ids) {
		# Count page even if we skip it, since we display
		# $n/$total and $total includes skipped pages.
		$n++;

		# fetch the content of the pages
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid,
		};

		my $result = $mediawiki->api($query);

		if (!$result) {
			die "Failed to retrieve modified page for revision $pagerevid\n";
		}

		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
			# The revision id does not exist on the remote wiki.
			next;
		}

		if (!defined($result->{query}->{pages})) {
			die "Invalid revision ${pagerevid}.\n";
		}

		my @result_pages = values(%{$result->{query}->{pages}});
		my $result_page = $result_pages[0];
		my $rev = $result_pages[0]->{revisions}->[0];

		my $page_title = $result_page->{title};

		if (!exists($pages->{$page_title})) {
			print {*STDERR} "${n}/", scalar(@$revision_ids),
				": Skipping revision #$rev->{revid} of ${page_title}\n";
			next;
		}

		$n_actual++;

		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $rev->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiates classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace) {
			my $id = get_mw_namespace_id($namespace);
			if ($id && $id == get_mw_namespace_id('File')) {
				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
			}
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Otherwise, commit only the page.
		print {*STDERR} "${n}/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
	}

	return $n_actual;
}

sub error_non_fast_forward {
	my $advice = run_git('config --bool advice.pushNonFastForward');
	chomp($advice);
	if ($advice ne 'false') {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}

	print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
	return 0;
}

sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:${complete_file_name}";
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
		print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		mw_connect_maybe();
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print {*STDERR} "Failed to delete file on remote wiki\n";
			print {*STDERR} "Check your permissions on the remote site. Error code:\n";
			print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git("cat-file blob ${new_sha1}", 'raw');
		if ($content ne EMPTY) {
			mw_connect_maybe();
			$mediawiki->{config}->{upload_url} =
				"${url}/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details} . "\n";
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
		} else {
			print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
		}
	}
	return $newrevid;
}

sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	if ($summary eq EMPTY_MESSAGE) {
		$summary = EMPTY;
	}

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = EMPTY;
	}
	if ($extension eq 'mw') {
		my $ns = get_mw_namespace_id_for_page($complete_file_name);
		if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
			print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
			return ($oldrevid, 'ok');
		}
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob ${new_sha1}");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accentuated characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print {*STDERR} 'Warning: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
				    ".\n";
				return ($oldrevid, 'non-fast-forward');
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
				    $mediawiki->{error}->{code} .
				    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
	} elsif ($export_media) {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	} else {
		print {*STDERR} "Ignoring media file ${title}\n";
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, 'ok');
}

sub mw_push {
	# multiple push statements can follow each other
	my @refsspecs = (shift, get_more_refs('push'));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
		if ($force) {
			print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq EMPTY) {
			print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
			print {*STDOUT} "error ${remote} cannot delete\n";
			next;
		}
		if ($remote ne 'refs/heads/master') {
			print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
			print {*STDOUT} "error ${remote} only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print {*STDOUT} "\n";

	if ($pushed && $dumb_push) {
		print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
		print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
		print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
		print {*STDERR} "\n";
		print {*STDERR} "  git pull --rebase\n";
		print {*STDERR} "\n";
	}
	return;
}

sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
	chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from last MediaWiki commit to pushed commit
		print {*STDERR} "Computing path from local to remote ...\n";
		my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
		my %local_ancestry;
		foreach my $line (@local_ancestry) {
			if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
				foreach my $parent (split(/ /, $parents)) {
					$local_ancestry{$parent} = $child;
				}
			} elsif ($line !~ /^([a-f0-9]+)/) {
				die "Unexpected output from git rev-list: ${line}\n";
			}
		}
		while ($parsed_sha1 ne $HEAD_sha1) {
			my $child = $local_ancestry{$parsed_sha1};
			if (!$child) {
				print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
				return error_non_fast_forward($remote);
			}
			push(@commit_pairs, [$parsed_sha1, $child]);
			$parsed_sha1 = $child;
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children ${local}");
		my @history = split(/\n/, $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/[ \n]/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as mediawiki comment for the revision
		my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq 'non-fast-forward') {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne 'ok') {
				die("Unknown error from mw_push_file()\n");
			}
		}
		if (!$dumb_push) {
			run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
			run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
		}
	}

	print {*STDOUT} "ok ${remote}\n";
	return 1;
}

sub get_allowed_file_extensions {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
	my %hashFile = map { $_ => 1 } @file_extensions;

	return %hashFile;
}
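
# Illustration: on a stock MediaWiki the returned hash might look like
# ('png' => 1, 'gif' => 1, 'jpg' => 1, 'jpeg' => 1); the exact keys
# depend on the wiki's $wgFileExtensions setting.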

# In memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	mw_connect_maybe();
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Look at configuration file, if the record for that namespace is
		# already cached. Namespaces are stored in form:
		# "Name_of_namespace:Id_namespace", ex.: "File:6".
		my @temp = split(/\n/,
				 run_git("config --get-all remote.${remotename}.namespaceCache"));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			if ($id eq 'notANameSpace') {
				$namespace_id{$n} = {is_namespace => 0};
			} else {
				$namespace_id{$n} = {is_namespace => 1, id => $id};
			}
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
		# NS not found => get namespace id from MW and store it in
		# configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
				if ($ns->{'*'}) {
					# alias (e.g. french Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
				}
			}
		}
	}

	my $ns = $namespace_id{$name};
	my $id;

	if (!defined $ns) {
		print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
		$ns = {is_namespace => 0};
		$namespace_id{$name} = $ns;
	}

	if ($ns->{is_namespace}) {
		$id = $ns->{id};
	}

	# Store "notANameSpace" as special value for nonexistent namespaces
	my $store_id = ($id || 'notANameSpace');

	# Store explicitly requested namespaces on disk
	if (!exists $cached_mw_namespace_id{$name}) {
		run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
		$cached_mw_namespace_id{$name} = 1;
	}
	return $id;
}
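
# Illustration (assuming a remote named "origin"): after a run,
# .git/config may contain cache entries such as
#   [remote "origin"]
#       namespaceCache = File:6
#       namespaceCache = SomeBogusNamespace:notANameSpace
# following the "Name_of_namespace:Id_namespace" format described above.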

sub get_mw_namespace_id_for_page {
	my $namespace = shift;
	if ($namespace =~ /^([^:]*):/) {
		return get_mw_namespace_id($namespace);
	} else {
		return;
	}
}